Compare commits

...

30 Commits

Author SHA1 Message Date
Sean Massa
e54da8a2e5 Publish Stable
- @vercel/build-utils@5.8.2
 - vercel@28.12.3
 - @vercel/client@12.2.30
 - @vercel/edge@0.2.5
 - @vercel/error-utils@1.0.7
 - @vercel/frameworks@1.2.3
 - @vercel/fs-detectors@3.7.3
 - @vercel/gatsby-plugin-vercel-analytics@1.0.5
 - @vercel/go@2.2.28
 - @vercel/hydrogen@0.0.42
 - @vercel/next@3.3.13
 - @vercel/node-bridge@3.1.7
 - @vercel/node@2.8.10
 - @vercel/python@3.1.38
 - @vercel/redwood@1.0.49
 - @vercel/remix@1.2.3
 - @vercel/routing-utils@2.1.7
 - @vercel/ruby@1.3.54
 - @vercel/static-build@1.1.5
 - @vercel/static-config@2.0.10
2023-01-13 15:06:45 -06:00
Sean Massa
a066bedf95 add the lockfile before commit 2023-01-13 15:06:16 -06:00
Sean Massa
09b23e53ba update lockfile because lerna did not do its job 2023-01-13 15:05:29 -06:00
Sean Massa
b793a67588 Publish Stable
- @vercel/build-utils@5.8.1
 - vercel@28.12.2
 - @vercel/client@12.2.29
 - @vercel/edge@0.2.4
 - @vercel/error-utils@1.0.6
 - @vercel/frameworks@1.2.2
 - @vercel/fs-detectors@3.7.2
 - @vercel/gatsby-plugin-vercel-analytics@1.0.4
 - @vercel/go@2.2.27
 - @vercel/hydrogen@0.0.41
 - @vercel/next@3.3.12
 - @vercel/node-bridge@3.1.6
 - @vercel/node@2.8.9
 - @vercel/python@3.1.37
 - @vercel/redwood@1.0.48
 - @vercel/remix@1.2.2
 - @vercel/routing-utils@2.1.6
 - @vercel/ruby@1.3.53
 - @vercel/static-build@1.1.4
 - @vercel/static-config@2.0.9
2023-01-13 15:01:55 -06:00
Sean Massa
31dd354b3a remove unnecessary checkout 2023-01-13 15:00:52 -06:00
Sean Massa
529ff3b2d7 add "version" lifecycle hook for lerna to update lockfile before commit 2023-01-13 14:59:49 -06:00
Sean Massa
e71d5638ee Publish Stable
- @vercel/build-utils@5.8.0
 - vercel@28.12.1
 - @vercel/client@12.2.28
 - @vercel/edge@0.2.3
 - @vercel/error-utils@1.0.5
 - @vercel/frameworks@1.2.1
 - @vercel/fs-detectors@3.7.1
 - @vercel/gatsby-plugin-vercel-analytics@1.0.3
 - @vercel/go@2.2.26
 - @vercel/hydrogen@0.0.40
 - @vercel/next@3.3.11
 - @vercel/node-bridge@3.1.5
 - @vercel/node@2.8.8
 - @vercel/python@3.1.36
 - @vercel/redwood@1.0.47
 - @vercel/remix@1.2.1
 - @vercel/routing-utils@2.1.5
 - @vercel/ruby@1.3.52
 - @vercel/static-build@1.1.3
 - @vercel/static-config@2.0.8
2023-01-13 14:47:25 -06:00
Ethan Arrowood
8c16e765ee [tests] adjust root and api deps to use workspace:* (#9223) 2023-01-13 14:29:23 -06:00
Ethan Arrowood
a008c9c7fe [build-utils] Support empty directory entries for glob() and download() (#9164)
Reopening #9103 since it was reverted.
2023-01-13 12:01:37 -08:00
Sean Massa
62b28ad0b4 Publish Stable
- @vercel/build-utils@5.7.6
 - vercel@28.12.0
 - @vercel/client@12.2.27
 - @vercel/edge@0.2.2
 - @vercel/error-utils@1.0.4
 - @vercel/frameworks@1.2.0
 - @vercel/fs-detectors@3.7.0
 - @vercel/gatsby-plugin-vercel-analytics@1.0.2
 - @vercel/go@2.2.25
 - @vercel/hydrogen@0.0.39
 - @vercel/next@3.3.10
 - @vercel/node-bridge@3.1.4
 - @vercel/node@2.8.7
 - @vercel/python@3.1.35
 - @vercel/redwood@1.0.46
 - @vercel/remix@1.2.0
 - @vercel/routing-utils@2.1.4
 - @vercel/ruby@1.3.51
 - @vercel/static-build@1.1.2
 - @vercel/static-config@2.0.7
2023-01-13 14:00:46 -06:00
Sean Massa
7c50f2916e [frameworks] update framework detectors with packages (#9167)
In [a different PR](https://github.com/vercel/vercel/pull/9009), detecting frameworks by package name will also provide framework version metadata to the build. Should we update these framework detectors to look up their respective packages, or were they not doing that already for a reason?

I left the old detectors in place as fallbacks, which looks like:

```
some: [
  {
    path: 'package.json',
    matchContent:
      '"(dev)?(d|D)ependencies":\\s*{[^}]*"remix":\\s*".+?"[^}]*}',
  },
  {
    path: 'remix.config.js',
  },
],
```

Please review carefully.
2023-01-13 19:04:59 +00:00
github-actions[bot]
a521dadafb [examples] Upgrade Next.js to version 13.1.2 (#9217)
This auto-generated PR updates Next.js to version 13.1.2

Co-authored-by: vercel-release-bot <infra+release@vercel.com>
2023-01-13 13:26:57 -05:00
Steven
1efb5d6c0d [fs-detectors] Add support for api/**/*.tsx zero config detection (#9216)
There are cases where you would want to use `.tsx` with Serverless Functions, such as OG Image Generation.

This PR adds zero config detection for `.tsx` file extensions and also consolidates the glob pattern into a single string matching all valid `@vercel/node` cases.
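
As a sketch, such a consolidated pattern could look like this (the exact string used in the PR may differ):

```
// hypothetical single glob covering the valid @vercel/node extensions
const nodeApiPattern = 'api/**/*.{js,mjs,ts,tsx}';
```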

https://twitter.com/hasparus/status/1593136849404694528

https://linear.app/vercel/issue/VCCLI-322
2023-01-13 17:59:49 +00:00
Sean Massa
72df5ce8f6 [examples][frameworks] add tests for all examples being detected (#9197)
This PR adds tests (under `packages/fs-detectors`) that ensure all `./examples` get detected as the appropriate framework.
2023-01-13 17:24:00 +00:00
Chris Barber
e20b74687f [docs] Updated local dev instructions in CLI readme (#9215)
When I run `pnpm`, it just prints the help screen. I need to specify the install command: `pnpm i`.
2023-01-13 16:25:53 +00:00
Sean Massa
8f1358bd15 [cli][frameworks][fs-detectors][next] detect framework versions (#9009)
This PR:

- updates `packages/frameworks` to have most supported frameworks specify which dependency version should reflect the overall framework version
- updates `packages/fs-detectors` to allow framework detection that returns the full `Framework` record instead of just the slug
- updates `packages/next` to return the detected Next.js version in the build result
- updates `packages/cli` to leverage these changes so that `vc build` can add `framework: { version: string; }` to `config.json` output

The result is that Build Output API and supported frameworks will return their framework version in the build result of `vc build` when possible, which is used by the build container when creating the deployment. The dashboard later retrieves this value to display richer deployment output.
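
For illustration, the new field in the `config.json` output would look roughly like this (the version value here is hypothetical):

```
{
  "framework": {
    "version": "13.1.2"
  }
}
```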

Supports:

- https://github.com/vercel/api/pull/15601
- https://github.com/vercel/front/pull/18319

---

With the related build container updates, we get to see Next.js version in the build output. You'll see this with BOA+Prebuilt or a normal deploy:

<img width="1228" alt="Screen Shot 2022-12-09 at 2 48 12 PM" src="https://user-images.githubusercontent.com/41545/206793639-f9cd3bdf-b822-45dd-9564-95b94994271d.png">

---

### The Path to this PR

I went through all the supported frameworks and figured out how to best determine their versions. For most of them, we can check a known dependency's installed version number. 

We can get most of the way only checking npm. For a handful, we'd have to support Go/Ruby/Rust/Whatever dependencies.

I started with a more complex method signature to allow for later expansion without having to change it. In practice, it looked like this:

```
getVersion: async (dependencies: DependencyMap) => dependencies['next']
```

However, after checking all currently supported frameworks, I don't think this will end up being necessary. It also has the constraint that all dependencies have to be gathered and presented to the function even though it only needs to check for one or two. That's not a huge deal if we have them already where we need them, but we don't. We could use a variant here where this function does its own lookups, but this seemed unnecessary and would beg for duplication and small variances that could cause bugs.

Further, if we only look at `package.json`, we're going to see either a specific version or a version range. To be precise, we have to look at the installed version of the package. That means checking one of the various types of lockfiles that can exist or poking into node_modules.

If we poke into node_modules to detect the installed version, we introduce another point where Yarn 3 (default mode) will not be supported. If we read lockfiles, we have to potentially parse `npm`, `pnpm`, and `yarn` lockfiles.

If we use `npm ls <package-name>`, that also fails in Yarn 3 (default mode). We could accept that and go forward anyway, which would look like:

```
const args = `ls ${packageName} --depth=0`.split(' ');
const { stdout } = await execa('npm', args, { cwd });
const regex = new RegExp(String.raw`${packageName}@([\.\d]+)`);
const matches = stdout.match(regex);
if (matches) {
  return matches[1];
}
```

But it turns out there's a `--json` option! That's what I ended up using, for now.
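
A minimal sketch of that approach (assuming the shape of npm's JSON output; error handling omitted):

```
import execa from 'execa';

// sketch: read the installed version via `npm ls <pkg> --json`
// returns e.g. "13.1.2", or undefined when the package is not installed
async function getInstalledVersion(packageName: string, cwd: string) {
  const args = ['ls', packageName, '--depth=0', '--json'];
  const { stdout } = await execa('npm', args, { cwd });
  const tree = JSON.parse(stdout);
  return tree.dependencies?.[packageName]?.version as string | undefined;
}
```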

We could explore the lockfile route more, but after some initial digging, it's non-trivial. There are 3 main lockfiles we'd want to check for (npm, pnpm, and yarn) and there are different lockfile versions that put necessary data in different places. I looked for existing tools that parse this, but I didn't find any. We could certainly go down this path, but the effort doesn't seem worth it when `npm ls` gets us really close.

---

### Follow-up Versioning

Now that we know how to determine version per framework, we can vary configuration by version. In a future PR, we could allow a given value to vary by version number:

```
name: (version) => {
  if (semver.gt(version, '9.8.7')) {
    return 'some-framework-2';
  }

  return 'some-framework';
}
```

However, it may still be easier to differentiate significant versions by adding multiple entries in the list.
2023-01-13 07:50:00 +00:00
Logan McAnsh
74c0b3e1bb [remix] Add support for .cjs and .mjs extensions for "remix.config" (#8793)
Adds support for `remix.config.mjs` and `remix.config.cjs` and
also updates the example/fixtures to the latest version of Remix.

See: https://github.com/remix-run/remix/pull/3675
2023-01-12 19:26:52 -08:00
Chris Barber
eb0a031aeb [tests] Adding missing jest-matcher-utils dependency (#9214)
Fixes the following error when tests are run:

```
Cannot find module 'jest-matcher-utils' from 'test/mocks/matchers/to-output.ts'
```

Log: https://github.com/vercel/vercel/actions/runs/3898221824/jobs/6656730316#step:10:3049
2023-01-12 23:32:11 +00:00
Chris Barber
f327be2d1f [cli] Bumped get latest version test timeouts (#9213) 2023-01-12 22:45:21 +00:00
Ethan Arrowood
16a5867f6b [fs-detectors] use project path instead of name for turbo filter (#9210)
Fixes: https://github.com/orgs/vercel/discussions/1218
2023-01-12 18:01:38 +00:00
Steven
90cbd675fa [cli] Remove qs dependency (#9206)
There is no need for `qs` because the query string for `teamId` is already handled here:

5b88f673f8/packages/cli/src/util/client.ts (L99)
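
In spirit, that handling amounts to something like this (a sketch with illustrative names, not the exact code at the referenced line):

```
// append teamId to the request URL without needing `qs`
// (`path`, `apiUrl`, and `teamId` are illustrative names)
const url = new URL(path, apiUrl);
if (teamId) url.searchParams.set('teamId', teamId);
```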
2023-01-12 02:01:16 +00:00
Ethan Arrowood
9c768b98b7 [tests] Migrate from yarn to pnpm (#9198)

yarn has become increasingly difficult to use, as the v1 release we rely on no longer receives updates. pnpm is faster and actively maintained.

This PR migrates us to pnpm.
2023-01-11 23:35:13 +00:00
JJ Kasper
4c3bc05322 Publish Stable
- @vercel/build-utils@5.7.5
 - vercel@28.11.1
 - @vercel/client@12.2.26
 - @vercel/fs-detectors@3.6.2
 - @vercel/gatsby-plugin-vercel-analytics@1.0.1
 - @vercel/go@2.2.24
 - @vercel/hydrogen@0.0.38
 - @vercel/next@3.3.9
 - @vercel/node@2.8.6
 - @vercel/python@3.1.34
 - @vercel/redwood@1.0.45
 - @vercel/remix@1.1.7
 - @vercel/ruby@1.3.50
 - @vercel/static-build@1.1.1
2023-01-11 12:01:03 -08:00
JJ Kasper
3f47587a8b [next] fix lambda creation when using edge runtime (#9204)
Co-authored-by: remorses <beats.by.morse@gmail.com>
2023-01-11 09:35:21 -08:00
Nathan Rajlich
84f93d8af4 [build-utils] Support directory entries in Lambda#createZip() (#9201)
Adds support for empty directory entries in the Lambda `createZip()` function.
2023-01-11 10:21:42 +00:00
Chris Barber
e1aaf8080b [cli] Replace update-notifier dependency with built-in implementation (#9098)
This PR replaces the `update-notifier` dependency with a custom implementation.

There are a few reasons: the dependency is quite large, it requires ESM in order to update, it can sometimes suggest an update to an older version, and it uses dependencies with known security issues.

The result looks like:

<img width="768" alt="image" src="https://user-images.githubusercontent.com/97262/208452226-b7508299-f830-4d42-a96a-7646ec8227aa.png">

Note: This PR is the successor to https://github.com/vercel/vercel/pull/8090.
2023-01-11 03:45:36 +00:00
JJ Kasper
0857352967 [next] Fix dynamic routes order for app dir (#9202)
This ensures the RSC dynamic routes come before the HTML route, since the RSC route is more specific and the HTML route can otherwise end up matching first unexpectedly.
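
Conceptually, the fix keeps the emitted routes in this order (hypothetical paths, not the builder's exact output):

```
// hypothetical ordering: the more specific RSC route must come first
{ "src": "^/blog/([^/]+)\\.rsc$", "dest": "/blog/[slug].rsc" },
{ "src": "^/blog/([^/]+)$", "dest": "/blog/[slug]" }
```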

Test deployment with fix: https://discord-gmki6w031-vtest314-ijjk-testing.vercel.app/

Fixes: https://github.com/vercel/next.js/issues/44728
2023-01-11 00:24:28 +00:00
Chris Barber
f92d229a63 [cli] Rollback team check (#9120)
https://linear.app/vercel/issue/VCCLI-377/rollback-failing-for-enterprise-teams

When running `vc rollback` for a deployment belonging to another team, the command fails while requesting the rollback. Technically, it should have failed earlier, while fetching the deployment. This PR checks that the current team matches the team of the deployment being rolled back.
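
The shape of the added check, roughly (hypothetical names, not the exact CLI code):

```
// sketch: bail out before requesting the rollback
if (deployment.team?.id && deployment.team.id !== currentTeamId) {
  output.error('Deployment belongs to a different team.');
  return 1;
}
```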

![image](https://user-images.githubusercontent.com/97262/210431585-ffb73658-b15c-4adb-b110-a8c5e816db32.png)

This PR also cleans up a bunch of deployment-related things. There were three functions to get a deployment; now there's just one, which uses the latest v13 API. Deployment-fetch error handling now throws instead of returning an error. The `Deployment` type definition has been updated to match the v13 response, and mock deployment data was also updated.
2023-01-10 21:05:08 +00:00
Steven
427a2a58cf [tests] Update turbo to 1.7.0-canary.9 (#9193)
Let's try out turbo canary

Co-authored-by: tknickman <tom.knickman@vercel.com>
2023-01-10 12:51:12 -05:00
Ethan Arrowood
ccb5f301ad [static-build] @vercel/static-build to use @vercel/gatsby-plugin-vercel-analytics (#9194)
Updates the `static-build` injector to use the new plugin.

Still need to verify somehow that the newly published plugin is working as expected. It should be fine since it was a copy-paste from the previous plugin repo, but always good to verify before we break everything! 

This PR also updates the README in `@vercel/gatsby-plugin-vercel-analytics`
2023-01-10 15:22:31 +00:00
153 changed files with 27711 additions and 17927 deletions

View File

@@ -6,7 +6,7 @@ Please read our [Code of Conduct](CODE_OF_CONDUCT.md) and follow it in all your
## Local development
This project is configured in a monorepo, where one repository contains multiple npm packages. Dependencies are installed and managed with `yarn`, not `npm` CLI.
This project is configured in a monorepo, where one repository contains multiple npm packages. Dependencies are installed and managed with `pnpm`, not `npm` CLI.
To get started, execute the following:
@@ -14,22 +14,22 @@ To get started, execute the following:
git clone https://github.com/vercel/vercel
cd vercel
corepack enable
yarn install
yarn bootstrap
yarn build
yarn lint
yarn test-unit
pnpm install
pnpm bootstrap
pnpm build
pnpm lint
pnpm test-unit
```
Make sure all the tests pass before making changes.
### Running Vercel CLI Changes
You can use `yarn dev` from the `cli` package to invoke Vercel CLI with local changes:
You can use `pnpm dev` from the `cli` package to invoke Vercel CLI with local changes:
```
cd ./packages/cli
yarn dev <cli-commands...>
pnpm dev <cli-commands...>
```
See [CLI Local Development](../packages/cli#local-development) for more details.
@@ -39,7 +39,7 @@ See [CLI Local Development](../packages/cli#local-development) for more details.
Once you are done with your changes (we even suggest doing it along the way), make sure all the tests still pass by running:
```
yarn test-unit
pnpm test-unit
```
from the root of the project.
@@ -102,7 +102,7 @@ When you run this script, you'll see all the imported files. If any file is
Sometimes you want to test changes to a Builder against an existing project, maybe with `vercel dev` or actual deployment. You can avoid publishing every Builder change to npm by uploading the Builder as a tarball.
1. Change directory to the desired Builder `cd ./packages/node`
2. Run `yarn build` to compile typescript and other build steps
2. Run `pnpm build` to compile typescript and other build steps
3. Run `npm pack` to create a tarball file
4. Run `vercel *.tgz` to upload the tarball file and get a URL
5. Edit any existing `vercel.json` project and replace `use` with the URL

View File

@@ -47,20 +47,22 @@ jobs:
uses: actions/cache@v3
with:
path: '**/node_modules'
key: yarn-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('yarn.lock') }}
restore-keys: yarn-${{ matrix.os }}-${{ matrix.node }}
key: pnpm-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: pnpm-${{ matrix.os }}-${{ matrix.node }}
- name: install pnpm@7.24.2
run: npm i -g pnpm@7.24.2
- name: Install
if: ${{ steps.check-release.outputs.IS_RELEASE == 'true' }}
run: yarn install --check-files --frozen-lockfile --network-timeout 1000000
run: pnpm install
- name: Build
if: ${{ steps.check-release.outputs.IS_RELEASE == 'true' }}
run: yarn build
run: pnpm build
env:
GA_TRACKING_ID: ${{ secrets.GA_TRACKING_ID }}
SENTRY_DSN: ${{ secrets.SENTRY_DSN }}
- name: Publish
if: ${{ steps.check-release.outputs.IS_RELEASE == 'true' }}
run: yarn publish-from-github
run: pnpm publish-from-github
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN_ELEVATED }}
GA_TRACKING_ID: ${{ secrets.GA_TRACKING_ID }}

View File

@@ -41,11 +41,13 @@ jobs:
- uses: actions/cache@v3
with:
path: '**/node_modules'
key: yarn-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('yarn.lock') }}
restore-keys: yarn-${{ matrix.os }}-${{ matrix.node }}
- run: yarn install --network-timeout 1000000 --frozen-lockfile
- run: yarn run build
- run: yarn test-integration-cli
key: pnpm-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: pnpm-${{ matrix.os }}-${{ matrix.node }}
- name: install pnpm@7.24.2
run: npm i -g pnpm@7.24.2
- run: pnpm install
- run: pnpm run build
- run: pnpm test-integration-cli
env:
VERCEL_TEST_TOKEN: ${{ secrets.VERCEL_TEST_TOKEN }}
VERCEL_TEST_REGISTRATION_URL: ${{ secrets.VERCEL_TEST_REGISTRATION_URL }}

View File

@@ -41,14 +41,16 @@ jobs:
- uses: actions/cache@v3
with:
path: '**/node_modules'
key: yarn-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('yarn.lock') }}
restore-keys: yarn-${{ matrix.os }}-${{ matrix.node }}
- run: yarn install --network-timeout 1000000 --frozen-lockfile
- run: yarn run build
- run: yarn run lint
key: pnpm-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: pnpm-${{ matrix.os }}-${{ matrix.node }}
- name: install pnpm@7.24.2
run: npm i -g pnpm@7.24.2
- run: pnpm install
- run: pnpm run build
- run: pnpm run lint
if: matrix.os == 'ubuntu-latest' && matrix.node == 14 # only run lint once
- run: yarn run test-unit
- run: yarn workspace vercel run coverage
- run: pnpm run test-unit
- run: pnpm -C packages/cli run coverage
if: matrix.os == 'ubuntu-latest' && matrix.node == 14 # only run coverage once
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -39,9 +39,11 @@ jobs:
- uses: actions/cache@v3
with:
path: '**/node_modules'
key: yarn-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('yarn.lock') }}
restore-keys: yarn-${{ matrix.os }}-${{ matrix.node }}
- run: yarn install --network-timeout 1000000 --frozen-lockfile
key: pnpm-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: pnpm-${{ matrix.os }}-${{ matrix.node }}
- name: install pnpm@7.24.2
run: npm i -g pnpm@7.24.2
- run: pnpm install
- id: set-tests
run: |
TESTS_ARRAY=$(node utils/chunk-tests.js $SCRIPT_NAME)
@@ -80,14 +82,17 @@ jobs:
- uses: actions/cache@v3
with:
path: '**/node_modules'
key: yarn-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('yarn.lock') }}
restore-keys: yarn-${{ matrix.os }}-${{ matrix.node }}
key: pnpm-${{ matrix.os }}-${{ matrix.node }}-${{ hashFiles('pnpm-lock.yaml') }}
restore-keys: pnpm-${{ matrix.os }}-${{ matrix.node }}
- name: Install Hugo
if: matrix.runner == 'macos-latest'
run: curl -L -O https://github.com/gohugoio/hugo/releases/download/v0.56.0/hugo_0.56.0_macOS-64bit.tar.gz && tar -xzf hugo_0.56.0_macOS-64bit.tar.gz && mv ./hugo packages/cli/test/dev/fixtures/08-hugo/
- run: yarn install --network-timeout 1000000
- name: install pnpm@7.24.2
run: npm i -g pnpm@7.24.2
- run: pnpm install
- name: Build ${{matrix.packageName}} and all its dependencies
run: node utils/gen.js && node_modules/.bin/turbo run build --cache-dir=".turbo" --scope=${{matrix.packageName}} --include-dependencies --no-deps

View File

@@ -1,4 +1,4 @@
#!/bin/sh
. "$(dirname "$0")/_/husky.sh"
yarn pre-commit
pnpm pre-commit

.npmrc (new file, 5 lines)
View File

@@ -0,0 +1,5 @@
save-exact=true
hoist-pattern[]=!"**/@types/**"
hoist-pattern[]=!"**/typedoc"
hoist-pattern[]=!"**/typedoc-plugin-markdown"
hoist-pattern[]=!"**/typedoc-plugin-mdn-links"

View File

@@ -1 +0,0 @@
save-prefix ""

View File

@@ -33,9 +33,9 @@ For details on how to use Vercel, check out our [documentation](https://vercel.c
## Contributing
This project uses [yarn](https://yarnpkg.com/) to install dependencies and run scripts.
This project uses [pnpm](https://pnpm.io/) to install dependencies and run scripts.
You can use the `dev` script to run local changes as if you were invoking Vercel CLI. For example, `vercel deploy --cwd=/path/to/project` could be run with local changes with `yarn dev deploy --cwd=/path/to/project`.
You can use the `dev` script to run local changes as if you were invoking Vercel CLI. For example, `vercel deploy --cwd=/path/to/project` could be run with local changes with `pnpm dev deploy --cwd=/path/to/project`.
See the [Contributing Guidelines](./.github/CONTRIBUTING.md) for more details.

View File

@@ -13,6 +13,8 @@ function initSentry() {
sentryInitDone = true;
init({
// Cannot figure out what's going wrong here. VSCode resolves this fine. But when we build it blows up.
// @ts-ignore
dsn: assertEnv('SENTRY_DSN'),
environment: process.env.NODE_ENV || 'production',
release: `${serviceName}`,

View File

@@ -4,9 +4,7 @@
"version": "0.0.0",
"description": "API for the vercel/vercel repo",
"main": "index.js",
"scripts": {
"//TODO": "We should add this pkg to yarn workspaces"
},
"scripts": {},
"dependencies": {
"@sentry/node": "5.11.1",
"got": "10.2.1",
@@ -16,9 +14,9 @@
"unzip-stream": "0.3.0"
},
"devDependencies": {
"@types/node": "14.18.33",
"@types/node": "16.18.11",
"@types/node-fetch": "2.5.4",
"@vercel/node": "1.9.0",
"typescript": "3.9.6"
"@vercel/node": "workspace:*",
"typescript": "4.3.4"
}
}

View File

@@ -12,5 +12,5 @@
"resolveJsonModule": true,
"isolatedModules": true
},
"include": ["examples", "frameworks.ts"]
"include": ["examples", "frameworks.ts", "_lib"]
}

View File

@@ -1,524 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
"@sentry/apm@5.11.1":
version "5.11.1"
resolved "https://registry.yarnpkg.com/@sentry/apm/-/apm-5.11.1.tgz#cc89fa4150056fbf009f92eca94fccc3980db34e"
integrity sha512-4iZH11p/7w9IMLT9hqNY1+EqLESltiIoF6/YsbpK93sXWGEs8VQ83IuvGuKWxajvHgDmj4ND0TxIliTsYqTqFw==
dependencies:
"@sentry/browser" "5.11.1"
"@sentry/hub" "5.11.1"
"@sentry/minimal" "5.11.1"
"@sentry/types" "5.11.0"
"@sentry/utils" "5.11.1"
tslib "^1.9.3"
"@sentry/browser@5.11.1":
version "5.11.1"
resolved "https://registry.yarnpkg.com/@sentry/browser/-/browser-5.11.1.tgz#337ffcb52711b23064c847a07629e966f54a5ebb"
integrity sha512-oqOX/otmuP92DEGRyZeBuQokXdeT9HQRxH73oqIURXXNLMP3PWJALSb4HtT4AftEt/2ROGobZLuA4TaID6My/Q==
dependencies:
"@sentry/core" "5.11.1"
"@sentry/types" "5.11.0"
"@sentry/utils" "5.11.1"
tslib "^1.9.3"
"@sentry/core@5.11.1":
version "5.11.1"
resolved "https://registry.yarnpkg.com/@sentry/core/-/core-5.11.1.tgz#9e2da485e196ae32971545c1c49ee6fe719930e2"
integrity sha512-BpvPosVNT20Xso4gAV54Lu3KqDmD20vO63HYwbNdST5LUi8oYV4JhvOkoBraPEM2cbBwQvwVcFdeEYKk4tin9A==
dependencies:
"@sentry/hub" "5.11.1"
"@sentry/minimal" "5.11.1"
"@sentry/types" "5.11.0"
"@sentry/utils" "5.11.1"
tslib "^1.9.3"
"@sentry/hub@5.11.1":
version "5.11.1"
resolved "https://registry.yarnpkg.com/@sentry/hub/-/hub-5.11.1.tgz#ddcb865563fae53852d405885c46b4c6de68a91b"
integrity sha512-ucKprYCbGGLLjVz4hWUqHN9KH0WKUkGf5ZYfD8LUhksuobRkYVyig0ZGbshECZxW5jcDTzip4Q9Qimq/PkkXBg==
dependencies:
"@sentry/types" "5.11.0"
"@sentry/utils" "5.11.1"
tslib "^1.9.3"
"@sentry/minimal@5.11.1":
version "5.11.1"
resolved "https://registry.yarnpkg.com/@sentry/minimal/-/minimal-5.11.1.tgz#0e705d01a567282d8fbbda2aed848b4974cc3cec"
integrity sha512-HK8zs7Pgdq7DsbZQTThrhQPrJsVWzz7MaluAbQA0rTIAJ3TvHKQpsVRu17xDpjZXypqWcKCRsthDrC4LxDM1Bg==
dependencies:
"@sentry/hub" "5.11.1"
"@sentry/types" "5.11.0"
tslib "^1.9.3"
"@sentry/node@5.11.1":
version "5.11.1"
resolved "https://registry.yarnpkg.com/@sentry/node/-/node-5.11.1.tgz#2a9c18cd1209cfdf7a69b9d91303413149d2c910"
integrity sha512-FbJs0blJ36gEzE0rc2yBfA/KE+kXOLl8MUfFTcyJCBdCGF8XMETDCmgINnJ4TyBUJviwKoPw2TCk9TL2pa/A1w==
dependencies:
"@sentry/apm" "5.11.1"
"@sentry/core" "5.11.1"
"@sentry/hub" "5.11.1"
"@sentry/types" "5.11.0"
"@sentry/utils" "5.11.1"
cookie "^0.3.1"
https-proxy-agent "^4.0.0"
lru_map "^0.3.3"
tslib "^1.9.3"
"@sentry/types@5.11.0":
version "5.11.0"
resolved "https://registry.yarnpkg.com/@sentry/types/-/types-5.11.0.tgz#40f0f3174362928e033ddd9725d55e7c5cb7c5b6"
integrity sha512-1Uhycpmeo1ZK2GLvrtwZhTwIodJHcyIS6bn+t4IMkN9MFoo6ktbAfhvexBDW/IDtdLlCGJbfm8nIZerxy0QUpg==
"@sentry/utils@5.11.1":
version "5.11.1"
resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-5.11.1.tgz#aa19fcc234cf632257b2281261651d2fac967607"
integrity sha512-O0Zl4R2JJh8cTkQ8ZL2cDqGCmQdpA5VeXpuBbEl1v78LQPkBDISi35wH4mKmLwMsLBtTVpx2UeUHBj0KO5aLlA==
dependencies:
"@sentry/types" "5.11.0"
tslib "^1.9.3"
"@sindresorhus/is@^1.0.0":
version "1.2.0"
resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-1.2.0.tgz#63ce3638cb85231f3704164c90a18ef816da3fb7"
integrity sha512-mwhXGkRV5dlvQc4EgPDxDxO6WuMBVymGFd1CA+2Y+z5dG9MNspoQ+AWjl/Ld1MnpCL8AKbosZlDVohqcIwuWsw==
"@szmarczak/http-timer@^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.0.tgz#309789ccb7842ff1e41848cf43da587f78068836"
integrity sha512-3yoXv8OtGr/r3R5gaWWNQ3VUoQ5G3Gmo8DXX95V14ZVvE2b7Pj6Ide9uIDON8ym4D/ItyfL9ejohYUPqOyvRXw==
dependencies:
defer-to-connect "^1.1.1"
"@types/cacheable-request@^6.0.1":
version "6.0.1"
resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.1.tgz#5d22f3dded1fd3a84c0bbeb5039a7419c2c91976"
integrity sha512-ykFq2zmBGOCbpIXtoVbz4SKY5QriWPh3AjyU4G74RYbtt5yOc5OfaY75ftjg7mikMOla1CTGpX3lLbuJh8DTrQ==
dependencies:
"@types/http-cache-semantics" "*"
"@types/keyv" "*"
"@types/node" "*"
"@types/responselike" "*"
"@types/http-cache-semantics@*":
version "4.0.0"
resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.0.tgz#9140779736aa2655635ee756e2467d787cfe8a2a"
integrity sha512-c3Xy026kOF7QOTn00hbIllV1dLR9hG9NkSrLQgCVs8NF6sBU+VGWjD3wLPhmh1TYAc7ugCFsvHYMN4VcBN1U1A==
"@types/keyv@*":
version "3.1.1"
resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-3.1.1.tgz#e45a45324fca9dab716ab1230ee249c9fb52cfa7"
integrity sha512-MPtoySlAZQ37VoLaPcTHCu1RWJ4llDkULYZIzOYxlhxBqYPB0RsRlmMU0R6tahtFe27mIdkHV+551ZWV4PLmVw==
dependencies:
"@types/node" "*"
"@types/node-fetch@2.5.4":
version "2.5.4"
resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.5.4.tgz#5245b6d8841fc3a6208b82291119bc11c4e0ce44"
integrity sha512-Oz6id++2qAOFuOlE1j0ouk1dzl3mmI1+qINPNBhi9nt/gVOz0G+13Ao6qjhdF0Ys+eOkhu6JnFmt38bR3H0POQ==
dependencies:
"@types/node" "*"
"@types/node@*", "@types/node@13.1.4":
version "13.1.4"
resolved "https://registry.yarnpkg.com/@types/node/-/node-13.1.4.tgz#4cfd90175a200ee9b02bd6b1cd19bc349741607e"
integrity sha512-Lue/mlp2egZJoHXZr4LndxDAd7i/7SQYhV0EjWfb/a4/OZ6tuVwMCVPiwkU5nsEipxEf7hmkSU7Em5VQ8P5NGA==
"@types/responselike@*":
version "1.0.0"
resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29"
integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA==
dependencies:
"@types/node" "*"
"@vercel/node@1.9.0":
version "1.9.0"
resolved "https://registry.yarnpkg.com/@vercel/node/-/node-1.9.0.tgz#6b64f3b9a962ddb1089276fad00f441a1f4b9cf0"
integrity sha512-Vk/ZpuY4Cdc8oUwBi/kf8qETRaJb/KYdFddVkLuS10QwA0yJx+RQ11trhZ1KFUdc27aBr5S2k8/dDxK8sLr+IA==
dependencies:
"@types/node" "*"
ts-node "8.9.1"
typescript "3.9.3"
agent-base@5:
version "5.1.1"
resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-5.1.1.tgz#e8fb3f242959db44d63be665db7a8e739537a32c"
integrity sha512-TMeqbNl2fMW0nMjTEPOwe3J/PRFP4vqeoNuQMG0HlMrtm5QxKqdvAkZ1pRBQ/ulIyDD5Yq0nJ7YbdD8ey0TO3g==
arg@^4.1.0:
version "4.1.3"
resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
binary@^0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/binary/-/binary-0.3.0.tgz#9f60553bc5ce8c3386f3b553cff47462adecaa79"
integrity sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=
dependencies:
buffers "~0.1.1"
chainsaw "~0.1.0"
bl@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/bl/-/bl-3.0.0.tgz#3611ec00579fd18561754360b21e9f784500ff88"
integrity sha512-EUAyP5UHU5hxF8BPT0LKW8gjYLhq1DQIcneOX/pL/m2Alo+OYDQAJlHq+yseMP50Os2nHXOSic6Ss3vSQeyf4A==
dependencies:
readable-stream "^3.0.1"
buffer-from@^1.0.0:
version "1.1.1"
resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef"
integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A==
buffers@~0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb"
integrity sha1-skV5w77U1tOWru5tmorn9Ugqt7s=
cacheable-lookup@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-0.2.1.tgz#f474ae2c686667d7ea08c43409ad31b2b31b26c2"
integrity sha512-BQ8MRjxJASEq2q+w0SusPU3B054gS278K8sj58QCLMZIso5qG05+MdCdmXxuyVlfvI8h4bPsNOavVUauVCGxrg==
dependencies:
keyv "^3.1.0"
cacheable-request@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.0.tgz#12421aa084e943ec81eac8c93e56af90c624788a"
integrity sha512-UVG4gMn3WjnAeFBBx7RFoprgOANIAkMwN5Dta6ONmfSwrCxfm0Ip7g0mIBxIRJZX9aDsoID0Ry3dU5Pr0csKKA==
dependencies:
clone-response "^1.0.2"
get-stream "^5.1.0"
http-cache-semantics "^4.0.0"
keyv "^3.0.0"
lowercase-keys "^2.0.0"
normalize-url "^4.1.0"
responselike "^2.0.0"
chainsaw@~0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/chainsaw/-/chainsaw-0.1.0.tgz#5eab50b28afe58074d0d58291388828b5e5fbc98"
integrity sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=
dependencies:
traverse ">=0.3.0 <0.4"
chownr@^1.1.1:
version "1.1.3"
resolved "https://registry.yarnpkg.com/chownr/-/chownr-1.1.3.tgz#42d837d5239688d55f303003a508230fa6727142"
integrity sha512-i70fVHhmV3DtTl6nqvZOnIjbY0Pe4kAUjwHj8z0zAdgBtYrJyYwLKCCuRBQ5ppkyL0AkN7HKRnETdmdp1zqNXw==
clone-response@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b"
integrity sha1-0dyXOSAxTfZ/vrlCI7TuNQI56Ws=
dependencies:
mimic-response "^1.0.0"
cookie@^0.3.1:
version "0.3.1"
resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb"
integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s=
debug@4:
version "4.1.1"
resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791"
integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==
dependencies:
ms "^2.1.1"
decompress-response@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-5.0.0.tgz#7849396e80e3d1eba8cb2f75ef4930f76461cb0f"
integrity sha512-TLZWWybuxWgoW7Lykv+gq9xvzOsUjQ9tF09Tj6NSTYGMTCHNXzrPnD6Hi+TgZq19PyTAGH4Ll/NIM/eTGglnMw==
dependencies:
mimic-response "^2.0.0"
defer-to-connect@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.1.tgz#88ae694b93f67b81815a2c8c769aef6574ac8f2f"
integrity sha512-J7thop4u3mRTkYRQ+Vpfwy2G5Ehoy82I14+14W4YMDLKdWloI9gSzRbV30s/NckQGVJtPkWNcW4oMAUigTdqiQ==
diff@^4.0.1:
version "4.0.2"
resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==
duplexer3@^0.1.4:
version "0.1.4"
resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2"
integrity sha1-7gHdHKwO08vH/b6jfcCo8c4ALOI=
end-of-stream@^1.1.0, end-of-stream@^1.4.1:
version "1.4.4"
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0"
integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==
dependencies:
once "^1.4.0"
fs-constants@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad"
integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==
get-stream@^5.0.0, get-stream@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.1.0.tgz#01203cdc92597f9b909067c3e656cc1f4d3c4dc9"
integrity sha512-EXr1FOzrzTfGeL0gQdeFEvOMm2mzMOglyiOXSTpPC+iAjAKftbr3jpCMWynogwYnM+eSj9sHGc6wjIcDvYiygw==
dependencies:
pump "^3.0.0"
got@10.2.1:
version "10.2.1"
resolved "https://registry.yarnpkg.com/got/-/got-10.2.1.tgz#7087485482fb31aa6e6399fd493dd04639da117b"
integrity sha512-IQX//hGm5oLjUj743GJG30U2RzjS58ZlhQQjwQXjsyR50TTD+etVMHlMEbNxYJGWVFa0ASgDVhRkAvQPe6M9iQ==
dependencies:
"@sindresorhus/is" "^1.0.0"
"@szmarczak/http-timer" "^4.0.0"
"@types/cacheable-request" "^6.0.1"
cacheable-lookup "^0.2.1"
cacheable-request "^7.0.0"
decompress-response "^5.0.0"
duplexer3 "^0.1.4"
get-stream "^5.0.0"
lowercase-keys "^2.0.0"
mimic-response "^2.0.0"
p-cancelable "^2.0.0"
responselike "^2.0.0"
to-readable-stream "^2.0.0"
type-fest "^0.8.0"
http-cache-semantics@^4.0.0:
version "4.0.3"
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.0.3.tgz#495704773277eeef6e43f9ab2c2c7d259dda25c5"
integrity sha512-TcIMG3qeVLgDr1TEd2XvHaTnMPwYQUQMIBLy+5pLSDKYFc7UIqj39w8EGzZkaxoLv/l2K8HaI0t5AVA+YYgUew==
https-proxy-agent@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-4.0.0.tgz#702b71fb5520a132a66de1f67541d9e62154d82b"
integrity sha512-zoDhWrkR3of1l9QAL8/scJZyLu8j/gBkcwcaQOZh7Gyh/+uJQzGVETdgT30akuwkpL8HTRfssqI3BZuV18teDg==
dependencies:
agent-base "5"
debug "4"
inherits@^2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
json-buffer@3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898"
integrity sha1-Wx85evx11ne96Lz8Dkfh+aPZqJg=
keyv@^3.0.0, keyv@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9"
integrity sha512-9ykJ/46SN/9KPM/sichzQ7OvXyGDYKGTaDlKMGCAlg2UK8KRy4jb0d8sFc+0Tt0YYnThq8X2RZgCg74RPxgcVA==
dependencies:
json-buffer "3.0.0"
lowercase-keys@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479"
integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==
lru_map@^0.3.3:
version "0.3.3"
resolved "https://registry.yarnpkg.com/lru_map/-/lru_map-0.3.3.tgz#b5c8351b9464cbd750335a79650a0ec0e56118dd"
integrity sha1-tcg1G5Rky9dQM1p5ZQoOwOVhGN0=
make-error@^1.1.1:
version "1.3.6"
resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
integrity sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==
mimic-response@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b"
integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==
mimic-response@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-2.0.0.tgz#996a51c60adf12cb8a87d7fb8ef24c2f3d5ebb46"
integrity sha512-8ilDoEapqA4uQ3TwS0jakGONKXVJqpy+RpM+3b7pLdOjghCrEiGp9SRkFbUHAmZW9vdnrENWHjaweIoTIJExSQ==
minimist@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
integrity sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=
mkdirp@^0.5.1:
version "0.5.1"
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
dependencies:
minimist "0.0.8"
ms@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009"
integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==
node-fetch@2.6.1:
version "2.6.1"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==
normalize-url@^4.1.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.0.tgz#453354087e6ca96957bd8f5baf753f5982142129"
integrity sha512-2s47yzUxdexf1OhyRi4Em83iQk0aPvwTddtFz4hnSSw9dCEsLEGf6SwIO8ss/19S9iBb5sJaOuTvTGDeZI00BQ==
once@^1.3.1, once@^1.4.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
dependencies:
wrappy "1"
p-cancelable@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.0.0.tgz#4a3740f5bdaf5ed5d7c3e34882c6fb5d6b266a6e"
integrity sha512-wvPXDmbMmu2ksjkB4Z3nZWTSkJEb9lqVdMaCKpZUGJG9TMiNp9XcbG3fn9fPKjem04fJMJnXoyFPk2FmgiaiNg==
parse-github-url@1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/parse-github-url/-/parse-github-url-1.0.2.tgz#242d3b65cbcdda14bb50439e3242acf6971db395"
integrity sha512-kgBf6avCbO3Cn6+RnzRGLkUsv4ZVqv/VfAYkRsyBcgkshNvVBkRn1FEZcW0Jb+npXQWm2vHPnnOqFteZxRRGNw==
pump@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
dependencies:
end-of-stream "^1.1.0"
once "^1.3.1"
readable-stream@^3.0.1, readable-stream@^3.1.1:
version "3.4.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.4.0.tgz#a51c26754658e0a3c21dbf59163bd45ba6f447fc"
integrity sha512-jItXPLmrSR8jmTRmRWJXCnGJsfy85mB3Wd/uINMXA65yrnFo0cPClFIUWzo2najVNSl+mx7/4W8ttlLWJe99pQ==
dependencies:
inherits "^2.0.3"
string_decoder "^1.1.1"
util-deprecate "^1.0.1"
responselike@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.0.tgz#26391bcc3174f750f9a79eacc40a12a5c42d7723"
integrity sha512-xH48u3FTB9VsZw7R+vvgaKeLKzT6jOogbQhEe/jewwnZgzPcnyWui2Av6JpoYZF/91uueC+lqhWqeURw5/qhCw==
dependencies:
lowercase-keys "^2.0.0"
safe-buffer@~5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.0.tgz#b74daec49b1148f88c64b68d49b1e815c1f2f519"
integrity sha512-fZEwUGbVl7kouZs1jCdMLdt95hdIv0ZeHg6L7qPeciMZhZ+/gdesW4wgTARkrFWEpspjEATAzUGPG8N2jJiwbg==
source-map-support@^0.5.17:
version "0.5.19"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61"
integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw==
dependencies:
buffer-from "^1.0.0"
source-map "^0.6.0"
source-map@^0.6.0:
version "0.6.1"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263"
integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==
string_decoder@^1.1.1:
version "1.3.0"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
dependencies:
safe-buffer "~5.2.0"
tar-fs@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.0.0.tgz#677700fc0c8b337a78bee3623fdc235f21d7afad"
integrity sha512-vaY0obB6Om/fso8a8vakQBzwholQ7v5+uy+tF3Ozvxv1KNezmVQAiWtcNmMHFSFPqL3dJA8ha6gdtFbfX9mcxA==
dependencies:
chownr "^1.1.1"
mkdirp "^0.5.1"
pump "^3.0.0"
tar-stream "^2.0.0"
tar-stream@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.1.0.tgz#d1aaa3661f05b38b5acc9b7020efdca5179a2cc3"
integrity sha512-+DAn4Nb4+gz6WZigRzKEZl1QuJVOLtAwwF+WUxy1fJ6X63CaGaUAxJRD2KEn1OMfcbCjySTYpNC6WmfQoIEOdw==
dependencies:
bl "^3.0.0"
end-of-stream "^1.4.1"
fs-constants "^1.0.0"
inherits "^2.0.3"
readable-stream "^3.1.1"
to-readable-stream@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/to-readable-stream/-/to-readable-stream-2.1.0.tgz#82880316121bea662cdc226adb30addb50cb06e8"
integrity sha512-o3Qa6DGg1CEXshSdvWNX2sN4QHqg03SPq7U6jPXRahlQdl5dK8oXjkU/2/sGrnOZKeGV1zLSO8qPwyKklPPE7w==
"traverse@>=0.3.0 <0.4":
version "0.3.9"
resolved "https://registry.yarnpkg.com/traverse/-/traverse-0.3.9.tgz#717b8f220cc0bb7b44e40514c22b2e8bbc70d8b9"
integrity sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=
ts-node@8.9.1:
version "8.9.1"
resolved "https://registry.yarnpkg.com/ts-node/-/ts-node-8.9.1.tgz#2f857f46c47e91dcd28a14e052482eb14cfd65a5"
integrity sha512-yrq6ODsxEFTLz0R3BX2myf0WBCSQh9A+py8PBo1dCzWIOcvisbyH6akNKqDHMgXePF2kir5mm5JXJTH3OUJYOQ==
dependencies:
arg "^4.1.0"
diff "^4.0.1"
make-error "^1.1.1"
source-map-support "^0.5.17"
yn "3.1.1"
tslib@^1.9.3:
version "1.10.0"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.10.0.tgz#c3c19f95973fb0a62973fb09d90d961ee43e5c8a"
integrity sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==
type-fest@^0.8.0:
version "0.8.1"
resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d"
integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==
typescript@3.9.3:
version "3.9.3"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.3.tgz#d3ac8883a97c26139e42df5e93eeece33d610b8a"
integrity sha512-D/wqnB2xzNFIcoBG9FG8cXRDjiqSTbG2wd8DMZeQyJlP1vfTkIxH4GKveWaEBYySKIg+USu+E+EDIR47SqnaMQ==
typescript@3.9.6:
version "3.9.6"
resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.9.6.tgz#8f3e0198a34c3ae17091b35571d3afd31999365a"
integrity sha512-Pspx3oKAPJtjNwE92YS05HQoY7z2SFyOpHo9MqJor3BXAGNaPUs83CuVp9VISFkSjyRfiTpmKuAYGJB7S7hOxw==
unzip-stream@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/unzip-stream/-/unzip-stream-0.3.0.tgz#c30c054cd6b0d64b13a23cd3ece911eb0b2b52d8"
integrity sha512-NG1h/MdGIX3HzyqMjyj1laBCmlPYhcO4xEy7gEqqzGiSLw7XqDQCnY4nYSn5XSaH8mQ6TFkaujrO8d/PIZN85A==
dependencies:
binary "^0.3.0"
mkdirp "^0.5.1"
util-deprecate@^1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
wrappy@1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
yn@3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"
integrity sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==

View File

@@ -8,6 +8,8 @@ First, run the development server:
npm run dev
# or
yarn dev
# or
pnpm dev
```
Open [http://localhost:3000](http://localhost:3000) with your browser to see the result.

View File

@@ -0,0 +1,8 @@
{
"compilerOptions": {
"baseUrl": ".",
"paths": {
"@/*": ["./*"]
}
}
}

File diff suppressed because it is too large

View File

@@ -9,10 +9,10 @@
"lint": "next lint"
},
"dependencies": {
"@next/font": "13.1.1",
"eslint": "8.30.0",
"eslint-config-next": "13.1.1",
"next": "13.1.1",
"@next/font": "13.1.2",
"eslint": "8.31.0",
"eslint-config-next": "13.1.2",
"next": "13.1.2",
"react": "18.2.0",
"react-dom": "18.2.0"
}

View File

@@ -1,4 +1,4 @@
import '../styles/globals.css'
import '@/styles/globals.css'
export default function App({ Component, pageProps }) {
return <Component {...pageProps} />

View File

@@ -1,7 +1,7 @@
import Head from 'next/head'
import Image from 'next/image'
import { Inter } from '@next/font/google'
import styles from '../styles/Home.module.css'
import styles from '@/styles/Home.module.css'
const inter = Inter({ subsets: ['latin'] })

View File

@@ -10,7 +10,7 @@
"@remix-run/react": "^1.7.6",
"@remix-run/vercel": "^1.7.6",
"@vercel/analytics": "^0.1.5",
"@vercel/node": "^2.6.3",
"@vercel/node": "^2.7.0",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
@@ -20,7 +20,7 @@
"@remix-run/serve": "^1.7.6",
"@types/react": "^18.0.25",
"@types/react-dom": "^18.0.9",
"eslint": "^8.27.0",
"eslint": "^8.28.0",
"typescript": "^4.9.3"
},
"engines": {

File diff suppressed because it is too large

View File

@@ -12,7 +12,7 @@
},
"devDependencies": {
"@sveltejs/adapter-auto": "next",
"@sveltejs/kit": "next",
"@sveltejs/kit": "1.0.0-next.589",
"@types/cookie": "^0.5.1",
"prettier": "^2.6.2",
"prettier-plugin-svelte": "^2.7.0",

View File

@@ -1,5 +1,5 @@
{
"npmClient": "yarn",
"npmClient": "pnpm",
"useWorkspaces": true,
"packages": ["packages/*"],
"command": {

View File

@@ -3,30 +3,26 @@
"version": "0.0.0",
"private": true,
"license": "Apache-2.0",
"packageManager": "yarn@1.22.19",
"workspaces": {
"packages": [
"packages/*"
],
"nohoist": [
"**/@types/**",
"**/typedoc",
"**/typedoc-plugin-markdown",
"**/typedoc-plugin-mdn-links"
]
},
"packageManager": "pnpm@7.24.2",
"dependencies": {
"lerna": "3.16.4"
"lerna": "5.6.2"
},
"devDependencies": {
"@types/node": "14.18.33",
"@typescript-eslint/eslint-plugin": "5.21.0",
"@typescript-eslint/parser": "5.21.0",
"@vercel/build-utils": "workspace:*",
"@vercel/ncc": "0.24.0",
"@vercel/next": "workspace:*",
"async-retry": "1.2.3",
"buffer-replace": "1.0.0",
"create-svelte": "2.0.1",
"dot": "1.1.3",
"eslint": "8.14.0",
"eslint-config-prettier": "8.5.0",
"eslint-plugin-jest": "26.1.5",
"execa": "3.2.0",
"fs-extra": "11.1.0",
"husky": "7.0.4",
"jest": "28.0.2",
"json5": "2.1.1",
@@ -34,22 +30,24 @@
"node-fetch": "2.6.7",
"npm-package-arg": "6.1.0",
"prettier": "2.6.2",
"source-map-support": "0.5.12",
"ts-eager": "2.0.2",
"ts-jest": "28.0.5",
"turbo": "1.6.3"
"turbo": "1.7.0-canary.9"
},
"scripts": {
"lerna": "lerna",
"version": "pnpm install && git add pnpm-lock.yaml",
"bootstrap": "lerna bootstrap",
"publish-stable": "echo 'Run `yarn changelog` for instructions'",
"publish-stable": "echo 'Run `pnpm changelog` for instructions'",
"publish-canary": "git checkout main && git pull && lerna version prerelease --preid canary --message \"Publish Canary\" --exact",
"publish-from-github": "./utils/publish.sh",
"changelog": "node utils/changelog.js",
"build": "node utils/gen.js && turbo run build",
"vercel-build": "yarn build && yarn run pack && cd api && node -r ts-eager/register ./_lib/script/build.ts",
"vercel-build": "pnpm build && pnpm run pack && cd api && node -r ts-eager/register ./_lib/script/build.ts",
"pre-commit": "lint-staged",
"test": "jest --rootDir=\"test\" --testPathPattern=\"\\.test.js\"",
"test-unit": "yarn test && node utils/gen.js && turbo run test-unit",
"test-unit": "pnpm test && node utils/gen.js && turbo run test-unit",
"test-integration-cli": "node utils/gen.js && turbo run test-integration-cli",
"test-integration-once": "node utils/gen.js && turbo run test-integration-once",
"test-integration-dev": "node utils/gen.js && turbo run test-integration-dev",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "5.7.4",
"version": "5.8.2",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",
@@ -13,8 +13,8 @@
"scripts": {
"build": "node build",
"test": "jest --env node --verbose --runInBand --bail",
"test-unit": "yarn test test/unit.*test.*",
"test-integration-once": "yarn test test/integration.test.ts"
"test-unit": "pnpm test test/unit.*test.*",
"test-integration-once": "pnpm test test/integration.test.ts"
},
"devDependencies": {
"@iarna/toml": "2.2.3",
@@ -25,8 +25,10 @@
"@types/glob": "7.2.0",
"@types/jest": "27.4.1",
"@types/js-yaml": "3.12.1",
"@types/minimatch": "^5.1.2",
"@types/ms": "0.7.31",
"@types/multistream": "2.1.1",
"@types/node": "14.18.33",
"@types/node-fetch": "^2.1.6",
"@types/semver": "6.0.0",
"@types/yazl": "2.4.2",
@@ -36,8 +38,10 @@
"async-sema": "2.1.4",
"cross-spawn": "6.0.5",
"end-of-stream": "1.4.1",
"execa": "3.2.0",
"fs-extra": "10.0.0",
"glob": "8.0.3",
"ignore": "4.0.6",
"into-stream": "5.0.0",
"js-yaml": "3.13.1",
"minimatch": "3.0.4",

View File

@@ -2,15 +2,20 @@ import path from 'path';
import debug from '../debug';
import FileFsRef from '../file-fs-ref';
import { File, Files, Meta } from '../types';
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
import { remove, mkdirp, readlink, symlink, chmod } from 'fs-extra';
import streamToBuffer from './stream-to-buffer';
export interface DownloadedFiles {
[filePath: string]: FileFsRef;
}
const S_IFMT = 61440; /* 0170000 type of file */
const S_IFDIR = 16384; /* 0040000 directory */
const S_IFLNK = 40960; /* 0120000 symbolic link */
const S_IFMT = 61440; /* 0170000 type of file */
export function isDirectory(mode: number): boolean {
return (mode & S_IFMT) === S_IFDIR;
}
export function isSymbolicLink(mode: number): boolean {
return (mode & S_IFMT) === S_IFLNK;
@@ -46,6 +51,12 @@ export async function downloadFile(
): Promise<FileFsRef> {
const { mode } = file;
if (isDirectory(mode)) {
await mkdirp(fsPath);
await chmod(fsPath, mode);
return FileFsRef.fromFsPath({ mode, fsPath });
}
// If the source is a symlink, try to create it instead of copying the file.
// Note: creating symlinks on Windows requires admin privileges or symlinks
// enabled in the group policy. We may want to improve the error message.

View File

@@ -15,12 +15,7 @@ export default async function glob(
opts: GlobOptions | string,
mountpoint?: string
): Promise<Record<string, FileFsRef>> {
let options: GlobOptions;
if (typeof opts === 'string') {
options = { cwd: opts };
} else {
options = opts;
}
const options = typeof opts === 'string' ? { cwd: opts } : opts;
if (!options.cwd) {
throw new Error(
@@ -34,13 +29,18 @@ export default async function glob(
const results: Record<string, FileFsRef> = {};
const statCache: Record<string, Stats> = {};
const symlinks: Record<string, boolean | undefined> = {};
options.symlinks = {};
options.statCache = statCache;
options.stat = true;
options.dot = true;
const files = await vanillaGlob(pattern, {
...options,
symlinks,
statCache,
stat: true,
dot: true,
});
const files = await vanillaGlob(pattern, options);
const dirs = new Set<string>();
const dirsWithEntries = new Set<string>();
for (const relativePath of files) {
const fsPath = normalizePath(path.join(options.cwd, relativePath));
@@ -49,12 +49,20 @@ export default async function glob(
stat,
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
);
const isSymlink = options.symlinks![fsPath];
if (isSymlink || stat.isFile()) {
const isSymlink = symlinks[fsPath];
if (isSymlink || stat.isFile() || stat.isDirectory()) {
if (isSymlink) {
stat = await lstat(fsPath);
}
// Some bookkeeping to track which directories already have entries within
const dirname = path.dirname(relativePath);
dirsWithEntries.add(dirname);
if (stat.isDirectory()) {
dirs.add(relativePath);
continue;
}
let finalPath = relativePath;
if (mountpoint) {
finalPath = path.join(mountpoint, finalPath);
@@ -64,5 +72,20 @@ export default async function glob(
}
}
// Add empty directory entries
for (const relativePath of dirs) {
if (dirsWithEntries.has(relativePath)) continue;
let finalPath = relativePath;
if (mountpoint) {
finalPath = path.join(mountpoint, finalPath);
}
const fsPath = normalizePath(path.join(options.cwd, relativePath));
const stat = statCache[fsPath];
results[finalPath] = new FileFsRef({ mode: stat.mode, fsPath });
}
return results;
}

View File

@@ -137,6 +137,9 @@ export function execAsync(
child.on('close', (code, signal) => {
if (code === 0 || opts.ignoreNon0Exit) {
return resolve({
// ignoring the next line due to some Node.js type issue when we removed hoisting of dependencies in https://github.com/vercel/vercel/pull/9198
// should eventually be fixed when this method is removed by https://github.com/vercel/vercel/pull/9200 or when we update to Node 16
// @ts-ignore
code,
stdout: Buffer.concat(stdoutList).toString(),
stderr: Buffer.concat(stderrList).toString(),

View File

@@ -8,6 +8,7 @@ import download, {
downloadFile,
DownloadedFiles,
isSymbolicLink,
isDirectory,
} from './fs/download';
import getWriteableDirectory from './fs/get-writable-directory';
import glob, { GlobOptions } from './fs/glob';
@@ -82,6 +83,7 @@ export {
streamToBuffer,
debug,
isSymbolicLink,
isDirectory,
getLambdaOptionsFromFunction,
scanParentDirs,
getIgnoreFilter,

View File

@@ -3,7 +3,7 @@ import Sema from 'async-sema';
import { ZipFile } from 'yazl';
import minimatch from 'minimatch';
import { readlink } from 'fs-extra';
import { isSymbolicLink } from './fs/download';
import { isSymbolicLink, isDirectory } from './fs/download';
import streamToBuffer from './fs/stream-to-buffer';
import type { Files, Config } from './types';
@@ -200,6 +200,8 @@ export async function createZip(files: Files): Promise<Buffer> {
const symlinkTarget = symlinkTargets.get(name);
if (typeof symlinkTarget === 'string') {
zipFile.addBuffer(Buffer.from(symlinkTarget, 'utf8'), name, opts);
} else if (file.mode && isDirectory(file.mode)) {
zipFile.addEmptyDirectory(name, opts);
} else {
const stream = file.toStream();
stream.on('error', reject);

View File

@@ -7,6 +7,9 @@
"dependencies": {
"react": "16.8.0",
"swr": "1.3.0"
},
"engines": {
"node": "16.x"
}
},
"node_modules/js-tokens": {

View File

@@ -0,0 +1,195 @@
import path from 'path';
import fs, { readlink } from 'fs-extra';
import { strict as assert, strictEqual } from 'assert';
import { download, glob, FileBlob } from '../src';
describe('download()', () => {
let warningMessages: string[];
const originalConsoleWarn = console.warn;
beforeEach(() => {
warningMessages = [];
console.warn = m => {
warningMessages.push(m);
};
});
afterEach(() => {
console.warn = originalConsoleWarn;
});
it('should re-create FileFsRef symlinks properly', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = await glob('**', path.join(__dirname, 'symlinks'));
assert.equal(Object.keys(files).length, 4);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
const files2 = await download(files, outDir);
assert.equal(Object.keys(files2).length, 4);
const [linkStat, linkDirStat, aStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
]);
assert(linkStat.isSymbolicLink());
assert(linkDirStat.isSymbolicLink());
assert(aStat.isFile());
const [linkDirContents, linkTextContents] = await Promise.all([
readlink(path.join(outDir, 'link-dir')),
readlink(path.join(outDir, 'link.txt')),
]);
strictEqual(linkDirContents, 'dir');
strictEqual(linkTextContents, './a.txt');
});
it('should re-create FileBlob symlinks properly', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = {
'a.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'a text',
}),
'dir/b.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'b text',
}),
'link-dir': new FileBlob({
mode: 41453,
contentType: undefined,
data: 'dir',
}),
'link.txt': new FileBlob({
mode: 41453,
contentType: undefined,
data: 'a.txt',
}),
};
strictEqual(Object.keys(files).length, 4);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
const files2 = await download(files, outDir);
strictEqual(Object.keys(files2).length, 4);
const [linkStat, linkDirStat, aStat, dirStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
fs.lstat(path.join(outDir, 'dir')),
]);
assert(linkStat.isSymbolicLink());
assert(linkDirStat.isSymbolicLink());
assert(aStat.isFile());
assert(dirStat.isDirectory());
const [linkDirContents, linkTextContents] = await Promise.all([
readlink(path.join(outDir, 'link-dir')),
readlink(path.join(outDir, 'link.txt')),
]);
strictEqual(linkDirContents, 'dir');
strictEqual(linkTextContents, 'a.txt');
});
it('should download symlinks even with incorrect file', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = {
'dir/file.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'file text',
}),
linkdir: new FileBlob({
mode: 41453,
contentType: undefined,
data: 'dir',
}),
'linkdir/file.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'this file should be discarded',
}),
};
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
await fs.mkdirp(outDir);
await download(files, outDir);
const [dir, file, linkdir] = await Promise.all([
fs.lstat(path.join(outDir, 'dir')),
fs.lstat(path.join(outDir, 'dir/file.txt')),
fs.lstat(path.join(outDir, 'linkdir')),
]);
expect(dir.isFile()).toBe(false);
expect(dir.isSymbolicLink()).toBe(false);
expect(file.isFile()).toBe(true);
expect(file.isSymbolicLink()).toBe(false);
expect(linkdir.isSymbolicLink()).toBe(true);
expect(warningMessages).toEqual([
'Warning: file "linkdir/file.txt" is within a symlinked directory "linkdir" and will be ignored',
]);
});
it('should create empty directory entries', async () => {
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
const files = {
'empty-dir': new FileBlob({
mode: 16877, // drwxr-xr-x
contentType: undefined,
data: '',
}),
dir: new FileBlob({
mode: 16877,
contentType: undefined,
data: '',
}),
'dir/subdir': new FileBlob({
mode: 16877,
contentType: undefined,
data: '',
}),
'another/subdir': new FileBlob({
mode: 16895, // drwxrwxrwx
contentType: undefined,
data: '',
}),
};
await download(files, outDir);
for (const [p, f] of Object.entries(files)) {
const stat = await fs.lstat(path.join(outDir, p));
expect(stat.isDirectory()).toEqual(true);
if (process.platform !== 'win32') {
// Don't test Windows since it doesn't support the same permissions
expect(stat.mode).toEqual(f.mode);
}
}
});
});

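For reference, the numeric `mode` values used throughout these fixtures are plain POSIX `st_mode` integers. A quick decoding, illustrative and not part of the test suite:

```ts
// 33188 = 0o100644 → regular file, -rw-r--r--
// 41453 = 0o120755 → symbolic link
// 16877 = 0o040755 → directory, drwxr-xr-x
// 16895 = 0o040777 → directory, drwxrwxrwx
for (const mode of [33188, 41453, 16877, 16895]) {
  console.log(mode, '= 0o' + mode.toString(8).padStart(6, '0'));
}
```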
View File

@@ -0,0 +1,38 @@
import fs from 'fs-extra';
import { join } from 'path';
import { tmpdir } from 'os';
import { glob, isDirectory } from '../src';
describe('glob()', () => {
it('should return entries for empty directories', async () => {
const dir = await fs.mkdtemp(join(tmpdir(), 'build-utils-test'));
try {
await Promise.all([
fs.writeFile(join(dir, 'root.txt'), 'file at the root'),
fs.mkdirp(join(dir, 'empty-dir')),
fs
.mkdirp(join(dir, 'dir-with-file'))
.then(() =>
fs.writeFile(join(dir, 'dir-with-file/data.json'), '{"a":"b"}')
),
fs.mkdirp(join(dir, 'another/subdir')),
]);
const files = await glob('**', dir);
const fileNames = Object.keys(files).sort();
expect(fileNames).toHaveLength(4);
expect(fileNames).toEqual([
'another/subdir',
'dir-with-file/data.json',
'empty-dir',
'root.txt',
]);
expect(isDirectory(files['another/subdir'].mode)).toEqual(true);
expect(isDirectory(files['empty-dir'].mode)).toEqual(true);
expect(isDirectory(files['dir-with-file/data.json'].mode)).toEqual(false);
expect(isDirectory(files['root.txt'].mode)).toEqual(false);
expect(files['dir-with-file']).toBeUndefined();
} finally {
await fs.remove(dir);
}
});
});

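Put differently, `glob()` now yields an entry for a directory only when it has no children; populated directories stay implicit in their children's paths. A usage sketch built on the public exports above (the path handling is illustrative):

```ts
import { glob, isDirectory } from '@vercel/build-utils';

async function listEmptyDirs(cwd: string): Promise<string[]> {
  const files = await glob('**', cwd);
  // Only childless directories receive their own entry, so filtering
  // on the directory bit yields exactly the empty ones.
  return Object.keys(files).filter(name => isDirectory(files[name].mode));
}
```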
View File

@@ -0,0 +1,97 @@
import path from 'path';
import { tmpdir } from 'os';
import fs from 'fs-extra';
import { createZip } from '../src/lambda';
import { FileBlob, glob, spawnAsync } from '../src';
const MODE_DIRECTORY = 16877; /* drwxr-xr-x */
const MODE_FILE = 33188; /* -rw-r--r-- */
describe('Lambda', () => {
it('should create zip file with symlinks', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = await glob('**', path.join(__dirname, 'symlinks'));
expect(Object.keys(files)).toHaveLength(4);
const outFile = path.join(__dirname, 'symlinks.zip');
await fs.remove(outFile);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
await fs.mkdirp(outDir);
await fs.writeFile(outFile, await createZip(files));
await spawnAsync('unzip', [outFile], { cwd: outDir });
const [linkStat, linkDirStat, aStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
]);
expect(linkStat.isSymbolicLink()).toEqual(true);
expect(linkDirStat.isSymbolicLink()).toEqual(true);
expect(aStat.isFile()).toEqual(true);
});
it('should create zip file with empty directory', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const dir = await fs.mkdtemp(path.join(tmpdir(), 'create-zip-empty-dir'));
try {
const files = {
a: new FileBlob({
data: 'contents',
mode: MODE_FILE,
}),
empty: new FileBlob({
data: '',
mode: MODE_DIRECTORY,
}),
'b/a': new FileBlob({
data: 'inside dir b',
mode: MODE_FILE,
}),
c: new FileBlob({
data: '',
mode: MODE_DIRECTORY,
}),
'c/a': new FileBlob({
data: 'inside dir c',
mode: MODE_FILE,
}),
};
const outFile = path.join(dir, 'lambda.zip');
const outDir = path.join(dir, 'out');
await fs.mkdirp(outDir);
await fs.writeFile(outFile, await createZip(files));
await spawnAsync('unzip', [outFile], { cwd: outDir });
expect(fs.statSync(path.join(outDir, 'empty')).isDirectory()).toEqual(
true
);
expect(fs.statSync(path.join(outDir, 'b')).isDirectory()).toEqual(true);
expect(fs.statSync(path.join(outDir, 'c')).isDirectory()).toEqual(true);
expect(fs.readFileSync(path.join(outDir, 'a'), 'utf8')).toEqual(
'contents'
);
expect(fs.readFileSync(path.join(outDir, 'b/a'), 'utf8')).toEqual(
'inside dir b'
);
expect(fs.readFileSync(path.join(outDir, 'c/a'), 'utf8')).toEqual(
'inside dir c'
);
expect(fs.readdirSync(path.join(outDir, 'empty'))).toHaveLength(0);
} finally {
await fs.remove(dir);
}
});
});

View File

@@ -1,20 +1,15 @@
import ms from 'ms';
import path from 'path';
import fs, { readlink } from 'fs-extra';
import { strict as assert, strictEqual } from 'assert';
import { createZip } from '../src/lambda';
import fs from 'fs-extra';
import { strict as assert } from 'assert';
import { getSupportedNodeVersion } from '../src/fs/node-version';
import download from '../src/fs/download';
import {
glob,
spawnAsync,
getNodeVersion,
getLatestNodeVersion,
getDiscontinuedNodeVersions,
runNpmInstall,
runPackageJsonScript,
scanParentDirs,
FileBlob,
Prerender,
} from '../src';
@@ -51,171 +46,6 @@ afterEach(() => {
console.warn = originalConsoleWarn;
});
it('should re-create FileFsRef symlinks properly', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = await glob('**', path.join(__dirname, 'symlinks'));
assert.equal(Object.keys(files).length, 4);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
const files2 = await download(files, outDir);
assert.equal(Object.keys(files2).length, 4);
const [linkStat, linkDirStat, aStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
]);
assert(linkStat.isSymbolicLink());
assert(linkDirStat.isSymbolicLink());
assert(aStat.isFile());
const [linkDirContents, linkTextContents] = await Promise.all([
readlink(path.join(outDir, 'link-dir')),
readlink(path.join(outDir, 'link.txt')),
]);
strictEqual(linkDirContents, 'dir');
strictEqual(linkTextContents, './a.txt');
});
it('should re-create FileBlob symlinks properly', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = {
'a.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'a text',
}),
'dir/b.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'b text',
}),
'link-dir': new FileBlob({
mode: 41453,
contentType: undefined,
data: 'dir',
}),
'link.txt': new FileBlob({
mode: 41453,
contentType: undefined,
data: 'a.txt',
}),
};
strictEqual(Object.keys(files).length, 4);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
const files2 = await download(files, outDir);
strictEqual(Object.keys(files2).length, 4);
const [linkStat, linkDirStat, aStat, dirStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
fs.lstat(path.join(outDir, 'dir')),
]);
assert(linkStat.isSymbolicLink());
assert(linkDirStat.isSymbolicLink());
assert(aStat.isFile());
assert(dirStat.isDirectory());
const [linkDirContents, linkTextContents] = await Promise.all([
readlink(path.join(outDir, 'link-dir')),
readlink(path.join(outDir, 'link.txt')),
]);
strictEqual(linkDirContents, 'dir');
strictEqual(linkTextContents, 'a.txt');
});
it('should create zip files with symlinks properly', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = await glob('**', path.join(__dirname, 'symlinks'));
assert.equal(Object.keys(files).length, 4);
const outFile = path.join(__dirname, 'symlinks.zip');
await fs.remove(outFile);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
await fs.mkdirp(outDir);
await fs.writeFile(outFile, await createZip(files));
await spawnAsync('unzip', [outFile], { cwd: outDir });
const [linkStat, linkDirStat, aStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
]);
assert(linkStat.isSymbolicLink());
assert(linkDirStat.isSymbolicLink());
assert(aStat.isFile());
});
it('should download symlinks even with incorrect file', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = {
'dir/file.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'file text',
}),
linkdir: new FileBlob({
mode: 41453,
contentType: undefined,
data: 'dir',
}),
'linkdir/file.txt': new FileBlob({
mode: 33188,
contentType: undefined,
data: 'this file should be discarded',
}),
};
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
await fs.mkdirp(outDir);
await download(files, outDir);
const [dir, file, linkdir] = await Promise.all([
fs.lstat(path.join(outDir, 'dir')),
fs.lstat(path.join(outDir, 'dir/file.txt')),
fs.lstat(path.join(outDir, 'linkdir')),
]);
expect(dir.isFile()).toBe(false);
expect(dir.isSymbolicLink()).toBe(false);
expect(file.isFile()).toBe(true);
expect(file.isSymbolicLink()).toBe(false);
expect(linkdir.isSymbolicLink()).toBe(true);
expect(warningMessages).toEqual([
'Warning: file "linkdir/file.txt" is within a symlinked directory "linkdir" and will be ignored',
]);
});
it('should only match supported node versions, otherwise throw an error', async () => {
expect(await getSupportedNodeVersion('14.x', false)).toHaveProperty(
'major',
@@ -474,21 +304,21 @@ it('should support initialHeaders and initialStatus correctly', async () => {
});
it('should support require by path for legacy builders', () => {
const index = require('@vercel/build-utils');
const index = require('../');
const download2 = require('@vercel/build-utils/fs/download.js');
const getWriteableDirectory2 = require('@vercel/build-utils/fs/get-writable-directory.js');
const glob2 = require('@vercel/build-utils/fs/glob.js');
const rename2 = require('@vercel/build-utils/fs/rename.js');
const download2 = require('../fs/download.js');
const getWriteableDirectory2 = require('../fs/get-writable-directory.js');
const glob2 = require('../fs/glob.js');
const rename2 = require('../fs/rename.js');
const {
runNpmInstall: runNpmInstall2,
} = require('@vercel/build-utils/fs/run-user-scripts.js');
const streamToBuffer2 = require('@vercel/build-utils/fs/stream-to-buffer.js');
} = require('../fs/run-user-scripts.js');
const streamToBuffer2 = require('../fs/stream-to-buffer.js');
const FileBlob2 = require('@vercel/build-utils/file-blob.js');
const FileFsRef2 = require('@vercel/build-utils/file-fs-ref.js');
const FileRef2 = require('@vercel/build-utils/file-ref.js');
const { Lambda: Lambda2 } = require('@vercel/build-utils/lambda.js');
const FileBlob2 = require('../file-blob.js');
const FileFsRef2 = require('../file-fs-ref.js');
const FileRef2 = require('../file-ref.js');
const { Lambda: Lambda2 } = require('../lambda.js');
expect(download2).toBe(index.download);
expect(getWriteableDirectory2).toBe(index.getWriteableDirectory);
@@ -590,9 +420,9 @@ it('should detect package.json in nested backend', async () => {
'../../node/test/fixtures/18.1-nested-packagejson/backend'
);
const result = await scanParentDirs(fixture);
expect(result.cliType).toEqual('yarn');
expect(result.lockfileVersion).toEqual(undefined);
// There is no lockfile but this test will pick up vercel/vercel/yarn.lock
expect(result.cliType).toEqual('pnpm');
// There is no lockfile but this test will pick up vercel/vercel/pnpm-lock.yaml
expect(result.lockfileVersion).toEqual(5.4);
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
});
@@ -602,9 +432,9 @@ it('should detect package.json in nested frontend', async () => {
'../../node/test/fixtures/18.1-nested-packagejson/frontend'
);
const result = await scanParentDirs(fixture);
expect(result.cliType).toEqual('yarn');
expect(result.lockfileVersion).toEqual(undefined);
// There is no lockfile but this test will pick up vercel/vercel/yarn.lock
expect(result.cliType).toEqual('pnpm');
// There is no lockfile but this test will pick up vercel/vercel/pnpm-lock.yaml
expect(result.lockfileVersion).toEqual(5.4);
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
});

View File

@@ -2,7 +2,7 @@ import { walkParentDirs } from '../src';
import { strict } from 'assert';
import { join } from 'path';
import { promises } from 'fs';
const { deepEqual, notDeepEqual, fail } = strict;
const { notDeepEqual, fail } = strict;
const { readFile } = promises;
const fixture = (name: string) => join(__dirname, 'walk', name);
const filename = 'file.txt';
@@ -10,7 +10,7 @@ const filename = 'file.txt';
async function assertContent(target: string | null, contents: string) {
notDeepEqual(target, null);
const actual = await readFile(target!, 'utf8');
deepEqual(actual.trim(), contents.trim());
strict.deepEqual(actual.trim(), contents.trim());
}
describe('Test `walkParentDirs`', () => {
@@ -21,7 +21,7 @@ describe('Test `walkParentDirs`', () => {
await walkParentDirs({ base, start, filename });
fail('Expected error');
} catch (error) {
deepEqual(
strict.deepEqual(
(error as Error).message,
'Expected "base" to be absolute path'
);
@@ -35,7 +35,7 @@ describe('Test `walkParentDirs`', () => {
await walkParentDirs({ base, start, filename });
fail('Expected error');
} catch (error) {
deepEqual(
strict.deepEqual(
(error as Error).message,
'Expected "start" to be absolute path'
);
@@ -67,21 +67,21 @@ describe('Test `walkParentDirs`', () => {
const base = fixture('not-found');
const start = base;
const target = await walkParentDirs({ base, start, filename });
deepEqual(target, null);
strict.deepEqual(target, null);
});
it('should not find nested two', async () => {
const base = fixture('not-found');
const start = join(base, 'two');
const target = await walkParentDirs({ base, start, filename });
deepEqual(target, null);
strict.deepEqual(target, null);
});
it('should not find nested three', async () => {
const base = fixture('not-found');
const start = join(base, 'two', 'three');
const target = await walkParentDirs({ base, start, filename });
deepEqual(target, null);
strict.deepEqual(target, null);
});
it('should find only one', async () => {

View File

@@ -41,8 +41,8 @@ To develop Vercel CLI, first check out the source code, install dependencies, an
```bash
git clone https://github.com/vercel/vercel.git
cd vercel
yarn
yarn build
pnpm install
pnpm build
```
At this point you can make modifications to the CLI source code and test them out locally. The CLI source code is located in the `packages/cli` directory.
@@ -51,15 +51,15 @@ At this point you can make modifications to the CLI source code and test them ou
cd packages/cli
```
### `yarn dev <cli-commands...>`
### `pnpm dev <cli-commands...>`
From within the `packages/cli` directory, you can use the "dev" script to quickly execute Vercel CLI from its TypeScript source code directly (without having to manually compile first). For example:
```bash
yarn dev deploy
yarn dev whoami
yarn dev login
yarn dev switch --debug
pnpm dev deploy
pnpm dev whoami
pnpm dev login
pnpm dev switch --debug
```
When you are satisfied with your changes, make a commit and create a pull request!

View File

@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "28.11.0",
"version": "28.12.3",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -13,9 +13,9 @@
"scripts": {
"preinstall": "node ./scripts/preinstall.js",
"test": "jest --env node --verbose --bail",
"test-unit": "yarn test test/unit/",
"test-unit": "pnpm test test/unit/",
"test-integration-cli": "rimraf test/fixtures/integration && ava test/integration.js --serial --fail-fast --verbose",
"test-integration-dev": "yarn test test/dev/",
"test-integration-dev": "pnpm test test/dev/",
"coverage": "codecov",
"build": "ts-node ./scripts/build.ts",
"dev": "ts-node ./src/index.ts"
@@ -41,17 +41,16 @@
"node": ">= 14"
},
"dependencies": {
"@vercel/build-utils": "5.7.4",
"@vercel/go": "2.2.23",
"@vercel/hydrogen": "0.0.37",
"@vercel/next": "3.3.8",
"@vercel/node": "2.8.5",
"@vercel/python": "3.1.33",
"@vercel/redwood": "1.0.44",
"@vercel/remix": "1.1.6",
"@vercel/ruby": "1.3.49",
"@vercel/static-build": "1.1.0",
"update-notifier": "5.1.0"
"@vercel/build-utils": "workspace:5.8.2",
"@vercel/go": "workspace:2.2.28",
"@vercel/hydrogen": "workspace:0.0.42",
"@vercel/next": "workspace:3.3.13",
"@vercel/node": "workspace:2.8.10",
"@vercel/python": "workspace:3.1.38",
"@vercel/redwood": "workspace:1.0.49",
"@vercel/remix": "workspace:1.2.3",
"@vercel/ruby": "workspace:1.3.54",
"@vercel/static-build": "workspace:1.1.5"
},
"devDependencies": {
"@alex_neo/jest-expect-message": "1.0.5",
@@ -84,6 +83,7 @@
"@types/npm-package-arg": "6.1.0",
"@types/pluralize": "0.0.29",
"@types/psl": "1.1.0",
"@types/qs": "6.9.7",
"@types/semver": "6.0.1",
"@types/tar-fs": "1.16.1",
"@types/text-table": "0.2.0",
@@ -93,12 +93,13 @@
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@types/yauzl-promise": "2.1.0",
"@vercel/client": "12.2.25",
"@vercel/error-utils": "1.0.3",
"@vercel/frameworks": "1.1.18",
"@vercel/fs-detectors": "3.6.1",
"@vercel/client": "workspace:12.2.30",
"@vercel/error-utils": "workspace:1.0.7",
"@vercel/frameworks": "workspace:1.2.3",
"@vercel/fs-detectors": "workspace:3.7.3",
"@vercel/fun": "1.0.4",
"@vercel/ncc": "0.24.0",
"@vercel/routing-utils": "workspace:2.1.7",
"@zeit/source-map-support": "0.6.2",
"ajv": "6.12.2",
"alpha-sort": "2.0.1",
@@ -138,6 +139,7 @@
"is-port-reachable": "3.1.0",
"is-url": "1.2.2",
"jaro-winkler": "0.2.8",
"jest-matcher-utils": "29.3.1",
"json5": "2.2.1",
"jsonlines": "0.1.1",
"line-async-iterator": "3.0.0",

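The `workspace:` ranges above use pnpm's workspace protocol: during development they force resolution to the local package, and at publish time the prefix is stripped so consumers see a normal semver range. A rough illustration of that pack-time rewrite — not code from this repo:

```ts
// Hypothetical sketch of the rewrite pnpm performs when packing.
const dependencies: Record<string, string> = {
  '@vercel/build-utils': 'workspace:5.8.2',
};
for (const [name, range] of Object.entries(dependencies)) {
  if (range.startsWith('workspace:')) {
    // "workspace:5.8.2" → "5.8.2" in the published package.json
    dependencies[name] = range.slice('workspace:'.length);
  }
}
```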
View File

@@ -1,7 +1,7 @@
import cpy from 'cpy';
import execa from 'execa';
import { join } from 'path';
import { remove, writeFile } from 'fs-extra';
import { remove, readJSON, writeFile } from 'fs-extra';
const dirRoot = join(__dirname, '..');
const distRoot = join(dirRoot, 'dist');
@@ -43,16 +43,16 @@ async function main() {
stdio: 'inherit',
});
const pkg = await readJSON(join(dirRoot, 'package.json'));
const dependencies = Object.keys(pkg?.dependencies ?? {});
// Do the initial `ncc` build
console.log();
const args = [
'ncc',
'build',
'--external',
'update-notifier',
'src/index.ts',
];
await execa('yarn', args, { stdio: 'inherit', cwd: dirRoot });
console.log('Dependencies:', dependencies);
const externs: Array<string> = [];
for (const dep of dependencies) {
externs.push('--external', dep);
}
const args = ['ncc', 'build', 'src/index.ts', ...externs];
await execa('pnpm', args, { stdio: 'inherit', cwd: dirRoot });
// `ncc` has some issues with `@vercel/fun`'s runtime files:
// - Executable bits on the `bootstrap` files appear to be lost:
@@ -66,10 +66,7 @@ async function main() {
// get compiled into the final ncc bundle file, however, we want them to be
// present in the npm package because the contents of those files are involved
// with `fun`'s cache invalidation mechanism and they need to be shasum'd.
const runtimes = join(
dirRoot,
'../../node_modules/@vercel/fun/dist/src/runtimes'
);
const runtimes = join(dirRoot, 'node_modules/@vercel/fun/dist/src/runtimes');
await cpy('**/*', join(distRoot, 'runtimes'), {
parents: true,
cwd: runtimes,
@@ -78,6 +75,10 @@ async function main() {
// Band-aid to bundle stuff that `ncc` neglects to bundle
await cpy(join(dirRoot, 'src/util/projects/VERCEL_DIR_README.txt'), distRoot);
await cpy(join(dirRoot, 'src/util/dev/builder-worker.js'), distRoot);
await cpy(
join(dirRoot, 'src/util/get-latest-version/get-latest-worker.js'),
distRoot
);
console.log('Finished building Vercel CLI');
}

View File

@@ -6,7 +6,7 @@ import { Output } from '../../util/output';
import * as ERRORS from '../../util/errors-ts';
import assignAlias from '../../util/alias/assign-alias';
import Client from '../../util/client';
import getDeploymentByIdOrHost from '../../util/deploy/get-deployment-by-id-or-host';
import getDeployment from '../../util/get-deployment';
import { getDeploymentForAlias } from '../../util/alias/get-deployment-by-alias';
import getScope from '../../util/get-scope';
import setupDomain from '../../util/domains/setup-domain';
@@ -136,36 +136,13 @@ export default async function set(
const [deploymentIdOrHost, aliasTarget] = args;
const deployment = handleCertError(
output,
await getDeploymentByIdOrHost(client, contextName, deploymentIdOrHost)
await getDeployment(client, contextName, deploymentIdOrHost)
);
if (deployment === 1) {
return deployment;
}
if (deployment instanceof ERRORS.DeploymentNotFound) {
output.error(
`Failed to find deployment "${deployment.meta.id}" under ${chalk.bold(
contextName
)}`
);
return 1;
}
if (deployment instanceof ERRORS.DeploymentPermissionDenied) {
output.error(
`No permission to access deployment "${
deployment.meta.id
}" under ${chalk.bold(deployment.meta.context)}`
);
return 1;
}
if (deployment instanceof ERRORS.InvalidDeploymentId) {
output.error(deployment.message);
return 1;
}
if (deployment === null) {
output.error(
`Couldn't find a deployment to alias. Please provide one as an argument.`

View File

@@ -15,17 +15,11 @@ import Client from '../../util/client';
import { getPkgName } from '../../util/pkg-name';
import { Deployment, PaginationOptions } from '../../types';
import { normalizeURL } from '../../util/bisect/normalize-url';
interface DeploymentV6
extends Pick<
Deployment,
'url' | 'target' | 'projectId' | 'ownerId' | 'meta' | 'inspectorUrl'
> {
createdAt: number;
}
import getScope from '../../util/get-scope';
import getDeployment from '../../util/get-deployment';
interface Deployments {
deployments: DeploymentV6[];
deployments: Deployment[];
pagination: PaginationOptions;
}
@@ -63,6 +57,8 @@ const help = () => {
export default async function main(client: Client): Promise<number> {
const { output } = client;
const scope = await getScope(client);
const { contextName } = scope;
const argv = getArgs(client.argv.slice(2), {
'--bad': String,
@@ -145,7 +141,9 @@ export default async function main(client: Client): Promise<number> {
output.spinner('Retrieving deployments…');
// `getDeployment` cannot be parallelized because it might prompt for login
const badDeployment = await getDeployment(client, bad).catch(err => err);
const badDeployment = await getDeployment(client, contextName, bad).catch(
err => err
);
if (badDeployment) {
if (badDeployment instanceof Error) {
@@ -162,7 +160,9 @@ export default async function main(client: Client): Promise<number> {
}
// `getDeployment` cannot be parallelized because it might prompt for login
const goodDeployment = await getDeployment(client, good).catch(err => err);
const goodDeployment = await getDeployment(client, contextName, good).catch(
err => err
);
if (goodDeployment) {
if (goodDeployment instanceof Error) {
@@ -204,7 +204,7 @@ export default async function main(client: Client): Promise<number> {
}
// Fetch all the project's "READY" deployments with the pagination API
let deployments: DeploymentV6[] = [];
let deployments: Deployment[] = [];
let next: number | undefined = badDeployment.createdAt + 1;
do {
const query = new URLSearchParams();
@@ -279,7 +279,7 @@ export default async function main(client: Client): Promise<number> {
const commit = getCommit(deployment);
if (commit) {
const shortSha = commit.sha.substring(0, 7);
const firstLine = commit.message.split('\n')[0];
const firstLine = commit.message?.split('\n')[0];
output.log(`${chalk.bold('Commit:')} [${shortSha}] ${firstLine}`);
}
@@ -356,7 +356,7 @@ export default async function main(client: Client): Promise<number> {
const commit = getCommit(lastBad);
if (commit) {
const shortSha = commit.sha.substring(0, 7);
const firstLine = commit.message.split('\n')[0];
const firstLine = commit.message?.split('\n')[0];
result.push(` ${chalk.bold('Commit:')} [${shortSha}] ${firstLine}`);
}
@@ -368,18 +368,7 @@ export default async function main(client: Client): Promise<number> {
return 0;
}
function getDeployment(
client: Client,
hostname: string
): Promise<DeploymentV6> {
const query = new URLSearchParams();
query.set('url', hostname);
query.set('resolve', '1');
query.set('noState', '1');
return client.fetch<DeploymentV6>(`/v10/deployments/get?${query}`);
}
function getCommit(deployment: DeploymentV6) {
function getCommit(deployment: Deployment) {
const sha =
deployment.meta?.githubCommitSha ||
deployment.meta?.gitlabCommitSha ||

View File

@@ -17,7 +17,11 @@ import {
BuildResultV3,
NowBuildError,
} from '@vercel/build-utils';
import { detectBuilders } from '@vercel/fs-detectors';
import {
detectBuilders,
detectFrameworkRecord,
LocalFileSystemDetector,
} from '@vercel/fs-detectors';
import minimatch from 'minimatch';
import {
appendRoutesToPhase,
@@ -59,6 +63,9 @@ import { toEnumerableError } from '../util/error';
import { validateConfig } from '../util/validate-config';
import { setMonorepoDefaultSettings } from '../util/build/monorepo';
import frameworks from '@vercel/frameworks';
import { detectFrameworkVersion } from '@vercel/fs-detectors';
import semver from 'semver';
type BuildResult = BuildResultV2 | BuildResultV3;
@@ -69,6 +76,20 @@ interface SerializedBuilder extends Builder {
apiVersion: number;
}
/**
* Build Output API `config.json` file interface.
*/
interface BuildOutputConfig {
version?: 3;
wildcard?: BuildResultV2Typical['wildcard'];
images?: BuildResultV2Typical['images'];
routes?: BuildResultV2Typical['routes'];
overrides?: Record<string, PathOverride>;
framework?: {
version: string;
};
}
/**
* Contents of the `builds.json` file.
*/
@@ -434,7 +455,7 @@ async function doBuild(
// Execute Builders for detected entrypoints
// TODO: parallelize builds (except for frontend)
const sortedBuilders = sortBuilders(builds);
const buildResults: Map<Builder, BuildResult> = new Map();
const buildResults: Map<Builder, BuildResult | BuildOutputConfig> = new Map();
const overrides: PathOverride[] = [];
const repoRootPath = cwd;
const corepackShimDir = await initCorepack({ repoRootPath });
@@ -538,8 +559,7 @@ async function doBuild(
// Merge existing `config.json` file into the one that will be produced
const configPath = join(outputDir, 'config.json');
// TODO: properly type
const existingConfig = await readJSONFile<any>(configPath);
const existingConfig = await readJSONFile<BuildOutputConfig>(configPath);
if (existingConfig instanceof CantParseJSONFile) {
throw existingConfig;
}
@@ -585,15 +605,17 @@ async function doBuild(
const mergedOverrides: Record<string, PathOverride> =
overrides.length > 0 ? Object.assign({}, ...overrides) : undefined;
const framework = await getFramework(cwd, buildResults);
// Write out the final `config.json` file based on the
// user configuration and Builder build results
// TODO: properly type
const config = {
const config: BuildOutputConfig = {
version: 3,
routes: mergedRoutes,
images: mergedImages,
wildcard: mergedWildcard,
overrides: mergedOverrides,
framework,
};
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });
@@ -608,6 +630,50 @@ async function doBuild(
);
}
async function getFramework(
cwd: string,
buildResults: Map<Builder, BuildResult | BuildOutputConfig>
): Promise<{ version: string } | undefined> {
const detectedFramework = await detectFrameworkRecord({
fs: new LocalFileSystemDetector(cwd),
frameworkList: frameworks,
});
if (!detectedFramework) {
return;
}
// determine framework version from build result
if (detectedFramework.useRuntime) {
for (const [build, buildResult] of buildResults.entries()) {
if (
'framework' in buildResult &&
build.use === detectedFramework.useRuntime.use
) {
return buildResult.framework;
}
}
}
// determine framework version from listed package.json version
if (detectedFramework.detectedVersion) {
// check for a valid, explicit version, not a range
if (semver.valid(detectedFramework.detectedVersion)) {
return {
version: detectedFramework.detectedVersion,
};
}
}
// determine framework version with runtime lookup
const frameworkVersion = detectFrameworkVersion(detectedFramework);
if (frameworkVersion) {
return {
version: frameworkVersion,
};
}
}
function expandBuild(files: string[], build: Builder): Builder[] {
if (!build.use) {
throw new NowBuildError({
@@ -648,7 +714,7 @@ function expandBuild(files: string[], build: Builder): Builder[] {
function mergeImages(
images: BuildResultV2Typical['images'],
buildResults: Iterable<BuildResult>
buildResults: Iterable<BuildResult | BuildOutputConfig>
): BuildResultV2Typical['images'] {
for (const result of buildResults) {
if ('images' in result && result.images) {
@@ -659,7 +725,7 @@ function mergeImages(
}
function mergeWildcard(
buildResults: Iterable<BuildResult>
buildResults: Iterable<BuildResult | BuildOutputConfig>
): BuildResultV2Typical['wildcard'] {
let wildcard: BuildResultV2Typical['wildcard'] = undefined;
for (const result of buildResults) {

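With the typed `BuildOutputConfig` in place, the emitted `config.json` can now carry the detected framework version alongside the merged routes. An illustrative instance (field values are examples, not output from a real build):

```ts
const config: BuildOutputConfig = {
  version: 3,
  routes: [{ src: '^/api/(.*)$', dest: '/api/$1' }],
  overrides: {},
  framework: { version: '13.1.1' }, // e.g. a detected Next.js version
};
```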
View File

@@ -19,15 +19,13 @@ import toHumanPath from '../../util/humanize-path';
import Now from '../../util';
import stamp from '../../util/output/stamp';
import createDeploy from '../../util/deploy/create-deploy';
import getDeploymentByIdOrHost from '../../util/deploy/get-deployment-by-id-or-host';
import getDeployment from '../../util/get-deployment';
import parseMeta from '../../util/parse-meta';
import linkStyle from '../../util/output/link';
import param from '../../util/output/param';
import {
BuildsRateLimited,
DeploymentNotFound,
DeploymentPermissionDenied,
InvalidDeploymentId,
DomainNotFound,
DomainNotVerified,
DomainPermissionDenied,
@@ -629,21 +627,8 @@ export default async (client: Client): Promise<number> => {
return 1;
}
const deploymentResponse = await getDeploymentByIdOrHost(
client,
contextName,
deployment.id,
'v10'
);
if (
deploymentResponse instanceof DeploymentNotFound ||
deploymentResponse instanceof DeploymentPermissionDenied ||
deploymentResponse instanceof InvalidDeploymentId
) {
output.error(deploymentResponse.message);
return 1;
}
// get the deployment just to double check that it actually deployed
await getDeployment(client, contextName, deployment.id);
if (deployment === null) {
error('Uploading failed. Please try again.');

View File

@@ -34,7 +34,7 @@ const help = () => {
)} Connect your Vercel Project to your Git repository defined in your local .git config
${chalk.cyan(`$ ${getPkgName()} git connect`)}
${chalk.gray(
''
)} Connect your Vercel Project to a Git repository using the remote URL
@@ -96,6 +96,7 @@ export default async function main(client: Client) {
}
const { org, project } = linkedProject;
client.config.currentTeam = org.type === 'team' ? org.id : undefined;
switch (subcommand) {
case 'connect':

View File

@@ -9,12 +9,10 @@ import { handleError } from '../util/error';
import getScope from '../util/get-scope';
import { getPkgName, getCommandName } from '../util/pkg-name';
import Client from '../util/client';
import { getDeployment } from '../util/get-deployment';
import { Deployment } from '@vercel/client';
import { Build } from '../types';
import getDeployment from '../util/get-deployment';
import { Build, Deployment } from '../types';
import title from 'title';
import { isErrnoException } from '@vercel/error-utils';
import { isAPIError } from '../util/errors-ts';
import { URL } from 'url';
const help = () => {
@@ -49,7 +47,6 @@ const help = () => {
};
export default async function main(client: Client) {
let deployment;
let argv;
try {
@@ -101,30 +98,11 @@ export default async function main(client: Client) {
);
// resolve the deployment, since we might have been given an alias
try {
deployment = await getDeployment(client, deploymentIdOrHost);
} catch (err: unknown) {
if (isAPIError(err)) {
if (err.status === 404) {
error(
`Failed to find deployment "${deploymentIdOrHost}" in ${chalk.bold(
contextName
)}`
);
return 1;
}
if (err.status === 403) {
error(
`No permission to access deployment "${deploymentIdOrHost}" in ${chalk.bold(
contextName
)}`
);
return 1;
}
}
// unexpected
throw err;
}
const deployment = await getDeployment(
client,
contextName,
deploymentIdOrHost
);
const {
id,
@@ -138,11 +116,11 @@ export default async function main(client: Client) {
const { builds } =
deployment.version === 2
? await client.fetch<{ builds: Build[] }>(`/v1/deployments/${id}/builds`)
? await client.fetch<{ builds: Build[] }>(`/v11/deployments/${id}/builds`)
: { builds: [] };
log(
`Fetched deployment ${chalk.bold(url)} in ${chalk.bold(
`Fetched deployment "${chalk.bold(url)}" in ${chalk.bold(
contextName
)} ${elapsed(Date.now() - depFetchStart)}`
);
@@ -163,7 +141,7 @@ export default async function main(client: Client) {
}
print('\n\n');
if (aliases.length > 0) {
if (aliases !== undefined && aliases.length > 0) {
print(chalk.bold(' Aliases\n\n'));
let aliasList = '';
for (const alias of aliases) {
@@ -202,8 +180,6 @@ function stateString(s: Deployment['readyState']) {
switch (s) {
case 'INITIALIZING':
case 'BUILDING':
case 'DEPLOYING':
case 'ANALYZING':
return chalk.yellow(CIRCLE) + sTitle;
case 'ERROR':
return chalk.red(CIRCLE) + sTitle;

View File

@@ -7,8 +7,7 @@ import getScope from '../util/get-scope';
import { getPkgName } from '../util/pkg-name';
import getArgs from '../util/get-args';
import Client from '../util/client';
import { getDeployment } from '../util/get-deployment';
import { isAPIError } from '../util/errors-ts';
import getDeployment from '../util/get-deployment';
const help = () => {
console.log(`
@@ -125,28 +124,9 @@ export default async function main(client: Client) {
let deployment;
try {
deployment = await getDeployment(client, id);
} catch (err: unknown) {
deployment = await getDeployment(client, contextName, id);
} finally {
output.stopSpinner();
if (isAPIError(err)) {
if (err.status === 404) {
output.error(
`Failed to find deployment "${id}" in ${chalk.bold(contextName)}`
);
return 1;
}
if (err.status === 403) {
output.error(
`No permission to access deployment "${id}" in ${chalk.bold(
contextName
)}`
);
return 1;
}
}
// unexpected
throw err;
}
output.log(

View File

@@ -11,7 +11,7 @@ import getScope from '../util/get-scope';
import { isValidName } from '../util/is-valid-name';
import removeProject from '../util/projects/remove-project';
import getProjectByIdOrName from '../util/projects/get-project-by-id-or-name';
import getDeploymentByIdOrHost from '../util/deploy/get-deployment-by-id-or-host';
import getDeployment from '../util/get-deployment';
import getDeploymentsByProjectId, {
DeploymentPartial,
} from '../util/deploy/get-deployments-by-project-id';
@@ -133,7 +133,7 @@ export default async function main(client: Client) {
id =>
d &&
!(d instanceof NowError) &&
(d.uid === id || d.name === id || d.url === normalizeURL(id))
(d.id === id || d.name === id || d.url === normalizeURL(id))
);
const [deploymentList, projectList] = await Promise.all<any>([
@@ -142,7 +142,7 @@ export default async function main(client: Client) {
if (!contextName) {
throw new Error('Context name is not defined');
}
return getDeploymentByIdOrHost(client, contextName, idOrHost);
return getDeployment(client, contextName, idOrHost).catch(err => err);
})
),
Promise.all(
@@ -180,7 +180,7 @@ export default async function main(client: Client) {
aliases = await Promise.all(
deployments.map(async depl => {
const { aliases } = await getAliases(client, depl.uid);
const { aliases } = await getAliases(client, depl.id);
return aliases;
})
);
@@ -238,7 +238,7 @@ export default async function main(client: Client) {
const start = Date.now();
await Promise.all<any>([
...deployments.map(depl => now.remove(depl.uid, { hard })),
...deployments.map(depl => now.remove(depl.id, { hard })),
...projects.map(project => removeProject(client, project.id)),
]);
@@ -275,9 +275,9 @@ function readConfirmation(
const deploymentTable = table(
deployments.map(depl => {
const time = chalk.gray(`${ms(Date.now() - depl.created)} ago`);
const time = chalk.gray(`${ms(Date.now() - depl.createdAt)} ago`);
const url = depl.url ? chalk.underline(`https://${depl.url}`) : '';
return [` ${depl.uid}`, url, time];
return [` ${depl.id}`, url, time];
}),
{ align: ['l', 'r', 'l'], hsep: ' '.repeat(6) }
);

View File

@@ -18,7 +18,7 @@ import sourceMap from '@zeit/source-map-support';
import { mkdirp } from 'fs-extra';
import chalk from 'chalk';
import epipebomb from 'epipebomb';
import updateNotifier from 'update-notifier';
import getLatestVersion from './util/get-latest-version';
import { URL } from 'url';
import * as Sentry from '@sentry/node';
import hp from './util/humanize-path';
@@ -55,13 +55,6 @@ import { VercelConfig } from '@vercel/client';
const isCanary = pkg.version.includes('canary');
// Checks for available update and returns an instance
const notifier = updateNotifier({
pkg,
distTag: isCanary ? 'canary' : 'latest',
updateCheckInterval: 1000 * 60 * 60 * 24 * 7, // 1 week
});
const VERCEL_DIR = getGlobalPathConfig();
const VERCEL_CONFIG_PATH = configFiles.getConfigFilePath();
const VERCEL_AUTH_CONFIG_PATH = configFiles.getAuthConfigFilePath();
@@ -149,22 +142,26 @@ const main = async () => {
}
// Print update information, if available
if (notifier.update && notifier.update.latest !== pkg.version && isTTY) {
const { latest } = notifier.update;
console.log(
info(
if (isTTY && !process.env.NO_UPDATE_NOTIFIER) {
// Check if an update is available. If so, `latest` will contain a string
// of the latest version, otherwise `undefined`.
const latest = getLatestVersion({
distTag: isCanary ? 'canary' : 'latest',
output,
pkg,
});
if (latest) {
output.log(
`${chalk.black.bgCyan('UPDATE AVAILABLE')} ` +
`Run ${cmd(
await getUpdateCommand()
)} to install ${getTitleName()} CLI ${latest}`
)
);
);
console.log(
info(
`Changelog: https://github.com/vercel/vercel/releases/tag/vercel@${latest}`
)
);
output.log(
`Changelog: https://github.com/vercel/vercel/releases/tag/vercel@${latest}\n`
);
}
}
// The second argument to the command can be:

View File

@@ -1,4 +1,6 @@
import type { BuilderFunctions } from '@vercel/build-utils';
import type { Readable, Writable } from 'stream';
import type { Route } from '@vercel/routing-utils';
export type ProjectSettings = import('@vercel/build-utils').ProjectSettings;
@@ -116,32 +118,105 @@ export type Cert = {
expiration: string;
};
type RouteOrMiddleware =
| Route
| {
src: string;
continue: boolean;
middleware: 0;
};
export type Deployment = {
uid: string;
url: string;
alias?: string[];
aliasAssigned?: boolean | null | number;
aliasError?: null | { code: string; message: string };
aliasFinal?: string | null;
aliasWarning?: null | {
code: string;
message: string;
link?: string;
action?: string;
};
bootedAt?: number;
build?: { env: string[] };
builds?: { use: string; src?: string; config?: { [key: string]: any } };
buildErrorAt?: number;
buildingAt: number;
canceledAt?: number;
checksState?: 'completed' | 'registered' | 'running';
checksConclusion?: 'canceled' | 'failed' | 'skipped' | 'succeeded';
createdAt: number;
createdIn?: string;
creator: { uid: string; username?: string };
env?: string[];
errorCode?: string;
errorLink?: string;
errorMessage?: string | null;
errorStep?: string;
functions?: BuilderFunctions | null;
gitSource?: {
org?: string;
owner?: string;
prId?: number | null;
projectId: number;
ref?: string | null;
repoId?: number;
repoUuid: string;
sha?: string;
slug?: string;
type: string;
workspaceUuid: string;
};
id: string;
initReadyAt?: number;
inspectorUrl?: string | null;
lambdas?: Build[];
meta?: {
[key: string]: string | undefined;
};
monorepoManager?: string | null;
name: string;
type: 'LAMBDAS';
state:
ownerId?: string;
plan?: 'enterprise' | 'hobby' | 'oss' | 'pro';
previewCommentsEnabled?: boolean;
projectId?: string;
projectSettings?: {
buildCommand?: string | null;
devCommand?: string | null;
framework?: string;
installCommand?: string | null;
outputDirectory?: string | null;
};
public: boolean;
ready?: number;
readyState:
| 'BUILDING'
| 'ERROR'
| 'INITIALIZING'
| 'QUEUED'
| 'READY'
| 'CANCELED';
version?: number;
created: number;
createdAt: number;
ready?: number;
buildingAt?: number;
creator: { uid: string; username: string };
target: string | null;
ownerId: string;
projectId: string;
inspectorUrl: string;
meta: {
[key: string]: any;
regions: string[];
routes?: RouteOrMiddleware[] | null;
source?: 'cli' | 'git' | 'import' | 'import/repo' | 'clone/repo';
status:
| 'BUILDING'
| 'ERROR'
| 'INITIALIZING'
| 'QUEUED'
| 'READY'
| 'CANCELED';
target?: 'staging' | 'production' | null;
team?: {
id: string;
name: string;
slug: string;
};
alias?: string[];
ttyBuildLogs?: boolean;
type: 'LAMBDAS';
url: string;
userAliases?: string[];
version: 2;
};
export type Alias = {

View File

@@ -1,4 +1,4 @@
import { Deployment } from '../../types';
import type { Deployment } from '../../types';
import { Output } from '../output';
import Client from '../client';
import createAlias from './create-alias';

View File

@@ -1,4 +1,4 @@
import { Deployment } from '../../types';
import type { Deployment } from '../../types';
import { Output } from '../output';
import * as ERRORS from '../errors-ts';
import Client from '../client';
@@ -62,7 +62,7 @@ async function performCreateAlias(
) {
try {
return await client.fetch<AliasRecord>(
`/now/deployments/${deployment.uid}/aliases`,
`/now/deployments/${deployment.id}/aliases`,
{
method: 'POST',
body: { alias },
@@ -79,7 +79,7 @@ async function performCreateAlias(
if (err.code === 'deployment_not_found') {
return new ERRORS.DeploymentNotFound({
context: contextName,
id: deployment.uid,
id: deployment.id,
});
}
if (err.code === 'gone') {

View File

@@ -5,7 +5,7 @@ import { Output } from '../output';
import { User } from '../../types';
import { VercelConfig } from '../dev/types';
import getDeploymentsByAppName from '../deploy/get-deployments-by-appname';
import getDeploymentByIdOrHost from '../deploy/get-deployment-by-id-or-host';
import getDeployment from '../get-deployment';
async function getAppLastDeployment(
output: Output,
@@ -22,7 +22,7 @@ async function getAppLastDeployment(
// Try to fetch deployment details
if (deploymentItem) {
return getDeploymentByIdOrHost(client, contextName, deploymentItem.uid);
return await getDeployment(client, contextName, deploymentItem.uid);
}
return null;
@@ -42,13 +42,11 @@ export async function getDeploymentForAlias(
// When there are no args at all we try to get the targets from the config
if (args.length === 2) {
const [deploymentId] = args;
const deployment = await getDeploymentByIdOrHost(
client,
contextName,
deploymentId
);
output.stopSpinner();
return deployment;
try {
return await getDeployment(client, contextName, deploymentId);
} finally {
output.stopSpinner();
}
}
const appName =
@@ -59,13 +57,15 @@ export async function getDeploymentForAlias(
return null;
}
const deployment = await getAppLastDeployment(
output,
client,
appName,
user,
contextName
);
output.stopSpinner();
return deployment;
try {
return await getAppLastDeployment(
output,
client,
appName,
user,
contextName
);
} finally {
output.stopSpinner();
}
}

View File

@@ -1,78 +0,0 @@
import type Client from '../client';
import toHost from '../to-host';
import { Deployment } from '../../types';
import {
DeploymentNotFound,
DeploymentPermissionDenied,
InvalidDeploymentId,
isAPIError,
} from '../errors-ts';
import mapCertError from '../certs/map-cert-error';
type APIVersion = 'v5' | 'v10';
export default async function getDeploymentByIdOrHost(
client: Client,
contextName: string,
idOrHost: string,
apiVersion: APIVersion = 'v5'
) {
try {
const { deployment } =
idOrHost.indexOf('.') !== -1
? await getDeploymentByHost(
client,
toHost(idOrHost) as string,
apiVersion
)
: await getDeploymentById(client, idOrHost, apiVersion);
return deployment;
} catch (err: unknown) {
if (isAPIError(err)) {
if (err.status === 404) {
return new DeploymentNotFound({ id: idOrHost, context: contextName });
}
if (err.status === 403) {
return new DeploymentPermissionDenied(idOrHost, contextName);
}
if (err.status === 400 && err.message.includes('`id`')) {
return new InvalidDeploymentId(idOrHost);
}
const certError = mapCertError(err);
if (certError) {
return certError;
}
}
throw err;
}
}
async function getDeploymentById(
client: Client,
id: string,
apiVersion: APIVersion
) {
const deployment = await client.fetch<Deployment>(
`/${apiVersion}/now/deployments/${encodeURIComponent(id)}`
);
return { deployment };
}
type Response = {
id: string;
};
async function getDeploymentByHost(
client: Client,
host: string,
apiVersion: APIVersion
) {
const response = await client.fetch<Response>(
`/v10/now/deployments/get?url=${encodeURIComponent(
host
)}&resolve=1&noState=1`
);
return getDeploymentById(client, response.id, apiVersion);
}

View File

@@ -1,19 +0,0 @@
import { NowError } from '../now-error';
import Client from '../client';
import getDeploymentByIdOrHost from './get-deployment-by-id-or-host';
export default async function getDeploymentByIdOrThrow(
client: Client,
contextName: string,
idOrHost: string
) {
const deployment = await getDeploymentByIdOrHost(
client,
contextName,
idOrHost
);
if (deployment instanceof NowError) {
throw deployment;
}
return deployment;
}

View File

@@ -6,7 +6,7 @@ type Response = {
deployments: DeploymentPartial[];
};
export interface DeploymentPartial {
uid: string;
id: string;
name: string;
url: string;
created: number;

View File

@@ -7,7 +7,8 @@ import jsonlines from 'jsonlines';
import { eraseLines } from 'ansi-escapes';
import Client from './client';
import { getDeployment } from './get-deployment';
import getDeployment from './get-deployment';
import getScope from './get-scope';
export interface FindOpts {
direction: 'forward' | 'backward';
@@ -37,6 +38,7 @@ async function printEvents(
{ mode, onEvent, quiet, findOpts }: PrintEventsOptions
) {
const { log, debug } = client.output;
const { contextName } = await getScope(client);
// we keep track of how much we log in case we
// drop the connection and have to start over
@@ -74,7 +76,11 @@ async function printEvents(
poller = (function startPoller() {
return setTimeout(async () => {
try {
const json = await getDeployment(client, deploymentIdOrURL);
const json = await getDeployment(
client,
contextName,
deploymentIdOrURL
);
if (json.readyState === 'READY') {
stream.end();
finish();

View File

@@ -1,27 +1,53 @@
import { stringify } from 'querystring';
import { Deployment } from '@vercel/client';
import Client from './client';
import type Client from './client';
import {
DeploymentNotFound,
DeploymentPermissionDenied,
InvalidDeploymentId,
isAPIError,
} from './errors-ts';
import type { Deployment } from '../types';
import mapCertError from './certs/map-cert-error';
import toHost from './to-host';
export async function getDeployment(
/**
* Retrieves a v13 deployment.
*
* @param client - The Vercel CLI client instance.
* @param contextName - The scope context/team name.
* @param hostOrId - A deployment host or id.
* @returns The deployment information.
*/
export default async function getDeployment(
client: Client,
contextName: string,
hostOrId: string
): Promise<Deployment> {
let url = `/v13/deployments`;
if (hostOrId.includes('.')) {
let host = hostOrId.replace(/^https:\/\//i, '');
if (host.slice(-1) === '/') {
host = host.slice(0, -1);
}
url += `/get?${stringify({
url: host,
})}`;
} else {
url += `/${encodeURIComponent(hostOrId)}`;
hostOrId = toHost(hostOrId);
}
const deployment = await client.fetch<Deployment>(url);
return deployment;
try {
return await client.fetch<Deployment>(
`/v13/deployments/${encodeURIComponent(hostOrId)}`
);
} catch (err: unknown) {
if (isAPIError(err)) {
if (err.status === 404) {
throw new DeploymentNotFound({ id: hostOrId, context: contextName });
}
if (err.status === 403) {
throw new DeploymentPermissionDenied(hostOrId, contextName);
}
if (err.status === 400 && err.message.includes('`id`')) {
throw new InvalidDeploymentId(hostOrId);
}
const certError = mapCertError(err);
if (certError) {
throw certError;
}
}
throw err;
}
}

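A usage sketch of the new default export; the main behavioral change from the removed `getDeploymentByIdOrHost` is that error classes are now thrown instead of returned:

```ts
import getDeployment from './get-deployment';
import type Client from './client';

async function show(client: Client, contextName: string) {
  // Throws DeploymentNotFound / DeploymentPermissionDenied /
  // InvalidDeploymentId (or a mapped cert error) on failure.
  const deployment = await getDeployment(
    client,
    contextName,
    'my-app.vercel.app'
  );
  console.log(deployment.readyState);
}
```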
View File

@@ -0,0 +1,225 @@
/**
* This file is spawned in the background and checks npm for the latest version
* of the CLI, then writes the version to the cache file.
*
* NOTE: Since this file runs asynchronously in the background, it's possible
* for multiple instances of this file to be running at the same time, leading
* to a race condition where the most recent instance overwrites the previous
* cache file, resetting the `notified` flag and causing the update
* notification to appear for multiple consecutive commands. Not the end of
* the world, but something to be aware of.
*
* IMPORTANT! This file must NOT depend on any 3rd party dependencies. This
* file is NOT bundled by `ncc` and thus any 3rd party dependencies will never
* be available.
*/
const https = require('https');
const { mkdirSync, writeFileSync } = require('fs');
const { access, mkdir, readFile, unlink, writeFile } = require('fs/promises');
const path = require('path');
const { format, inspect } = require('util');
/**
* A simple output helper which accumulates error and debug log messages in
* memory for potential persistence to disk, while immediately writing errors
* (and, when the `--debug` flag is set, debug messages) to `stderr`.
*/
class WorkerOutput {
debugLog = [];
logFile = null;
constructor({ debug = true }) {
this.debugOutputEnabled = debug;
}
debug(...args) {
this.print('debug', args);
}
error(...args) {
this.print('error', args);
}
print(type, args) {
// note: `args` may contain an `Error` that will be toString()'d and thus
// lose its stack trace
const str = format(
...args.map(s => (typeof s === 'string' ? s : inspect(s)))
);
this.debugLog.push(`[${new Date().toISOString()}] [${type}] ${str}`);
if (type === 'debug' && this.debugOutputEnabled) {
console.error(`> [debug] [${new Date().toISOString()}] ${str}`);
} else if (type === 'error') {
console.error(`Error: ${str}`);
}
}
setLogFile(file) {
// wire up the exit handler the first time the log file is set
if (this.logFile === null) {
process.on('exit', () => {
if (this.debugLog.length) {
mkdirSync(path.dirname(this.logFile), { recursive: true });
writeFileSync(this.logFile, this.debugLog.join('\n'));
}
});
}
this.logFile = file;
}
}
const output = new WorkerOutput({
// enable debug logging if the `--debug` flag is set or if this worker
// script was directly executed
debug: process.argv.includes('--debug') || !process.connected,
});
process.on('unhandledRejection', err => {
output.error('Exiting worker due to unhandled rejection:', err);
process.exit(1);
});
// this timer will prevent this worker process from running longer than 10s
const timer = setTimeout(() => {
output.error('Worker timed out after 10 seconds');
process.exit(1);
}, 10000);
// wait for the parent to give us the work payload
process.once('message', async msg => {
output.debug('Received message from parent:', msg);
output.debug('Disconnecting from parent');
process.disconnect();
const { cacheFile, distTag, name, updateCheckInterval } = msg;
const cacheFileParsed = path.parse(cacheFile);
await mkdir(cacheFileParsed.dir, { recursive: true });
output.setLogFile(
path.join(cacheFileParsed.dir, `${cacheFileParsed.name}.log`)
);
const lockFile = path.join(
cacheFileParsed.dir,
`${cacheFileParsed.name}.lock`
);
try {
// check for a lock file and either bail if running or write our pid and continue
output.debug(`Checking lock file: ${lockFile}`);
if (await isRunning(lockFile)) {
output.debug('Worker already running, exiting');
process.exit(1);
}
output.debug(`Initializing lock file with pid ${process.pid}`);
await writeFile(lockFile, String(process.pid), 'utf-8');
// fetch the latest version from npm
const agent = new https.Agent({
keepAlive: true,
maxSockets: 15, // See: `npm config get maxsockets`
});
const headers = {
accept:
'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*',
};
const url = `https://registry.npmjs.org/-/package/${name}/dist-tags`;
output.debug(`Fetching ${url}`);
const tags = await new Promise((resolve, reject) => {
const req = https.get(
url,
{
agent,
headers,
},
res => {
let buf = '';
res.on('data', chunk => {
buf += chunk;
});
res.on('end', () => {
try {
resolve(JSON.parse(buf));
} catch (err) {
reject(err);
}
});
}
);
req.on('error', reject);
req.end();
});
const version = tags[distTag];
if (version) {
output.debug(`Found dist tag "${distTag}" with version "${version}"`);
} else {
output.error(`Dist tag "${distTag}" not found`);
output.debug('Available dist tags:', Object.keys(tags));
}
output.debug(`Writing cache file: ${cacheFile}`);
await writeFile(
cacheFile,
JSON.stringify({
expireAt: Date.now() + updateCheckInterval,
notified: false,
version,
})
);
} catch (err) {
output.error(`Failed to get package info:`, err);
} finally {
clearTimeout(timer);
if (await fileExists(lockFile)) {
output.debug(`Releasing lock file: ${lockFile}`);
await unlink(lockFile);
}
output.debug(`Worker finished successfully!`);
// force the worker to exit
process.exit(0);
}
});
// signal the parent process we're ready
if (process.connected) {
output.debug("Notifying parent we're ready");
process.send({ type: 'ready' });
} else {
console.error('No IPC bridge detected, exiting');
process.exit(1);
}
async function fileExists(file) {
return access(file)
.then(() => true)
.catch(() => false);
}
async function isRunning(lockFile) {
try {
const pid = parseInt(await readFile(lockFile, 'utf-8'));
output.debug(`Found lock file with pid: ${pid}`);
// checks for existence of a process; throws if not found
process.kill(pid, 0);
// process is still running
return true;
} catch (err) {
if (await fileExists(lockFile)) {
// the lock file exists but its process is not running; the pid is stale
output.debug(`Resetting lock file: ${err.toString()}`);
await unlink(lockFile);
}
return false;
}
}

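The lock-file check above leans on a classic trick: `process.kill(pid, 0)` delivers no signal but throws if the process does not exist. A standalone demonstration:

```ts
function pidIsAlive(pid: number): boolean {
  try {
    process.kill(pid, 0); // signal 0: existence check only, throws ESRCH if gone
    return true;
  } catch {
    // note: EPERM would also indicate a live process owned by another
    // user; ignored here for brevity
    return false;
  }
}

console.log(pidIsAlive(process.pid)); // true — we are certainly running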
View File

@@ -0,0 +1,151 @@
import semver from 'semver';
import XDGAppPaths from 'xdg-app-paths';
import { dirname, parse as parsePath, resolve as resolvePath } from 'path';
import type { Output } from '../output';
import { existsSync, outputJSONSync, readJSONSync } from 'fs-extra';
import type { PackageJson } from '@vercel/build-utils';
import { spawn } from 'child_process';
interface GetLatestVersionOptions {
cacheDir?: string;
distTag?: string;
output?: Output;
pkg: PackageJson;
updateCheckInterval?: number;
}
interface PackageInfoCache {
version: string;
expireAt: number;
notified: boolean;
}
interface GetLatestWorkerPayload {
cacheFile?: string;
distTag?: string;
updateCheckInterval?: number;
name?: string;
}
/**
* Determines whether a check for a newer CLI version is needed and returns the
* last detected version. The cached version could be stale, but still newer
* than the current version.
*
* @returns {String|undefined} If a newer version is found, the latest
* version, otherwise `undefined`.
*/
export default function getLatestVersion({
cacheDir = XDGAppPaths('com.vercel.cli').cache(),
distTag = 'latest',
output,
pkg,
updateCheckInterval = 1000 * 60 * 60 * 24 * 7, // 1 week
}: GetLatestVersionOptions): string | undefined {
if (
!pkg ||
typeof pkg !== 'object' ||
!pkg.name ||
typeof pkg.name !== 'string'
) {
throw new TypeError('Expected package to be an object with a package name');
}
const cacheFile = resolvePath(
cacheDir,
'package-updates',
`${pkg.name}-${distTag}.json`
);
let cache: PackageInfoCache | undefined;
try {
cache = readJSONSync(cacheFile);
} catch (err: any) {
// cache does not exist or malformed
if (err.code !== 'ENOENT') {
output?.debug(`Error reading latest package cache file: ${err}`);
}
}
if (!cache || cache.expireAt < Date.now()) {
spawnWorker(
{
cacheFile,
distTag,
updateCheckInterval,
name: pkg.name,
},
output
);
}
if (
cache &&
!cache.notified &&
pkg.version &&
semver.lt(pkg.version, cache.version)
) {
cache.notified = true;
outputJSONSync(cacheFile, cache);
return cache.version;
}
}
/**
* Spawn the worker, wait for the worker to report it's ready, then signal the
* worker to fetch the latest version.
*/
function spawnWorker(
payload: GetLatestWorkerPayload,
output: Output | undefined
) {
// we need to find the update worker script since its location differs
// between production builds and tests
let dir = dirname(__filename);
let script = resolvePath(dir, 'dist', 'get-latest-worker.js');
const { root } = parsePath(dir);
while (!existsSync(script)) {
dir = dirname(dir);
if (dir === root) {
// didn't find it, bail
output?.debug('Failed to find the get latest worker script!');
return;
}
script = resolvePath(dir, 'dist', 'get-latest-worker.js');
}
// spawn the worker with an IPC channel
output?.debug(`Spawning ${script}`);
const args = [script];
if (output?.debugEnabled) {
args.push('--debug');
}
const worker = spawn(process.execPath, args, {
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
windowsHide: true,
});
// we allow the child 2 seconds to let us know it's ready before we give up
const workerReadyTimer = setTimeout(() => worker.kill(), 2000);
// listen for an early close; the listener is removed once the worker reports ready
const onClose = (code: number) => {
output?.debug(`Get latest worker exited (code ${code})`);
};
worker.on('close', onClose);
// generally, the parent won't be around long enough to handle a non-zero
// worker process exit code
worker.on('error', err => {
output?.log(`Failed to spawn get latest worker: ${err.stack}`);
});
// wait for the worker to start and notify us it is ready
worker.once('message', () => {
clearTimeout(workerReadyTimer);
worker.removeListener('close', onClose);
worker.send(payload);
worker.unref();
});
}
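For context, a hedged sketch of how a CLI entry point might consume `getLatestVersion`; the import paths and update message here are illustrative, not taken from this repository:

```ts
import getLatestVersion from './util/get-latest-version';
import pkg from '../package.json';

// Non-blocking: the first call spawns the background worker and returns
// undefined; once the cache exists, a later CLI run returns the version once.
const latest = getLatestVersion({ pkg });
if (latest) {
  console.error(`Update available: ${pkg.version} -> ${latest}`);
}
```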

View File

@@ -1,5 +1,4 @@
import Client from '../client';
import { stringify } from 'qs';
import { Org } from '../../types';
import chalk from 'chalk';
import link from '../output/link';
@@ -19,9 +18,7 @@ export async function disconnectGitProvider(
org: Org,
projectId: string
) {
const fetchUrl = `/v9/projects/${projectId}/link?${stringify({
teamId: org.type === 'team' ? org.id : undefined,
})}`;
const fetchUrl = `/v9/projects/${projectId}/link`;
return client.fetch(fetchUrl, {
method: 'DELETE',
headers: {
@@ -37,9 +34,7 @@ export async function connectGitProvider(
type: string,
repo: string
) {
const fetchUrl = `/v9/projects/${projectId}/link?${stringify({
teamId: org.type === 'team' ? org.id : undefined,
})}`;
const fetchUrl = `/v9/projects/${projectId}/link`;
try {
return await client.fetch(fetchUrl, {
method: 'POST',

View File

@@ -1,38 +0,0 @@
import type Client from '../client';
import type { Deployment } from '../../types';
import getDeploymentByIdOrHost from '../deploy/get-deployment-by-id-or-host';
import handleCertError from '../certs/handle-cert-error';
/**
* Attempts to find the deployment by name or id.
* @param {Client} client - The Vercel client instance
* @param {string} contextName - The scope name
* @param {string} deployId - The deployment name or id to rollback
* @returns {Promise<Deployment>} Resolves an exit code or deployment info
*/
export default async function getDeploymentInfo(
client: Client,
contextName: string,
deployId: string
): Promise<Deployment> {
const deployment = handleCertError(
client.output,
await getDeploymentByIdOrHost(client, contextName, deployId)
);
if (deployment === 1) {
throw new Error(
`Failed to get deployment "${deployId}" in scope "${contextName}"`
);
}
if (deployment instanceof Error) {
throw deployment;
}
if (!deployment) {
throw new Error(`Couldn't find the deployment "${deployId}"`);
}
return deployment;
}

View File

@@ -1,11 +1,12 @@
import chalk from 'chalk';
import type Client from '../client';
import type { Deployment, Project, Team } from '../../types';
import { getCommandName } from '../pkg-name';
import getDeploymentInfo from './get-deployment-info';
import getDeployment from '../get-deployment';
import getScope from '../get-scope';
import getTeamById from '../teams/get-team-by-id';
import { isValidName } from '../is-valid-name';
import ms from 'ms';
import type { Project } from '../../types';
import rollbackStatus from './status';
/**
@@ -27,7 +28,7 @@ export default async function requestRollback({
project: Project;
timeout?: string;
}): Promise<number> {
const { output } = client;
const { config, output } = client;
const { contextName } = await getScope(client);
if (!isValidName(deployId)) {
@@ -37,27 +38,65 @@ export default async function requestRollback({
return 1;
}
output.spinner(
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}`
);
let deployment: Deployment;
let team: Team | undefined;
let deployment;
try {
deployment = await getDeploymentInfo(client, contextName, deployId);
} catch (err: any) {
output.error(err?.toString() || err);
return 1;
} finally {
output.stopSpinner();
output.spinner(
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}`
);
const [teamResult, deploymentResult] = await Promise.allSettled([
config.currentTeam ? getTeamById(client, config.currentTeam) : undefined,
getDeployment(client, contextName, deployId),
]);
if (teamResult.status === 'rejected') {
output.error(`Failed to retrieve team information: ${teamResult.reason}`);
return 1;
}
if (deploymentResult.status === 'rejected') {
output.error(deploymentResult.reason);
return 1;
}
team = teamResult.value;
deployment = deploymentResult.value;
// log the spinner text again because the fetch can finish before the spinner is visible
output.log(
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}`
);
} finally {
output.stopSpinner();
}
if (deployment.team?.id) {
if (!team || deployment.team.id !== team.id) {
output.error(
team
? `Deployment doesn't belong to current team ${chalk.bold(
contextName
)}`
: `Deployment belongs to a different team`
);
output.error(
`Use ${chalk.bold('vc switch')} to change your current team`
);
return 1;
}
} else if (team) {
output.error(
`Deployment doesn't belong to current team ${chalk.bold(contextName)}`
);
output.error(`Use ${chalk.bold('vc switch')} to change your current team`);
return 1;
}
// create the rollback
await client.fetch<any>(
`/v9/projects/${project.id}/rollback/${deployment.uid}`,
`/v9/projects/${project.id}/rollback/${deployment.id}`,
{
body: {}, // required
method: 'POST',
@@ -68,7 +107,7 @@ export default async function requestRollback({
output.log(
`Successfully requested rollback of ${chalk.bold(project.name)} to ${
deployment.url
} (${deployment.uid})`
} (${deployment.id})`
);
output.log(`To check rollback status, run ${getCommandName('rollback')}.`);
return 0;
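The rewritten fetch path relies on `Promise.allSettled` so the team and deployment lookups run concurrently while each failure is still surfaced individually. A stripped-down sketch of the pattern, with illustrative names:

```ts
async function fetchTeamAndDeployment(
  getTeam: () => Promise<{ id: string }>,
  getDeployment: () => Promise<{ id: string }>
) {
  // Unlike Promise.all, allSettled never short-circuits: both results come
  // back, each tagged as 'fulfilled' or 'rejected'.
  const [teamResult, deploymentResult] = await Promise.allSettled([
    getTeam(),
    getDeployment(),
  ]);
  if (teamResult.status === 'rejected') {
    throw new Error(`Failed to retrieve team information: ${teamResult.reason}`);
  }
  if (deploymentResult.status === 'rejected') {
    throw new Error(`Failed to retrieve deployment: ${deploymentResult.reason}`);
  }
  return { team: teamResult.value, deployment: deploymentResult.value };
}
```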

View File

@@ -8,7 +8,7 @@ import type {
} from '../../types';
import elapsed from '../output/elapsed';
import formatDate from '../format-date';
import getDeploymentInfo from './get-deployment-info';
import getDeployment from '../get-deployment';
import getScope from '../get-scope';
import ms from 'ms';
import renderAliasStatus from './render-alias-status';
@@ -168,8 +168,7 @@ async function renderJobFailed({
try {
const name = (
deployment ||
(await getDeploymentInfo(client, contextName, toDeploymentId))
deployment || (await getDeployment(client, contextName, toDeploymentId))
)?.url;
output.error(
`Failed to remap all aliases to the requested deployment ${name} (${toDeploymentId})`
@@ -228,13 +227,10 @@ async function renderJobSucceeded({
}) {
const { output } = client;
// attempt to get the new deployment url
let deploymentInfo = '';
try {
const deployment = await getDeploymentInfo(
client,
contextName,
toDeploymentId
);
const deployment = await getDeployment(client, contextName, toDeploymentId);
deploymentInfo = `${chalk.bold(deployment.url)} (${toDeploymentId})`;
} catch (err: any) {
output.debug(

View File

@@ -1,20 +1,8 @@
// Native
import { parse } from 'url';
/**
* Converts a valid deployment lookup parameter to a hostname.
* `http://google.com` => google.com
* google.com => google.com
*/
function toHost(url: string): string {
if (/^https?:\/\//.test(url)) {
return parse(url).host!;
}
// Remove any path if present
// `a.b.c/` => `a.b.c`
return url.replace(/(\/\/)?([^/]+)(.*)/, '$2');
export default function toHost(url: string): string {
return url.replace(/^(?:.*?\/\/)?([^/]+).*/, '$1');
}
export default toHost;
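The new one-line `toHost` folds the scheme check and path strip into a single regex. A quick illustration of the behavior it preserves:

```ts
// `(?:.*?\/\/)?` drops an optional scheme (or a protocol-relative `//`),
// `([^/]+)` captures the host, and the trailing `.*` discards any path.
toHost('https://google.com/search'); // 'google.com'
toHost('google.com');                // 'google.com'
toHost('a.b.c/');                    // 'a.b.c'
```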

View File

@@ -1,2 +0,0 @@
README.md
yarn.lock

View File

@@ -0,0 +1,4 @@
{
"name": "next",
"version": "13.0.4"
}

View File

@@ -0,0 +1,5 @@
{
"dependencies": {
"next": "13.0.4"
}
}

View File

@@ -1,23 +1,30 @@
import { URL } from 'url';
import chance from 'chance';
import { Deployment } from '@vercel/client';
import { client } from './client';
import { Build, User } from '../../src/types';
import { Build, Deployment, User } from '../../src/types';
import type { Request, Response } from 'express';
let deployments = new Map<string, Deployment>();
let deploymentBuilds = new Map<Deployment, Build[]>();
let alreadySetupDeplomentEndpoints = false;
type State = Deployment['readyState'];
/**
* Initializes a mock deployment and wires up the deployment endpoint
* scenarios.
*/
export function useDeployment({
creator,
state = 'READY',
createdAt,
}: {
creator: Pick<User, 'id' | 'email' | 'name' | 'username'>;
state?: State;
state?:
| 'BUILDING'
| 'ERROR'
| 'INITIALIZING'
| 'QUEUED'
| 'READY'
| 'CANCELED';
createdAt?: number;
}) {
setupDeploymentEndpoints();
@@ -28,35 +35,34 @@ export function useDeployment({
const id = `dpl_${chance().guid()}`;
const deployment: Deployment = {
id,
url: url.hostname,
name,
meta: {},
regions: [],
routes: [],
plan: 'hobby',
public: false,
version: 2,
createdAt,
createdIn: 'sfo1',
buildingAt: Date.now(),
ownerId: creator.id,
creator: {
uid: creator.id,
email: creator.email,
name: creator.name,
username: creator.username,
},
readyState: state,
state: state,
ready: createdAt + 30000,
env: {},
build: { env: {} },
target: 'production',
alias: [],
aliasAssigned: true,
aliasError: null,
build: { env: [] },
buildingAt: Date.now(),
createdAt,
createdIn: 'sfo1',
creator: {
uid: creator.id,
username: creator.username,
},
env: [],
id,
inspectorUrl: `https://vercel.com/${creator.name}/${id}`,
meta: {},
name,
ownerId: creator.id,
plan: 'hobby',
public: false,
ready: createdAt + 30000,
readyState: state,
regions: [],
routes: [],
status: state,
target: 'production',
type: 'LAMBDAS',
url: url.hostname,
version: 2,
};
deployments.set(deployment.id, deployment);
@@ -108,17 +114,18 @@ beforeEach(() => {
alreadySetupDeplomentEndpoints = false;
});
function setupDeploymentEndpoints() {
function setupDeploymentEndpoints(): void {
if (alreadySetupDeplomentEndpoints) {
return;
}
alreadySetupDeplomentEndpoints = true;
client.scenario.get('/:version/deployments/:id', (req, res) => {
client.scenario.get('/v13/deployments/:id', (req, res) => {
const { id } = req.params;
const { url } = req.query;
let deployment;
if (id === 'get') {
if (typeof url !== 'string') {
res.statusCode = 400;
@@ -127,65 +134,26 @@ function setupDeploymentEndpoints() {
deployment = Array.from(deployments.values()).find(d => {
return d.url === url;
});
} else if (id.includes('.')) {
deployment = Array.from(deployments.values()).find(d => {
return d.url === id;
});
} else {
// lookup by ID
deployment = deployments.get(id);
}
if (!deployment) {
res.statusCode = 404;
return res.json({
error: { code: 'not_found', message: 'Deployment not found', id },
});
}
res.json(deployment);
});
client.scenario.get('/v5/now/deployments/:id', (req, res) => {
const { id } = req.params;
const { url } = req.query;
let deployment;
if (id === 'get') {
if (typeof url !== 'string') {
res.statusCode = 400;
return res.json({ error: { code: 'bad_request' } });
}
deployment = Array.from(deployments.values()).find(d => {
return d.url === url;
});
} else {
// lookup by ID
deployment = deployments.get(id);
}
if (!deployment) {
res.statusCode = 404;
return res.json({
error: { code: 'not_found', message: 'Deployment not found', id },
});
}
res.json({
uid: deployment.id,
url: deployment.url,
name: '',
type: 'LAMBDAS',
state: 'READY',
version: deployment.version,
created: deployment.createdAt,
ready: deployment.ready,
buildingAt: deployment.buildingAt,
creator: {
uid: deployment.creator?.uid,
username: deployment.creator?.username,
},
target: deployment.target,
ownerId: undefined, // ?
projectId: undefined, // ?
inspectorUrl: deployment.inspectorUrl,
meta: {},
alias: deployment.alias,
});
});
client.scenario.get('/:version/deployments/:id/builds', (req, res) => {
client.scenario.get('/v11/deployments/:id/builds', (req, res) => {
const { id } = req.params;
const deployment = deployments.get(id);
if (!deployment) {
@@ -200,7 +168,7 @@ function setupDeploymentEndpoints() {
const currentDeployments = Array.from(deployments.values()).sort(
(a: Deployment, b: Deployment) => {
// sort in reverse chronological order
return b.createdAt - a.createdAt;
return (b?.createdAt || 0) - (a?.createdAt || 0);
}
);

View File

@@ -7,7 +7,7 @@ import {
} from '../../src/types';
import { formatProvider } from '../../src/util/git/connect-git-provider';
import { parseEnvironment } from '../../src/commands/pull';
import { Env } from '@vercel/build-utils/dist';
import type { Env } from '@vercel/build-utils';
const envs: ProjectEnvVariable[] = [
{
@@ -124,6 +124,7 @@ export const defaultProject = {
{
alias: ['foobar.com'],
aliasAssigned: 1571239348998,
buildingAt: 1571239348998,
createdAt: 1571239348998,
createdIn: 'sfo1',
deploymentHostname: 'a-project-name-rjtr4pz3f',

View File

@@ -2,12 +2,12 @@ import ms from 'ms';
import fs from 'fs-extra';
import { join } from 'path';
import { getWriteableDirectory } from '@vercel/build-utils';
import build from '../../../src/commands/build';
import { client } from '../../mocks/client';
import { defaultProject, useProject } from '../../mocks/project';
import { useTeams } from '../../mocks/team';
import { useUser } from '../../mocks/user';
import { setupFixture } from '../../helpers/setup-fixture';
import build from '../../../../src/commands/build';
import { client } from '../../../mocks/client';
import { defaultProject, useProject } from '../../../mocks/project';
import { useTeams } from '../../../mocks/team';
import { useUser } from '../../../mocks/user';
import { setupFixture } from '../../../helpers/setup-fixture';
import JSON5 from 'json5';
// TODO (@Ethan-Arrowood) - After shipping support for turbo and nx, revisit rush support
// import execa from 'execa';
@@ -15,7 +15,7 @@ import JSON5 from 'json5';
jest.setTimeout(ms('1 minute'));
const fixture = (name: string) =>
join(__dirname, '../../fixtures/unit/commands/build', name);
join(__dirname, '../../../fixtures/unit/commands/build', name);
describe('build', () => {
const originalCwd = process.cwd();

View File

@@ -9,10 +9,10 @@ describe('inspect', () => {
const deployment = useDeployment({ creator: user });
client.setArgv('inspect', deployment.url);
const exitCode = await inspect(client);
expect(exitCode).toEqual(0);
await expect(client.stderr).toOutput(
`> Fetched deployment ${deployment.url} in ${user.username}`
`> Fetched deployment "${deployment.url}" in ${user.username}`
);
expect(exitCode).toEqual(0);
});
it('should strip the scheme of a url', async () => {
@@ -22,7 +22,7 @@ describe('inspect', () => {
const exitCode = await inspect(client);
expect(exitCode).toEqual(0);
await expect(client.stderr).toOutput(
`> Fetched deployment ${deployment.url} in ${user.username}`
`> Fetched deployment "${deployment.url}" in ${user.username}`
);
});
@@ -30,10 +30,8 @@ describe('inspect', () => {
const user = useUser();
useDeployment({ creator: user });
client.setArgv('inspect', 'bad.com');
const exitCode = await inspect(client);
expect(exitCode).toEqual(1);
await expect(client.stderr).toOutput(
`Error: Failed to find deployment "bad.com" in ${user.username}\n`
await expect(inspect(client)).rejects.toThrow(
`Can't find the deployment "bad.com" under the context "${user.username}"`
);
});
});

View File

@@ -265,6 +265,27 @@ describe('rollback', () => {
await expect(exitCodePromise).resolves.toEqual(0);
});
it('should error if deployment belongs to different team', async () => {
const { cwd, previousDeployment } = initRollbackTest();
previousDeployment.team = {
id: 'abc',
name: 'abc',
slug: 'abc',
};
client.setArgv('rollback', previousDeployment.id, '--yes', '--cwd', cwd);
const exitCodePromise = rollback(client);
await expect(client.stderr).toOutput('Retrieving project…');
await expect(client.stderr).toOutput(
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
);
await expect(client.stderr).toOutput(
'Error: Deployment belongs to a different team'
);
await expect(exitCodePromise).resolves.toEqual(1);
});
});
type RollbackAlias = {
@@ -330,7 +351,7 @@ function initRollbackTest({
let counter = 0;
client.scenario.get(`/v9/projects/${project.id}`, (req, res) => {
client.scenario.get(`/:version/projects/${project.id}`, (req, res) => {
const data = { ...project };
if (req.query?.rollbackInfo === 'true') {
if (lastRollbackTarget && counter++ > rollbackPollCount) {
@@ -341,18 +362,6 @@ function initRollbackTest({
res.json(data);
});
client.scenario.get(`/:version/now/deployments/get`, (req, res) => {
const { url } = req.query;
if (url === previousDeployment.url) {
res.json({ id: previousDeployment.id });
} else {
res.statusCode = 404;
res.json({
error: { code: 'not_found', message: 'Deployment not found' },
});
}
});
client.scenario.get(
'/:version/projects/:project/rollback/aliases',
(req, res) => {

View File

@@ -0,0 +1,143 @@
import fs from 'fs-extra';
import sleep from '../../../src/util/sleep';
import tmp from 'tmp-promise';
import getLatestVersion from '../../../src/util/get-latest-version';
import { join } from 'path';
tmp.setGracefulCleanup();
jest.setTimeout(25000);
const cacheDir = tmp.tmpNameSync({
prefix: 'test-vercel-cli-get-latest-version-',
});
const cacheFile = join(cacheDir, 'package-updates', 'vercel-latest.json');
const pkg = {
name: 'vercel',
version: '27.3.0',
};
const versionRE = /^\d+\.\d+\.\d+$/;
describe('get latest version', () => {
afterEach(() => fs.remove(cacheDir));
it('should find newer version async', async () => {
// 1. first call, no cache file
let latest = getLatestVersion({
cacheDir,
pkg,
});
expect(latest).toEqual(undefined);
await waitForCacheFile();
let cache = await fs.readJSON(cacheFile);
expect(typeof cache).toEqual('object');
expect(typeof cache.expireAt).toEqual('number');
expect(cache.expireAt).toBeGreaterThan(Date.now());
expect(typeof cache.version).toEqual('string');
expect(cache.version).toEqual(expect.stringMatching(versionRE));
expect(cache.notified).toEqual(false);
// 2. call again and this time it'll return the version from the cache
latest = getLatestVersion({
cacheDir,
pkg,
});
expect(typeof latest).toBe('string');
expect(latest).toEqual(expect.stringMatching(versionRE));
cache = await fs.readJSON(cacheFile);
expect(cache.version).toEqual(expect.stringMatching(versionRE));
expect(cache.notified).toEqual(true);
// 3. notification already done, should skip
latest = getLatestVersion({
cacheDir,
pkg,
});
expect(latest).toEqual(undefined);
});
it('should not find a newer version', async () => {
// 1. first call, no cache file
let latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg: {
...pkg,
version: '999.0.0',
},
});
expect(latest).toEqual(undefined);
await waitForCacheFile();
// 2. call again and should recheck and still not find a new version
latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg: {
...pkg,
version: '999.0.0',
},
});
expect(latest).toEqual(undefined);
});
it('should not check twice', async () => {
// 1. first call, no cache file
let latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg,
});
expect(latest).toEqual(undefined);
// 2. immediately call again; the background check is still in flight, so this should still be undefined
latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg,
});
expect(latest).toEqual(undefined);
await waitForCacheFile();
// 3. call again and should recheck and find a new version
latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg,
});
expect(typeof latest).toBe('string');
expect(latest).toEqual(expect.stringMatching(versionRE));
});
it('should error if no arguments are passed in', () => {
expect(() => getLatestVersion(undefined as any)).toThrow(TypeError);
});
it('should error if package is invalid', () => {
expect(() => getLatestVersion({} as any)).toThrow(TypeError);
expect(() => getLatestVersion({ pkg: null as any })).toThrow(TypeError);
expect(() => getLatestVersion({ pkg: {} })).toThrow(TypeError);
expect(() => getLatestVersion({ pkg: { name: null as any } })).toThrow(
TypeError
);
expect(() => getLatestVersion({ pkg: { name: '' } })).toThrow(TypeError);
});
});
async function waitForCacheFile() {
const seconds = 20;
for (let i = 0; i < seconds * 4; i++) {
await sleep(250);
if (await fs.pathExists(cacheFile)) {
return;
}
}
// fail loudly rather than returning silently if the worker never writes the cache file
throw new Error(`Cache file was not created within ${seconds} seconds`);
}

View File

@@ -10,8 +10,6 @@ Firstly, install the package:
```bash
npm install @vercel/client
# or
yarn add @vercel/client
```
Next, load it:

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/client",
"version": "12.2.25",
"version": "12.2.30",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"homepage": "https://vercel.com",
@@ -15,9 +15,9 @@
},
"scripts": {
"build": "tsc",
"test-integration-once": "yarn test tests/create-deployment.test.ts tests/create-legacy-deployment.test.ts tests/paths.test.ts",
"test-integration-once": "pnpm test tests/create-deployment.test.ts tests/create-legacy-deployment.test.ts tests/paths.test.ts",
"test": "jest --env node --verbose --runInBand --bail",
"test-unit": "yarn test tests/unit.*test.*"
"test-unit": "pnpm test tests/unit.*test.*"
},
"engines": {
"node": ">= 14"
@@ -43,8 +43,8 @@
]
},
"dependencies": {
"@vercel/build-utils": "5.7.4",
"@vercel/routing-utils": "2.1.3",
"@vercel/build-utils": "workspace:5.8.2",
"@vercel/routing-utils": "workspace:2.1.7",
"@zeit/fetch": "5.2.0",
"async-retry": "1.2.3",
"async-sema": "3.0.0",

View File

@@ -36,7 +36,7 @@ City of the original client IP as calculated by Vercel Proxy.
#### Defined in
[src/edge-headers.ts:4](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L4)
[packages/edge/src/edge-headers.ts:4](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L4)
---
@@ -48,7 +48,7 @@ Country of the original client IP as calculated by Vercel Proxy.
#### Defined in
[src/edge-headers.ts:8](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L8)
[packages/edge/src/edge-headers.ts:8](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L8)
---
@@ -60,7 +60,7 @@ Client IP as calculated by Vercel Proxy.
#### Defined in
[src/edge-headers.ts:12](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L12)
[packages/edge/src/edge-headers.ts:12](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L12)
---
@@ -72,7 +72,7 @@ Latitude of the original client IP as calculated by Vercel Proxy.
#### Defined in
[src/edge-headers.ts:16](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L16)
[packages/edge/src/edge-headers.ts:16](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L16)
---
@@ -84,7 +84,7 @@ Longitude of the original client IP as calculated by Vercel Proxy.
#### Defined in
[src/edge-headers.ts:20](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L20)
[packages/edge/src/edge-headers.ts:20](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L20)
---
@@ -98,7 +98,7 @@ See [docs](https://vercel.com/docs/concepts/edge-network/headers#x-vercel-ip-cou
#### Defined in
[src/edge-headers.ts:26](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L26)
[packages/edge/src/edge-headers.ts:26](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L26)
---
@@ -110,7 +110,7 @@ The request ID for each request generated by Vercel Proxy.
#### Defined in
[src/edge-headers.ts:30](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L30)
[packages/edge/src/edge-headers.ts:30](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L30)
## Functions
@@ -140,7 +140,7 @@ Returns the location information for the incoming request.
#### Defined in
[src/edge-headers.ts:106](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L106)
[packages/edge/src/edge-headers.ts:106](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L106)
---
@@ -166,7 +166,7 @@ Returns the IP address of the request from the headers.
#### Defined in
[src/edge-headers.ts:77](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L77)
[packages/edge/src/edge-headers.ts:77](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L77)
---
@@ -209,7 +209,7 @@ const response = json(
#### Defined in
[src/response.ts:19](https://github.com/vercel/vercel/blob/main/packages/edge/src/response.ts#L19)
[packages/edge/src/response.ts:19](https://github.com/vercel/vercel/blob/main/packages/edge/src/response.ts#L19)
---
@@ -257,7 +257,7 @@ export default function middleware(_req: Request) {
#### Defined in
[src/middleware-helpers.ts:145](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L145)
[packages/edge/src/middleware-helpers.ts:145](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L145)
---
@@ -319,4 +319,4 @@ export const config = { matcher: '/api/users/:path*' };
#### Defined in
[src/middleware-helpers.ts:101](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L101)
[packages/edge/src/middleware-helpers.ts:101](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L101)

View File

@@ -26,7 +26,7 @@ along with the response headers from the origin.
#### Defined in
[src/middleware-helpers.ts:31](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L31)
[packages/edge/src/middleware-helpers.ts:31](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L31)
---
@@ -38,7 +38,7 @@ Fields to rewrite for the upstream request.
#### Defined in
[src/middleware-helpers.ts:35](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L35)
[packages/edge/src/middleware-helpers.ts:35](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L35)
---

View File

@@ -23,7 +23,7 @@ The city that the request originated from.
#### Defined in
[src/edge-headers.ts:47](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L47)
[packages/edge/src/edge-headers.ts:47](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L47)
---
@@ -35,7 +35,7 @@ The country that the request originated from.
#### Defined in
[src/edge-headers.ts:50](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L50)
[packages/edge/src/edge-headers.ts:50](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L50)
---
@@ -48,7 +48,7 @@ See [docs](https://vercel.com/docs/concepts/edge-network/headers#x-vercel-ip-cou
#### Defined in
[src/edge-headers.ts:58](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L58)
[packages/edge/src/edge-headers.ts:58](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L58)
---
@@ -60,7 +60,7 @@ The latitude of the client.
#### Defined in
[src/edge-headers.ts:61](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L61)
[packages/edge/src/edge-headers.ts:61](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L61)
---
@@ -72,7 +72,7 @@ The longitude of the client.
#### Defined in
[src/edge-headers.ts:64](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L64)
[packages/edge/src/edge-headers.ts:64](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L64)
---
@@ -84,4 +84,4 @@ The [Vercel Edge Network region](https://vercel.com/docs/concepts/edge-network/r
#### Defined in
[src/edge-headers.ts:53](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L53)
[packages/edge/src/edge-headers.ts:53](https://github.com/vercel/vercel/blob/main/packages/edge/src/edge-headers.ts#L53)

View File

@@ -35,4 +35,4 @@ export default async function middleware(request: Request): Promise<Response> {
#### Defined in
[src/middleware-helpers.ts:23](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L23)
[packages/edge/src/middleware-helpers.ts:23](https://github.com/vercel/vercel/blob/main/packages/edge/src/middleware-helpers.ts#L23)

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/edge",
"version": "0.2.1",
"version": "0.2.5",
"license": "MIT",
"main": "dist/index.js",
"module": "dist/index.mjs",
@@ -8,8 +8,8 @@
"scripts": {
"build": "tsup src/index.ts --dts --format esm,cjs",
"test": "jest --env node --verbose --runInBand --bail",
"test-unit": "yarn test",
"build:docs": "typedoc && prettier --write docs/**/*.md docs/*.md"
"test-unit": "pnpm test",
"build:docs": "typedoc && node scripts/fix-links.js && prettier --write docs/**/*.md docs/*.md"
},
"devDependencies": {
"@edge-runtime/jest-environment": "2.0.0",

View File

@@ -0,0 +1,24 @@
// this step is necessary until https://github.com/TypeStrong/typedoc/issues/2140 is fixed
const fs = require('fs');
const path = require('path');
const docs = path.join(__dirname, '..', 'docs');
const interfaces = path.join(docs, 'interfaces');
for (const dir of [docs, interfaces]) {
for (const entity of fs.readdirSync(dir)) {
try {
const entityPath = path.join(dir, entity);
const stat = fs.statSync(entityPath);
if (stat.isFile()) {
const contents = fs.readFileSync(entityPath, 'utf-8');
const pattern = /node_modules\/\.pnpm\/typescript@\d*\.\d*\.\d*\//gi;
fs.writeFileSync(entityPath, contents.replace(pattern, ''));
}
} catch (e) {
console.error('Error fixing links in docs', e);
}
}
}
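For reference, the shape of link the pattern above strips: typedoc resolves sources through pnpm's virtual store, and removing the prefix restores a repo-relative path (the example path is illustrative):

```ts
const pattern = /node_modules\/\.pnpm\/typescript@\d*\.\d*\.\d*\//gi;
const link = 'node_modules/.pnpm/typescript@4.9.4/lib/lib.dom.d.ts';
console.log(link.replace(pattern, '')); // 'lib/lib.dom.d.ts'
```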

View File

@@ -9,7 +9,7 @@ const test = process.platform === 'win32' ? it.skip : it;
test('docs are up to date', async () => {
const cwd = path.resolve(__dirname, '../');
await execAsync(`yarn build:docs`, { cwd });
await execAsync(`pnpm build:docs`, { cwd });
const result = await execAsync(`git status --short docs`, {
cwd,
encoding: 'utf-8',
@@ -27,10 +27,7 @@ test('docs are up to date', async () => {
if (lines !== '') {
const diff = await execAsync(`git diff docs`, { cwd, encoding: 'utf8' });
throw new Error(
'Docs are not up to date. Please re-run `yarn build:docs` to re-generate them.\nChanges:\n' +
lines +
'\n\n' +
diff.stdout
`Docs are not up to date. Please re-run \`pnpm build:docs\` to re-generate them.\nChanges:\n${lines}\n\n${diff.stdout}`
);
}

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/error-utils",
"version": "1.0.3",
"version": "1.0.7",
"description": "A collection of error utilities for vercel/vercel",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -12,7 +12,7 @@
"scripts": {
"build": "tsc",
"test": "jest --coverage --env node --verbose",
"test-unit": "yarn test"
"test-unit": "pnpm test"
},
"license": "MIT",
"devDependencies": {

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/frameworks",
"version": "1.1.18",
"version": "1.2.3",
"main": "./dist/frameworks.js",
"types": "./dist/frameworks.d.ts",
"files": [
@@ -10,7 +10,7 @@
"scripts": {
"build": "tsc",
"test": "jest --env node --verbose --runInBand --bail",
"test-unit": "yarn test"
"test-unit": "pnpm test"
},
"dependencies": {
"@iarna/toml": "2.2.3",
@@ -21,7 +21,7 @@
"@types/js-yaml": "3.12.1",
"@types/node": "14.18.33",
"@types/node-fetch": "2.5.8",
"@vercel/routing-utils": "2.1.3",
"@vercel/routing-utils": "workspace:2.1.7",
"ajv": "6.12.2",
"typescript": "4.3.4"
}

View File

@@ -30,6 +30,8 @@ export const frameworks = [
useRuntime: { src: 'package.json', use: '@vercel/next' },
detectors: {
some: [
// Intentionally does not detect a package name
// https://github.com/vercel/vercel/pull/8432
{
path: 'blitz.config.js',
},
@@ -74,9 +76,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"next":\\s*".+?"[^}]*}',
matchPackage: 'next',
},
],
},
@@ -119,9 +119,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"gatsby":\\s*".+?"[^}]*}',
matchPackage: 'gatsby',
},
],
},
@@ -205,6 +203,8 @@ export const frameworks = [
ignoreRuntimes: ['@vercel/node'],
detectors: {
every: [
// Intentionally does not detect a package name
// https://github.com/vercel/vercel/pull/7761
{
path: 'remix.config.js',
},
@@ -244,9 +244,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"astro":\\s*".+?"[^}]*}',
matchPackage: 'astro',
},
],
},
@@ -288,9 +286,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"hexo":\\s*".+?"[^}]*}',
matchPackage: 'hexo',
},
],
},
@@ -325,9 +321,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@11ty\\/eleventy":\\s*".+?"[^}]*}',
matchPackage: '@11ty/eleventy',
},
],
},
@@ -364,9 +358,7 @@ export const frameworks = [
detectors: {
some: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@docusaurus\\/core":\\s*".+?"[^}]*}',
matchPackage: '@docusaurus/core',
},
],
},
@@ -452,9 +444,7 @@ export const frameworks = [
detectors: {
some: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"docusaurus":\\s*".+?"[^}]*}',
matchPackage: 'docusaurus',
},
],
},
@@ -502,10 +492,10 @@ export const frameworks = [
website: 'https://preactjs.com',
detectors: {
every: [
// Intentionally does not detect "preact" package because that can be
// used to power other frameworks.
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"preact-cli":\\s*".+?"[^}]*}',
matchPackage: 'preact-cli',
},
],
},
@@ -549,14 +539,10 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"solid-js":\\s*".+?"[^}]*}',
matchPackage: 'solid-js',
},
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"solid-start":\\s*".+?"[^}]*}',
matchPackage: 'solid-start',
},
],
},
@@ -589,9 +575,7 @@ export const frameworks = [
detectors: {
some: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@dojo\\/framework":\\s*".+?"[^}]*}',
matchPackage: '@dojo/framework',
},
{
path: '.dojorc',
@@ -649,11 +633,12 @@ export const frameworks = [
description: 'An Ember app, created with the Ember CLI.',
website: 'https://emberjs.com/',
detectors: {
every: [
some: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"ember-cli":\\s*".+?"[^}]*}',
matchPackage: 'ember-source',
},
{
matchPackage: 'ember-cli',
},
],
},
@@ -698,9 +683,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@vue\\/cli-service":\\s*".+?"[^}]*}',
matchPackage: '@vue/cli-service',
},
],
},
@@ -753,9 +736,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@scullyio\\/init":\\s*".+?"[^}]*}',
matchPackage: '@scullyio/init',
},
],
},
@@ -790,9 +771,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@ionic\\/angular":\\s*".+?"[^}]*}',
matchPackage: '@ionic/angular',
},
],
},
@@ -835,9 +814,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@angular\\/cli":\\s*".+?"[^}]*}',
matchPackage: '@angular/cli',
},
],
},
@@ -895,9 +872,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"polymer-cli":\\s*".+?"[^}]*}',
matchPackage: 'polymer-cli',
},
],
},
@@ -953,14 +928,10 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"svelte":\\s*".+?"[^}]*}',
matchPackage: 'svelte',
},
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"sirv-cli":\\s*".+?"[^}]*}',
matchPackage: 'sirv-cli',
},
],
},
@@ -992,6 +963,7 @@ export const frameworks = [
],
},
{
// TODO: fix detected as "sveltekit-1"
name: 'SvelteKit (Legacy Beta)',
slug: 'sveltekit',
demo: 'https://sveltekit-template.vercel.app',
@@ -1081,9 +1053,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@ionic\\/react":\\s*".+?"[^}]*}',
matchPackage: '@ionic/react',
},
],
},
@@ -1143,14 +1113,10 @@ export const frameworks = [
detectors: {
some: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"react-scripts":\\s*".+?"[^}]*}',
matchPackage: 'react-scripts',
},
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"react-dev-utils":\\s*".+?"[^}]*}',
matchPackage: 'react-dev-utils',
},
],
},
@@ -1209,9 +1175,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"gridsome":\\s*".+?"[^}]*}',
matchPackage: 'gridsome',
},
],
},
@@ -1246,9 +1210,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"umi":\\s*".+?"[^}]*}',
matchPackage: 'umi',
},
],
},
@@ -1292,9 +1254,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"sapper":\\s*".+?"[^}]*}',
matchPackage: 'sapper',
},
],
},
@@ -1329,9 +1289,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"saber":\\s*".+?"[^}]*}',
matchPackage: 'saber',
},
],
},
@@ -1380,9 +1338,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@stencil\\/core":\\s*".+?"[^}]*}',
matchPackage: '@stencil/core',
},
],
},
@@ -1443,11 +1399,15 @@ export const frameworks = [
sort: 2,
envPrefix: 'NUXT_ENV_',
detectors: {
every: [
some: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"nuxt3?(-edge)?":\\s*".+?"[^}]*}',
matchPackage: 'nuxt',
},
{
matchPackage: 'nuxt3',
},
{
matchPackage: 'nuxt-edge',
},
],
},
@@ -1503,9 +1463,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"@redwoodjs\\/core":\\s*".+?"[^}]*}',
matchPackage: '@redwoodjs/core',
},
],
},
@@ -1630,7 +1588,10 @@ export const frameworks = [
description: 'A Brunch app, created with the Brunch CLI.',
website: 'https://brunch.io/',
detectors: {
every: [
some: [
{
matchPackage: 'brunch',
},
{
path: 'brunch-config.js',
},
@@ -1723,6 +1684,47 @@ export const frameworks = [
getOutputDirName: async () => 'public',
defaultVersion: '0.13.0', // Must match the build image
},
{
name: 'Hydrogen',
slug: 'hydrogen',
demo: 'https://hydrogen-template.vercel.app',
logo: 'https://api-frameworks.vercel.sh/framework-logos/hydrogen.svg',
tagline: 'React framework for headless commerce',
description: 'React framework for headless commerce',
website: 'https://hydrogen.shopify.dev',
useRuntime: { src: 'package.json', use: '@vercel/hydrogen' },
detectors: {
some: [
{
matchPackage: '@shopify/hydrogen',
},
{
path: 'hydrogen.config.js',
},
{
path: 'hydrogen.config.ts',
},
],
},
settings: {
installCommand: {
placeholder: '`yarn install`, `pnpm install`, or `npm install`',
},
buildCommand: {
value: 'shopify hydrogen build',
placeholder: '`npm run build` or `shopify hydrogen build`',
},
devCommand: {
value: 'shopify hydrogen dev',
placeholder: 'shopify hydrogen dev',
},
outputDirectory: {
value: 'dist',
},
},
dependency: '@shopify/hydrogen',
getOutputDirName: async () => 'dist',
},
{
name: 'Vite',
slug: 'vite',
@@ -1736,9 +1738,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"vite":\\s*".+?"[^}]*}',
matchPackage: 'vite',
},
],
},
@@ -1772,9 +1772,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"vitepress":\\s*".+?"[^}]*}',
matchPackage: 'vitepress',
},
],
},
@@ -1806,9 +1804,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*vuepress:\\s*".+?"[^}]*}',
matchPackage: 'vuepress',
},
],
},
@@ -1841,9 +1837,7 @@ export const frameworks = [
detectors: {
every: [
{
path: 'package.json',
matchContent:
'"(dev)?(d|D)ependencies":\\s*{[^}]*"parcel":\\s*".+?"[^}]*}',
matchPackage: 'parcel',
},
],
},
@@ -1931,44 +1925,6 @@ export const frameworks = [
},
],
},
{
name: 'Hydrogen',
slug: 'hydrogen',
demo: 'https://hydrogen-template.vercel.app',
logo: 'https://api-frameworks.vercel.sh/framework-logos/hydrogen.svg',
tagline: 'React framework for headless commerce',
description: 'React framework for headless commerce',
website: 'https://hydrogen.shopify.dev',
useRuntime: { src: 'package.json', use: '@vercel/hydrogen' },
detectors: {
some: [
{
path: 'hydrogen.config.js',
},
{
path: 'hydrogen.config.ts',
},
],
},
settings: {
installCommand: {
placeholder: '`yarn install`, `pnpm install`, or `npm install`',
},
buildCommand: {
value: 'shopify hydrogen build',
placeholder: '`npm run build` or `shopify hydrogen build`',
},
devCommand: {
value: 'shopify hydrogen dev',
placeholder: 'shopify hydrogen dev',
},
outputDirectory: {
value: 'dist',
},
},
dependency: '@shopify/hydrogen',
getOutputDirName: async () => 'dist',
},
{
name: 'Other',
slug: null,

View File

@@ -2,15 +2,24 @@ import { Rewrite, Route } from '@vercel/routing-utils';
export interface FrameworkDetectionItem {
/**
* A file path
* @example "package.json"
* A file path to detect.
* If specified, "matchPackage" cannot be specified.
* @example "some-framework.config.json"
*/
path: string;
path?: string;
/**
* A matcher
* A matcher for the entire file.
* If specified, "matchPackage" cannot be specified.
* @example "\"(dev)?(d|D)ependencies\":\\s*{[^}]*\"next\":\\s*\".+?\"[^}]*}"
*/
matchContent?: string;
/**
* A matcher for a package specifically found in a "package.json" file.
* If specified, "path" and "matchContext" cannot be specified.
* If specified in multiple detectors, the first one will be used to resolve the framework version.
* @example "\"(dev)?(d|D)ependencies\":\\s*{[^}]*\"next\":\\s*\".+?\"[^}]*}"
*/
matchPackage?: string;
}
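Putting the three mutually exclusive fields together, detector entries under the new shape look like these (the framework name is hypothetical):

```ts
const detectors = {
  every: [
    // shorthand: path defaults to "package.json" and the matched
    // dependency version is captured as the detected version
    { matchPackage: 'some-framework' },
  ],
  some: [
    // plain file-existence check
    { path: 'some-framework.config.js' },
    // explicit content match against a named file
    { path: 'package.json', matchContent: '"some-framework":\\s*".+?"' },
  ],
};
```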
export interface SettingPlaceholder {

View File

@@ -17,7 +17,7 @@ const SchemaFrameworkDetectionItem = {
items: [
{
type: 'object',
required: ['path'],
required: [],
additionalProperties: false,
properties: {
path: {
@@ -26,6 +26,9 @@ const SchemaFrameworkDetectionItem = {
matchContent: {
type: 'string',
},
matchPackage: {
type: 'string',
},
},
},
],

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/fs-detectors",
"version": "3.6.1",
"version": "3.7.3",
"description": "Vercel filesystem detectors",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -16,12 +16,12 @@
"scripts": {
"build": "tsc",
"test": "jest --env node --verbose --runInBand --bail test/unit.*test.*",
"test-unit": "yarn test"
"test-unit": "pnpm test"
},
"dependencies": {
"@vercel/error-utils": "1.0.3",
"@vercel/frameworks": "1.1.18",
"@vercel/routing-utils": "2.1.3",
"@vercel/error-utils": "workspace:1.0.7",
"@vercel/frameworks": "workspace:1.2.3",
"@vercel/routing-utils": "workspace:2.1.7",
"glob": "8.0.3",
"js-yaml": "4.1.0",
"json5": "2.2.2",
@@ -35,7 +35,7 @@
"@types/minimatch": "3.0.5",
"@types/node": "14.18.33",
"@types/semver": "7.3.10",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "workspace:5.8.2",
"typescript": "4.3.4"
}
}

View File

@@ -453,9 +453,7 @@ function getApiMatches() {
return [
{ src: 'middleware.[jt]s', use: `@vercel/node`, config },
{ src: 'api/**/*.js', use: `@vercel/node`, config },
{ src: 'api/**/*.mjs', use: `@vercel/node`, config },
{ src: 'api/**/*.ts', use: `@vercel/node`, config },
{ src: 'api/**/*.+(js|mjs|ts|tsx)', use: `@vercel/node`, config },
{ src: 'api/**/!(*_test).go', use: `@vercel/go`, config },
{ src: 'api/**/*.py', use: `@vercel/python`, config },
{ src: 'api/**/*.rb', use: `@vercel/ruby`, config },

View File

@@ -1,4 +1,5 @@
import type { Framework, FrameworkDetectionItem } from '@vercel/frameworks';
import { spawnSync } from 'child_process';
import { DetectorFilesystem } from './detectors/filesystem';
interface BaseFramework {
@@ -11,49 +12,96 @@ export interface DetectFrameworkOptions {
frameworkList: readonly BaseFramework[];
}
async function matches(fs: DetectorFilesystem, framework: BaseFramework) {
export interface DetectFrameworkRecordOptions {
fs: DetectorFilesystem;
frameworkList: readonly Framework[];
}
type MatchResult = {
framework: BaseFramework;
detectedVersion?: string;
};
async function matches(
fs: DetectorFilesystem,
framework: BaseFramework
): Promise<MatchResult | undefined> {
const { detectors } = framework;
if (!detectors) {
return false;
return;
}
const { every, some } = detectors;
if (every !== undefined && !Array.isArray(every)) {
return false;
return;
}
if (some !== undefined && !Array.isArray(some)) {
return false;
return;
}
const check = async ({ path, matchContent }: FrameworkDetectionItem) => {
const check = async ({
path,
matchContent,
matchPackage,
}: FrameworkDetectionItem): Promise<MatchResult | undefined> => {
if (matchPackage && matchContent) {
throw new Error(
`Cannot specify "matchPackage" and "matchContent" in the same detector for "${framework.slug}"`
);
}
if (matchPackage && path) {
throw new Error(
`Cannot specify "matchPackage" and "path" in the same detector for "${framework.slug}" because "path" is assumed to be "package.json".`
);
}
if (!path && !matchPackage) {
throw new Error(
`Must specify either "path" or "matchPackage" in detector for "${framework.slug}".`
);
}
if (!path) {
return false;
path = 'package.json';
}
if (matchPackage) {
matchContent = `"(dev)?(d|D)ependencies":\\s*{[^}]*"${matchPackage}":\\s*"(.+?)"[^}]*}`;
}
if ((await fs.hasPath(path)) === false) {
return false;
return;
}
if (matchContent) {
if ((await fs.isFile(path)) === false) {
return false;
return;
}
const regex = new RegExp(matchContent, 'gm');
const regex = new RegExp(matchContent, 'm');
const content = await fs.readFile(path);
if (!regex.test(content.toString())) {
return false;
const match = content.toString().match(regex);
if (!match) {
return;
}
if (matchPackage && match[3]) {
return {
framework,
detectedVersion: match[3],
};
}
}
return true;
return {
framework,
};
};
const result: boolean[] = [];
const result: (MatchResult | undefined)[] = [];
if (every) {
const everyResult = await Promise.all(every.map(item => check(item)));
@@ -61,11 +109,12 @@ async function matches(fs: DetectorFilesystem, framework: BaseFramework) {
}
if (some) {
let someResult = false;
let someResult: MatchResult | undefined;
for (const item of some) {
if (await check(item)) {
someResult = true;
const itemResult = await check(item);
if (itemResult) {
someResult = itemResult;
break;
}
}
@@ -73,9 +122,20 @@ async function matches(fs: DetectorFilesystem, framework: BaseFramework) {
result.push(someResult);
}
return result.every(res => res === true);
if (!result.every(res => !!res)) {
return;
}
const detectedVersion = result.find(
r => typeof r === 'object' && r.detectedVersion
)?.detectedVersion;
return {
framework,
detectedVersion,
};
}
// TODO: Deprecate and replace with `detectFrameworkRecord`
export async function detectFramework({
fs,
frameworkList,
@@ -90,3 +150,70 @@ export async function detectFramework({
);
return result.find(res => res !== null) ?? null;
}
/**
* Framework with a `detectedVersion` specifying the version
* or version range of the relevant package
*/
type VersionedFramework = Framework & {
detectedVersion?: string;
};
// Note: Does not currently support a `frameworkList` of monorepo managers
export async function detectFrameworkRecord({
fs,
frameworkList,
}: DetectFrameworkRecordOptions): Promise<VersionedFramework | null> {
const result = await Promise.all(
frameworkList.map(async frameworkMatch => {
const matchResult = await matches(fs, frameworkMatch);
if (matchResult) {
return {
...frameworkMatch,
detectedVersion: matchResult?.detectedVersion,
};
}
return null;
})
);
const frameworkRecord = result.find(res => res !== null) ?? null;
return frameworkRecord;
}
export function detectFrameworkVersion(
frameworkRecord: Framework
): string | undefined {
const allDetectors = [
...(frameworkRecord.detectors?.every || []),
...(frameworkRecord.detectors?.some || []),
];
const firstMatchPackage = allDetectors.find(d => d.matchPackage);
if (!firstMatchPackage?.matchPackage) {
return;
}
return lookupInstalledVersion(
process.execPath,
firstMatchPackage.matchPackage
);
}
function lookupInstalledVersion(
nodePath: string,
packageName: string
): string | undefined {
try {
const script = `require('${packageName}/package.json').version`;
// spawn the given node binary with `-p` to print the evaluated expression
return spawnSync(nodePath, ['-p', script], {
encoding: 'utf-8',
}).stdout.trim();
} catch (error) {
console.debug(
`Error looking up version of installed package "${packageName}": ${error}`
);
}
return;
}
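To make the capture-group arithmetic in `matches` concrete: the regex template built from `matchPackage` contains three groups, and group 3 is the dependency's version range. A small demonstration:

```ts
const matchPackage = 'next';
const pattern = `"(dev)?(d|D)ependencies":\\s*{[^}]*"${matchPackage}":\\s*"(.+?)"[^}]*}`;
const pkgJson = JSON.stringify({ dependencies: { next: '9.0.0' } });

const match = pkgJson.match(new RegExp(pattern, 'm'));
// groups: [1] optional "dev" prefix, [2] "d" or "D", [3] the version
console.log(match?.[3]); // '9.0.0'
```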

View File

@@ -5,7 +5,11 @@ export {
detectApiExtensions,
} from './detect-builders';
export { detectFileSystemAPI } from './detect-file-system-api';
export { detectFramework } from './detect-framework';
export {
detectFramework,
detectFrameworkRecord,
detectFrameworkVersion,
} from './detect-framework';
export { getProjectPaths } from './get-project-paths';
export { DetectorFilesystem } from './detectors/filesystem';
export { LocalFileSystemDetector } from './detectors/local-file-system-detector';

View File

@@ -66,7 +66,7 @@ export async function getMonorepoDefaultSettings(
return {
monorepoManager: 'turbo',
buildCommand: `cd ${relativeToRoot} && npx turbo run build --filter=${projectName}...`,
buildCommand: `cd ${relativeToRoot} && npx turbo run build --filter={${projectPath}}...`,
installCommand: `cd ${relativeToRoot} && ${packageManager} install`,
commandForIgnoringBuildStep: `cd ${relativeToRoot} && npx turbo-ignore`,
};

View File

@@ -1369,6 +1369,25 @@ describe('Test `detectBuilders` with `featHandleMiss=true`', () => {
expect((errorRoutes![0] as Source).status).toBe(404);
});
it('api detect node tsx files', async () => {
const files = [
'api/index.tsx',
'api/users.tsx',
'api/config/staging.tsx',
'api/config/production.tsx',
'api/src/controllers/health.tsx',
'api/src/controllers/user.module.tsx',
];
const { builders, errorRoutes } = await detectBuilders(files, undefined, {
featHandleMiss,
});
expect(builders?.length).toBe(6);
expect(builders!.every(b => b.src!.endsWith('.tsx'))).toBe(true);
expect(errorRoutes?.length).toBe(1);
expect((errorRoutes![0] as Source).status).toBe(404);
});
it('just public', async () => {
const files = ['public/index.html', 'public/favicon.ico', 'README.md'];

View File

@@ -0,0 +1,35 @@
import frameworkList from '@vercel/frameworks';
import { detectFramework } from '../src';
import { FixtureFilesystem } from './utils/fixture-filesystem';
import { readdirSync, lstatSync } from 'fs';
import { join } from 'path';
function getExamples() {
const root = join(__dirname, '..', '..', '..');
const examplesPath = join(root, 'examples');
const examples = readdirSync(examplesPath);
const exampleDirs = examples.filter(example => {
const examplePath = join(examplesPath, example);
const stat = lstatSync(examplePath);
return stat.isDirectory();
});
return exampleDirs.map(exampleDirName => {
return [exampleDirName, join(examplesPath, exampleDirName)];
});
}
describe('examples should be detected', () => {
const examples = getExamples();
it.each(examples)('%s', async (example, examplePath) => {
const fs = new FixtureFilesystem(examplePath);
const framework = await detectFramework({ fs, frameworkList });
if (!framework) {
throw new Error(`Framework not detected for example "${example}".`);
}
expect(framework).toBe(example);
});
});

View File

@@ -0,0 +1,194 @@
import frameworkList from '@vercel/frameworks';
import { detectFrameworkRecord } from '../src';
import VirtualFilesystem from './virtual-file-system';

describe('detectFrameworkRecord', () => {
  it('Do not detect anything', async () => {
    const fs = new VirtualFilesystem({
      'README.md': '# hi',
      'api/cheese.js': 'export default (req, res) => res.end("cheese");',
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe(undefined);
  });

  it('Detects a framework record with a matchPackage detector', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          next: '9.0.0',
        },
      }),
    });
    const frameworkRecord = await detectFrameworkRecord({ fs, frameworkList });
    if (!frameworkRecord) {
      throw new Error(
        '`frameworkRecord` was not detected, expected "nextjs" frameworks object'
      );
    }
    expect(frameworkRecord.slug).toBe('nextjs');
    expect(frameworkRecord.name).toBe('Next.js');
    expect(frameworkRecord.detectedVersion).toBe('9.0.0');
  });

  it('Detects a framework record with a matchPackage detector with slashes', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          '@ionic/angular': '5.0.0',
        },
      }),
    });
    const frameworkRecord = await detectFrameworkRecord({ fs, frameworkList });
    if (!frameworkRecord) {
      throw new Error(
        '`frameworkRecord` was not detected, expected "ionic-angular" frameworks object'
      );
    }
    expect(frameworkRecord.slug).toBe('ionic-angular');
    expect(frameworkRecord.detectedVersion).toBe('5.0.0');
  });

  it('Detect first framework version found', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          'nuxt-edge': '3.0.0',
          nuxt3: '2.0.0',
          nuxt: '1.0.0',
        },
      }),
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('nuxtjs');
    expect(framework?.detectedVersion).toBe('1.0.0');
  });

  it('Detect frameworks based on ascending order in framework list', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          next: '9.0.0',
          gatsby: '4.18.0',
        },
      }),
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('nextjs');
  });

  it('Detect Nuxt.js', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          nuxt: '1.0.0',
        },
      }),
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('nuxtjs');
  });

  it('Detect Nuxt.js edge', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          'nuxt-edge': '1.0.0',
        },
      }),
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('nuxtjs');
  });

  it('Detect Gatsby', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          gatsby: '1.0.0',
        },
      }),
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('gatsby');
  });

  it('Detect Hugo #1', async () => {
    const fs = new VirtualFilesystem({
      'config.yaml': 'baseURL: http://example.org/',
      'content/post.md': '# hello world',
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('hugo');
  });

  it('Detect Hugo #2', async () => {
    const fs = new VirtualFilesystem({
      'config.json': '{ "baseURL": "http://example.org/" }',
      'content/post.md': '# hello world',
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('hugo');
  });

  it('Detect Hugo #3', async () => {
    const fs = new VirtualFilesystem({
      'config.toml': 'baseURL = "http://example.org/"',
      'content/post.md': '# hello world',
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('hugo');
  });

  it('Detect Jekyll', async () => {
    const fs = new VirtualFilesystem({
      '_config.yml': 'config',
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('jekyll');
  });

  it('Detect Middleman', async () => {
    const fs = new VirtualFilesystem({
      'config.rb': 'config',
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('middleman');
  });

  it('Detect Scully', async () => {
    const fs = new VirtualFilesystem({
      'package.json': JSON.stringify({
        dependencies: {
          '@angular/cli': 'latest',
          '@scullyio/init': 'latest',
        },
      }),
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('scully');
  });

  it('Detect Zola', async () => {
    const fs = new VirtualFilesystem({
      'config.toml': 'base_url = "/"',
    });
    const framework = await detectFrameworkRecord({ fs, frameworkList });
    expect(framework?.slug).toBe('zola');
  });
});
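
Taken together, the new suite pins down the `detectFrameworkRecord` contract: it resolves to a full framework record (slug, name, and the version string found in `package.json`) rather than a bare slug. A minimal standalone sketch of that flow, using only the exports the tests themselves import; the `next: '13.1.1'` fixture and the `main` wrapper are illustrative, not part of the diff:

```ts
import frameworkList from '@vercel/frameworks';
import { detectFrameworkRecord } from '../src';
import VirtualFilesystem from './virtual-file-system';

async function main() {
  // In-memory project whose only detection signal is a Next.js dependency.
  const fs = new VirtualFilesystem({
    'package.json': JSON.stringify({ dependencies: { next: '13.1.1' } }),
  });

  const record = await detectFrameworkRecord({ fs, frameworkList });
  // The tests read the result with `?.`, so guard before using metadata.
  console.log(record?.slug); // 'nextjs'
  console.log(record?.detectedVersion); // '13.1.1' (the dependency string as written)
}

main().catch(console.error);
```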

View File

@@ -1,131 +1,7 @@
import path from 'path';
import frameworkList from '@vercel/frameworks';
import workspaceManagers from '../src/workspaces/workspace-managers';
import { detectFramework, DetectorFilesystem } from '../src';
import { DetectorFilesystemStat } from '../src/detectors/filesystem';

const posixPath = path.posix;

class VirtualFilesystem extends DetectorFilesystem {
  private files: Map<string, Buffer>;
  private cwd: string;

  constructor(files: { [key: string]: string | Buffer }, cwd = '') {
    super();
    this.files = new Map();
    this.cwd = cwd;
    Object.entries(files).map(([key, value]) => {
      const buffer = typeof value === 'string' ? Buffer.from(value) : value;
      this.files.set(key, buffer);
    });
  }

  private _normalizePath(rawPath: string): string {
    return posixPath.normalize(rawPath);
  }

  async _hasPath(name: string): Promise<boolean> {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    for (const file of this.files.keys()) {
      if (file.startsWith(basePath)) {
        return true;
      }
    }
    return false;
  }

  async _isFile(name: string): Promise<boolean> {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    return this.files.has(basePath);
  }

  async _readFile(name: string): Promise<Buffer> {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    const file = this.files.get(basePath);
    if (file === undefined) {
      throw new Error('File does not exist');
    }
    if (typeof file === 'string') {
      return Buffer.from(file);
    }
    return file;
  }

  /**
   * An example of how to implement readdir for a virtual filesystem.
   */
  async _readdir(name = '/'): Promise<DetectorFilesystemStat[]> {
    return (
      [...this.files.keys()]
        .map(filepath => {
          const basePath = this._normalizePath(
            posixPath.join(this.cwd, name === '/' ? '' : name)
          );
          const fileDirectoryName = posixPath.dirname(filepath);
          if (fileDirectoryName === basePath) {
            return {
              name: posixPath.basename(filepath),
              path: filepath.replace(
                this.cwd === '' ? this.cwd : `${this.cwd}/`,
                ''
              ),
              type: 'file',
            };
          }
          if (
            (basePath === '.' && fileDirectoryName !== '.') ||
            fileDirectoryName.startsWith(basePath)
          ) {
            let subDirectoryName = fileDirectoryName.replace(
              basePath === '.' ? '' : `${basePath}/`,
              ''
            );
            if (subDirectoryName.includes('/')) {
              subDirectoryName = subDirectoryName.split('/')[0];
            }
            return {
              name: subDirectoryName,
              path:
                name === '/'
                  ? subDirectoryName
                  : this._normalizePath(posixPath.join(name, subDirectoryName)),
              type: 'dir',
            };
          }
          return null;
        })
        // remove nulls
        .filter((stat): stat is DetectorFilesystemStat => stat !== null)
        // remove duplicates
        .filter(
          (stat, index, self) =>
            index ===
            self.findIndex(s => s.name === stat.name && s.path === stat.path)
        )
    );
  }

  /**
   * An example of how to implement chdir for a virtual filesystem.
   */
  _chdir(name: string): DetectorFilesystem {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    const files = Object.fromEntries(
      [...this.files.keys()].map(key => [key, this.files.get(key) ?? ''])
    );
    return new VirtualFilesystem(files, basePath);
  }
}

import { detectFramework } from '../src';
import VirtualFilesystem from './virtual-file-system';

describe('DetectorFilesystem', () => {
  it('should return the directory contents relative to the cwd', async () => {

@@ -345,6 +221,30 @@ describe('DetectorFilesystem', () => {
      expect(await detectFramework({ fs, frameworkList })).toBe('nuxtjs');
    });

    it('Detect Nuxt.js Edge', async () => {
      const fs = new VirtualFilesystem({
        'package.json': JSON.stringify({
          dependencies: {
            'nuxt-edge': '1.0.0',
          },
        }),
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('nuxtjs');
    });

    it('Detect Nuxt.js 3', async () => {
      const fs = new VirtualFilesystem({
        'package.json': JSON.stringify({
          dependencies: {
            nuxt3: '1.0.0',
          },
        }),
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('nuxtjs');
    });

    it('Detect Gatsby', async () => {
      const fs = new VirtualFilesystem({
        'package.json': JSON.stringify({

@@ -428,5 +328,69 @@ describe('DetectorFilesystem', () => {
      expect(await detectFramework({ fs, frameworkList })).toBe('blitzjs');
    });

    it('Detect Ember via `ember-source`', async () => {
      const fs = new VirtualFilesystem({
        'package.json': JSON.stringify({
          dependencies: {
            'ember-source': 'latest',
          },
        }),
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('ember');
    });

    it('Detect Ember via `ember-cli`', async () => {
      const fs = new VirtualFilesystem({
        'package.json': JSON.stringify({
          dependencies: {
            'ember-cli': 'latest',
          },
        }),
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('ember');
    });

    it('Detect Brunch via `brunch`', async () => {
      const fs = new VirtualFilesystem({
        'package.json': JSON.stringify({
          dependencies: {
            brunch: 'latest',
          },
        }),
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('brunch');
    });

    it('Detect Brunch via `brunch-config.js`', async () => {
      const fs = new VirtualFilesystem({
        'brunch-config.js': '// some config',
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('brunch');
    });

    it('Detect Hydrogen via `hydrogen.config.js`', async () => {
      const fs = new VirtualFilesystem({
        'hydrogen.config.js': '// some config',
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('hydrogen');
    });

    it('Detect Hydrogen via `@shopify/hydrogen`', async () => {
      const fs = new VirtualFilesystem({
        'package.json': JSON.stringify({
          dependencies: {
            '@shopify/hydrogen': 'latest',
          },
        }),
      });
      expect(await detectFramework({ fs, frameworkList })).toBe('hydrogen');
    });
  });
});
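
For contrast with the record-returning API above, `detectFramework` (exercised throughout this file) resolves directly to a slug string. A hedged sketch; the fixture mirrors the Brunch test above, and the `demo` wrapper and variable names are ours:

```ts
import frameworkList from '@vercel/frameworks';
import { detectFramework } from '../src';
import VirtualFilesystem from './virtual-file-system';

async function demo(): Promise<void> {
  // A config file alone is enough for the file-based detectors.
  const fs = new VirtualFilesystem({ 'brunch-config.js': '// some config' });
  const slug = await detectFramework({ fs, frameworkList });
  console.log(slug); // 'brunch'
}

demo().catch(console.error);
```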

View File

@@ -37,7 +37,8 @@ describe('getMonorepoDefaultSettings', () => {
  const expectedResultMap: Record<string, Record<string, string>> = {
    turbo: {
      monorepoManager: 'turbo',
      buildCommand: 'cd ../.. && npx turbo run build --filter=app-1...',
      buildCommand:
        'cd ../.. && npx turbo run build --filter={packages/app-1}...',
      installCommand: 'cd ../.. && yarn install',
      commandForIgnoringBuildStep: 'cd ../.. && npx turbo-ignore',
    },
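
Restating the changed turbo expectation outside the diff makes the new filter easier to read. In the object below (a sketch, not test code), `{packages/app-1}` is turbo's directory-based filter syntax, which selects the workspace by its path rather than its package name, and the trailing `...` keeps the package's dependencies in scope:

```ts
// Restated default settings for the turbo case, as a plain value.
const turboDefaults = {
  monorepoManager: 'turbo',
  buildCommand: 'cd ../.. && npx turbo run build --filter={packages/app-1}...',
  installCommand: 'cd ../.. && yarn install',
  commandForIgnoringBuildStep: 'cd ../.. && npx turbo-ignore',
};
```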

View File

@@ -0,0 +1,126 @@
import path from 'path';
import { DetectorFilesystem } from '../src';
import { DetectorFilesystemStat } from '../src/detectors/filesystem';

const posixPath = path.posix;

export default class VirtualFilesystem extends DetectorFilesystem {
  private files: Map<string, Buffer>;
  private cwd: string;

  constructor(files: { [key: string]: string | Buffer }, cwd = '') {
    super();
    this.files = new Map();
    this.cwd = cwd;
    Object.entries(files).map(([key, value]) => {
      const buffer = typeof value === 'string' ? Buffer.from(value) : value;
      this.files.set(key, buffer);
    });
  }

  private _normalizePath(rawPath: string): string {
    return posixPath.normalize(rawPath);
  }

  async _hasPath(name: string): Promise<boolean> {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    for (const file of this.files.keys()) {
      if (file.startsWith(basePath)) {
        return true;
      }
    }
    return false;
  }

  async _isFile(name: string): Promise<boolean> {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    return this.files.has(basePath);
  }

  async _readFile(name: string): Promise<Buffer> {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    const file = this.files.get(basePath);
    if (file === undefined) {
      throw new Error('File does not exist');
    }
    if (typeof file === 'string') {
      return Buffer.from(file);
    }
    return file;
  }

  /**
   * An example of how to implement readdir for a virtual filesystem.
   */
  async _readdir(name = '/'): Promise<DetectorFilesystemStat[]> {
    return (
      [...this.files.keys()]
        .map(filepath => {
          const basePath = this._normalizePath(
            posixPath.join(this.cwd, name === '/' ? '' : name)
          );
          const fileDirectoryName = posixPath.dirname(filepath);
          if (fileDirectoryName === basePath) {
            return {
              name: posixPath.basename(filepath),
              path: filepath.replace(
                this.cwd === '' ? this.cwd : `${this.cwd}/`,
                ''
              ),
              type: 'file',
            };
          }
          if (
            (basePath === '.' && fileDirectoryName !== '.') ||
            fileDirectoryName.startsWith(basePath)
          ) {
            let subDirectoryName = fileDirectoryName.replace(
              basePath === '.' ? '' : `${basePath}/`,
              ''
            );
            if (subDirectoryName.includes('/')) {
              subDirectoryName = subDirectoryName.split('/')[0];
            }
            return {
              name: subDirectoryName,
              path:
                name === '/'
                  ? subDirectoryName
                  : this._normalizePath(posixPath.join(name, subDirectoryName)),
              type: 'dir',
            };
          }
          return null;
        })
        // remove nulls
        .filter((stat): stat is DetectorFilesystemStat => stat !== null)
        // remove duplicates
        .filter(
          (stat, index, self) =>
            index ===
            self.findIndex(s => s.name === stat.name && s.path === stat.path)
        )
    );
  }

  /**
   * An example of how to implement chdir for a virtual filesystem.
   */
  _chdir(name: string): DetectorFilesystem {
    const basePath = this._normalizePath(posixPath.join(this.cwd, name));
    const files = Object.fromEntries(
      [...this.files.keys()].map(key => [key, this.files.get(key) ?? ''])
    );
    return new VirtualFilesystem(files, basePath);
  }
}
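
A short usage sketch of the extracted helper; the fixtures are ours, and the cast after `_chdir` is only needed because that method is typed as returning the base `DetectorFilesystem`:

```ts
import VirtualFilesystem from './virtual-file-system';

async function demo() {
  const vfs = new VirtualFilesystem({
    'package.json': '{}',
    'packages/app/package.json': '{ "name": "app" }',
  });

  // Top-level listing: one file entry plus one synthesized directory entry.
  console.log(await vfs._readdir('/'));
  // [ { name: 'package.json', path: 'package.json', type: 'file' },
  //   { name: 'packages', path: 'packages', type: 'dir' } ]

  // Scope into a subdirectory; reads become relative to the new cwd.
  const sub = vfs._chdir('packages/app') as VirtualFilesystem;
  console.log((await sub._readFile('package.json')).toString()); // { "name": "app" }
}

demo().catch(console.error);
```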

View File

@@ -1,4 +1,4 @@
# gatsby-plugin-vercel
# @vercel/gatsby-plugin-vercel-analytics
---
@@ -15,13 +15,7 @@ This plugin sends [Core Web Vitals](https://web.dev/vitals/) to Vercel Analytics
## Install
```bash
npm i gatsby-plugin-vercel
```
or
```bash
yarn add gatsby-plugin-vercel
npm i @vercel/gatsby-plugin-vercel-analytics
```
## Usage
@@ -31,7 +25,7 @@ yarn add gatsby-plugin-vercel
module.exports = {
  plugins: [
    {
      resolve: "gatsby-plugin-vercel",
      resolve: "@vercel/gatsby-plugin-vercel-analytics",
      options: {
        // (optional) Prints metrics in the console when true
        debug: false,

View File

@@ -1,6 +1,6 @@
{
  "name": "@vercel/gatsby-plugin-vercel-analytics",
  "version": "1.0.0",
  "version": "1.0.5",
  "description": "Track Core Web Vitals in Gatsby projects with Vercel Analytics.",
  "main": "index.js",
  "files": [

Some files were not shown because too many files have changed in this diff.