Compare commits


31 Commits

Author SHA1 Message Date
Steven
bef1aec766 Publish Stable
- @vercel/build-utils@5.0.3
 - vercel@27.1.5
 - @vercel/client@12.1.2
 - @vercel/frameworks@1.1.1
 - @vercel/fs-detectors@2.0.1
 - @vercel/go@2.0.7
 - @vercel/hydrogen@0.0.4
 - @vercel/next@3.1.7
 - @vercel/node@2.4.4
 - @vercel/python@3.0.7
 - @vercel/redwood@1.0.8
 - @vercel/remix@1.0.9
 - @vercel/routing-utils@2.0.0
 - @vercel/ruby@1.3.15
 - @vercel/static-build@1.0.8
2022-07-15 15:40:37 -04:00
Steven
4f4a42813f [build-utils][node][python][ruby] Update error message for EOL runtimes (#8167)
This PR updates the error message shown when the detected runtime version has reached End-of-Life (EOL).
2022-07-15 15:38:38 -04:00
Steven
181a492d91 [routing-utils] MAJOR refactor getTransformedRoutes and types (#8155)
This is a semver-major change to the public API of `@vercel/routing-utils`, which includes the following breaking changes (a migration sketch follows the list).

1. `getTransformedRoutes({ nowConfig })` signature changed to `getTransformedRoutes(nowConfig)`
2. `type Source` renamed `type RouteWithSrc`
3. `type Handler` renamed `type RouteWithHandle`
4. `interface VercelConfig` removed
5. `type NowConfig` removed
6. `type NowRewrite` removed
7. `type NowRedirect` removed
8. `type NowHeader` removed
9. `type NowHeaderKeyValue` removed
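
A minimal before/after migration sketch (assuming a TypeScript consumer; the config object contents are illustrative):

```ts
import { getTransformedRoutes } from '@vercel/routing-utils';

// Illustrative vercel.json-style config; any valid config object works here.
const vercelConfig = { cleanUrls: true, trailingSlash: false };

// v1.x: const result = getTransformedRoutes({ nowConfig: vercelConfig });
// v2.0.0: pass the config object directly.
const { routes, error } = getTransformedRoutes(vercelConfig);
if (error) {
  console.error(error);
} else {
  console.log(routes);
}

// Type renames to update in imports:
//   Source  -> RouteWithSrc
//   Handler -> RouteWithHandle
// NowConfig, VercelConfig, NowRewrite, NowRedirect, NowHeader, NowHeaderKeyValue were removed.
```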
2022-07-15 14:05:08 -04:00
Sean Massa
1be7a80bb8 Publish Stable
- vercel@27.1.4
 - @vercel/next@3.1.6
2022-07-15 11:20:56 -05:00
Sean Massa
0428d4744e [cli] write config.json when exiting because of error in builder (#8163)
Co-authored-by: Steven <steven@ceriously.com>
2022-07-15 11:16:31 -05:00
JJ Kasper
2a929a4bb9 [next] Update allowQuery for prerendered paths (#8158)
This updates our `allowQuery` generation to ignore all query values for build-time prerender paths, as these match before dynamic routes (they are filesystem routes) and the query values would not be overridden properly like they are for fallback prerender paths. This also adds testing for both prerender path types with on-demand ISR to ensure the cache is updated as expected regardless of the query.

A deployment with the patch can be seen here: https://nextjs-issue-odr-simple-hrjt2dagm-ijjk-testing.vercel.app/

### Related Issues

x-ref: https://github.com/vercel/next.js/issues/38306
x-ref: https://github.com/vercel/next.js/issues/38653
2022-07-15 11:37:05 -04:00
JJ Kasper
accd308dc5 [tests] Update log for update-canary-tag script (#8156)
### Related Issues

Noticed this log was not being converted to a string, so we were losing some context; this corrects that in case we have a failure on this step in the future.

Fixes: https://github.com/vercel/vercel/runs/7344626478?check_suite_focus=true#step:8:258

2022-07-15 02:16:51 +00:00
Sean Massa
e2d4efab08 Publish Stable
- vercel@27.1.3
2022-07-14 12:04:05 -05:00
JJ Kasper
7e0dd6f808 [tests] Update to latest version of turbo (#8152)
### Related Issues

Updates to the latest turbo, which includes patches for cached files.

x-ref: [slack thread](https://vercel.slack.com/archives/C02CDC2ALJH/p1657767763630359?thread_ts=1657757803.039099&cid=C02CDC2ALJH)

2022-07-14 12:41:28 +00:00
Nathan Rajlich
8971e02e49 [cli] Normalize output file paths in vercel build (#8149)
When a Builder returns an output asset whose path contains irregular slashes (duplicate slashes or leading/trailing slashes), they are now removed from the file path before the output asset is created.

This fixes an edge case where `@vercel/next` could end up outputting a Serverless Function with a trailing slash (i.e. `en-US/`). Before this PR, that would be serialized to the filesystem at `en-US/.func`, but after this fix it's saved in the correct `en-US.func` directory.
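
The normalization helper added here (shown in full in the diff further down) behaves roughly like this:

```ts
import { posix } from 'path';

// Same logic as the `stripDuplicateSlashes()` helper introduced in this PR:
// collapse duplicate slashes and strip leading/trailing ones.
function stripDuplicateSlashes(filePath: string): string {
  return posix.normalize(filePath).replace(/(^\/|\/$)/g, '');
}

stripDuplicateSlashes('en-US/'); // => 'en-US' (so the function is written to `en-US.func`)
stripDuplicateSlashes('//api//user'); // => 'api/user'
```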
2022-07-14 07:58:20 +00:00
Nathan Rajlich
10c91c8579 [cli] Store build error in "builds.json" file in vc build (#8148)
When a build fails, store the serialized Error in the "builds.json" file under the "build" object of the Builder that failed.

Example:

```json
{
  "//": "This file was generated by the `vercel build` command. It is not part of the Build Output API.",
  "target": "preview",
  "argv": [
    "/usr/local/bin/node",
    "/Users/nrajlich/Code/vercel/vercel/packages/cli/src/index.ts",
    "build",
    "--cwd",
    "/Users/nrajlich/Downloads/vc-build-next-repro/"
  ],
  "builds": [
    {
      "require": "@vercel/next",
      "requirePath": "/Users/nrajlich/Code/vercel/vercel/packages/next/dist/index",
      "apiVersion": 2,
      "src": "package.json",
      "use": "@vercel/next",
      "config": {
        "zeroConfig": true,
        "framework": "nextjs"
      },
      "error": {
        "name": "Error",
        "message": "Command \"pnpm run build\" exited with 1",
        "stack": "Error: Command \"pnpm run build\" exited with 1\n    at ChildProcess.<anonymous> (/Users/nrajlich/Code/vercel/vercel/packages/build-utils/dist/index.js:20591:20)\n    at ChildProcess.emit (node:events:527:28)\n    at ChildProcess.emit (node:domain:475:12)\n    at maybeClose (node:internal/child_process:1092:16)\n    at Process.ChildProcess._handle.onexit (node:internal/child_process:302:5)",
        "hideStackTrace": true,
        "code": "BUILD_UTILS_SPAWN_1"
      }
    }
  ]
}
```
2022-07-14 01:05:00 +00:00
Nathan Rajlich
bfdbe58675 Publish Stable
- vercel@27.1.2
 - @vercel/static-build@1.0.7
2022-07-13 13:51:02 -07:00
Matthew Stanciu
7bdaf107b7 [cli] Consume --no-clipboard (#8147)
#8085 removed the clipboard copy feature in `vc deploy`, along with the `--no-clipboard` flag. Right now, the CLI exits and returns an error when someone includes the `--no-clipboard` flag. This PR instead consumes the flag and warns the user that the flag is deprecated.
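
After this change, a deploy that still passes the flag proceeds and prints a warning, roughly (warning text taken from the diff below):

```
$ vercel deploy --no-clipboard
⚠️  The `--no-clipboard` option was ignored because it is the default behavior. Please remove it.
```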

2022-07-13 20:32:21 +00:00
Nathan Rajlich
8de100f0e1 [static-build] Return BOA v3 result without vercel build (#8146)
This will allow Build Output API v3 to be produced by a build script without the `ENABLE_VC_BUILD=1` env var being necessary.
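
In practice, a project's `build` script can now emit the Build Output API v3 layout directly and `@vercel/static-build` will return it as-is. A rough sketch of that layout (directory roles per the Build Output API; contents illustrative):

```
.vercel/output/
├── config.json    # at minimum: { "version": 3 }
├── static/        # static assets served as-is
└── functions/     # optional *.func directories for Serverless/Edge Functions
```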
2022-07-13 18:17:53 +00:00
Steven
38a6785859 Publish Stable
- vercel@27.1.1
2022-07-13 12:00:03 -04:00
Steven
c67d1a8525 [cli] Fix vercel build sort order (#8144)
- Fixes #8063
2022-07-13 11:58:56 -04:00
Matthew Stanciu
c5a7c574a2 [tests] Add missing function import in Project mock (#8143)
A function, `formatProvider`, wasn't imported in the `Project` mock endpoint. This wasn't caught before #8100 was merged and slipped past CI.

2022-07-12 21:35:27 +00:00
Sean Massa
d2f8d178f7 Publish Stable
- @vercel/build-utils@5.0.2
 - vercel@27.1.0
 - @vercel/client@12.1.1
 - @vercel/fs-detectors@2.0.0
 - @vercel/go@2.0.6
 - @vercel/hydrogen@0.0.3
 - @vercel/next@3.1.5
 - @vercel/node@2.4.3
 - @vercel/python@3.0.6
 - @vercel/redwood@1.0.7
 - @vercel/remix@1.0.8
 - @vercel/ruby@1.3.14
 - @vercel/static-build@1.0.6
2022-07-12 15:34:28 -05:00
Nathan Rajlich
f9a747764c [remix] Fix isMonorepo check (#8142)
`config.projectSettings` is not available when running `@vercel/remix` directly (i.e. when not using `vc build`), so the correct root directory value is now calculated from the `workPath` relative to the `repoRootPath` value.
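
A hedged sketch of the fallback described above (names are illustrative, not the actual `@vercel/remix` internals):

```ts
import { relative } from 'path';

// When `config.projectSettings.rootDirectory` is unavailable, derive the root
// directory from the paths the Builder already receives.
function getRootDirectory(repoRootPath: string, workPath: string): string | null {
  const rootDirectory = relative(repoRootPath, workPath);
  return rootDirectory === '' ? null : rootDirectory; // '' means workPath is the repo root
}

const isMonorepo = getRootDirectory('/repo', '/repo/apps/web') !== null; // true
```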
2022-07-12 20:10:51 +00:00
Matthew Stanciu
27d80f13cd [cli] Add ability to connect Git provider repo (#8100)
This PR adds a new subcommand, `vc git`, which allows you to connect a Git provider repository from your local `.git` config to your Vercel project (an example session is sketched below the usage list).

Usage:

- `vc git connect` – connects a Git provider repository
- `vc git disconnect` – disconnects a Git provider repository
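
A rough example of a connect session (output paraphrased from the CLI tests added in this PR):

```
$ vercel git connect
> Identified Git remote "origin": https://github.com/user/repo.git
> Connected GitHub repository user/repo!
```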

2022-07-12 17:55:40 +00:00
Nathan Rajlich
8c668c925d [fs-detectors] Remove "solidstart" framework from FS API v2 detection (#8134)
"solidstart" framework doesn't need to use the FS API v2 Builder anymore since:

  1) Newer versions output Build Output API v3
  2) Older versions that output FS API v2 will be handled by the compatbility shim that was added in https://github.com/vercel/vercel/pull/7690
2022-07-12 17:03:00 +00:00
Nathan Rajlich
4b1b33c143 [cli] Add createdAt to project settings in vc pull (#8138)
The `createdAt` property is checked in the `detectBuilders()` function, so it needs to be present in the local copy of the project settings.
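
Concretely, the settings block written to the local `.vercel/project.json` now carries the field, roughly like this (values illustrative):

```json
{
  "orgId": "team_dummy",
  "projectId": "prj_example",
  "settings": {
    "createdAt": 1656109539791,
    "framework": "nextjs"
  }
}
```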
2022-07-12 08:12:32 +00:00
Gal Schlezinger
a8d4147554 [next] Add EdgeRuntime provider environment variable to the builder (#8130)
This will enable Next.js to compile and set `vercel` as the edge runtime provider (`EdgeRuntime` global), which allows different libraries/customers to run different code depending on the runtime environment (`edge-runtime` vs `vercel`).
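
A hedged illustration of how library code could branch on that global (not taken from Next.js itself):

```ts
// `EdgeRuntime` is only defined inside an edge runtime; its value names the provider.
declare const EdgeRuntime: string | undefined;

export function describeRuntime(): string {
  if (typeof EdgeRuntime === 'string') {
    // e.g. "vercel" when deployed on Vercel, "edge-runtime" in local development
    return `edge runtime provided by ${EdgeRuntime}`;
  }
  return 'not running in an edge runtime';
}
```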

### Related Issues

- https://github.com/vercel/next.js/pull/38331

The Next.js feature was not merged yet, so it still can't be tested.

2022-07-12 06:33:14 +00:00
Gal Schlezinger
09339f494d [next][build-utils] allow to declare assets in EdgeFunction constructor (#8127)
There's a [Next.js PR](https://github.com/vercel/next.js/pull/38492) to include binary assets in Edge Functions. This PR makes it possible to declare assets in the `EdgeFunction` constructor.

This is a configuration-level setting, in contrast to WebAssembly modules, which the `middleware-manifest.json` converts into `import` statements. The asset generation is done in Webpack and is not dynamic; it's a static string. We could use the AST to replace it, but that risks:

* worse performance
* accidentally replacing a string that isn't an asset
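
A minimal sketch of declaring assets (field names follow the `EdgeFunction` diff further down; `deploymentTarget`, file contents, and asset paths are illustrative assumptions):

```ts
import { EdgeFunction, FileBlob } from '@vercel/build-utils';

const edgeFn = new EdgeFunction({
  name: 'middleware',
  deploymentTarget: 'v8-worker', // assumed value, not part of this diff
  entrypoint: 'index.js',
  files: {
    'index.js': new FileBlob({ data: Buffer.from('/* bundled edge code */') }),
  },
  envVarsInUse: [],
  // New in this PR: extra binary files bundled alongside the Edge Function code.
  assets: [{ name: 'font.ttf', path: 'assets/font.ttf' }],
});
```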
2022-07-11 23:13:21 +00:00
Steven
ee4d772ae9 [tests] Add retry to npm tests (#8133)
These tests fail occasionally on GH Actions due to flaky network I/O, so this PR adds retry logic.

https://github.com/vercel/vercel/runs/7289659634?check_suite_focus=true#step:13:328
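
The retry wrapper (visible in the test diff further down) is `async-retry`; roughly:

```ts
import retry from 'async-retry';

it('retries a flaky npm install', async () => {
  // `runNpmInstall`, `apiDir`, and `meta` are the same values used in the updated test below.
  await retry(
    async () => {
      const run = await runNpmInstall(apiDir, [], undefined, meta);
      expect(run).toEqual(true);
    },
    { maxRetryTime: 1000 } // give up after roughly one second of retries
  );
});
```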
2022-07-11 21:39:10 +00:00
Matthew Stanciu
61e8103404 [tests] Fix flaky Git meta test (#8132)
A test I wrote for a previous PR (https://github.com/vercel/vercel/pull/8112) was failing once in ~every 5 runs. This PR makes the test reliable.

2022-07-11 21:01:16 +00:00
Steven
fb4f477325 Publish Stable
- vercel@27.0.2
 - @vercel/fs-detectors@1.0.2
 - @vercel/node@2.4.2
2022-07-11 16:21:30 -04:00
P.B. To
016bff848e [fs-detectors] process detectFramework in parallel (#8128)
The Vercel API's `detect-framework` API contacts GitHub's API using its own file adapter. 

However, the `detectFramework` function in the Vercel CLI (which the API depends on) currently calls the file system adapter in series using a `for` loop. This results in significant lag in networked situations such as calling GitHub's API, since each call takes a couple hundred milliseconds.

I propose changing `detectFramework` to process the directories it scans in parallel.
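
An illustrative sketch of the serial vs. parallel approach (function and variable names are hypothetical, not the actual `@vercel/fs-detectors` internals):

```ts
// Serial: each networked check blocks the next one.
async function detectSerial(checks: Array<() => Promise<boolean>>): Promise<number> {
  for (let i = 0; i < checks.length; i++) {
    if (await checks[i]()) return i;
  }
  return -1;
}

// Parallel: issue all checks at once and pick the first match.
async function detectParallel(checks: Array<() => Promise<boolean>>): Promise<number> {
  const results = await Promise.all(checks.map(check => check()));
  return results.findIndex(Boolean);
}
```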

2022-07-11 20:16:04 +00:00
Steven
183e411f7c [cli] Remove DEBUG=corepack env var (#8131)
This debug log was originally added in #7871 because corepack has no output by default. In particular, it was nice to see that the first deployment was not stalled while the package manager was being installed.

That being said, this gets noisy really fast because cache detections also print a log line.
For example, here's a deployment that prints 3 times:

```
Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "npm@8.10.0" in package.json
Running "install" command: `npm install --prefix=.. --no-audit --engine-strict=false`...
2022-07-11T18:27:00.696Z corepack Reusing npm@8.10.0
356 packages are looking for funding
Running "npm run vercel-build"
2022-07-11T18:27:06.664Z corepack Reusing npm@8.10.0
> front@0.0.0 vercel-build
> npm run buildonly && npm run build:rss
2022-07-11T18:27:07.088Z corepack Reusing npm@8.10.0
> front@0.0.0 buildonly
> next build
```

I think it's best to let users add this env var themselves if they want to debug what corepack is doing, so this PR removes that environment variable.
2022-07-11 19:31:04 +00:00
Sean Massa
070e300148 [node] add links to edge error messages (#8048)
Add links to some Edge errors.

---

Follow up to: https://github.com/vercel/vercel/pull/8007
2022-07-11 18:50:15 +00:00
Steven
cbdf9b4a88 [cli] Fix beta label (#8129)
This PR is a follow-up to #7991, which incorrectly used an empty string instead of an empty array.

- Maybe related to #8125
2022-07-11 18:05:36 +00:00
110 changed files with 1949 additions and 496 deletions

View File

@@ -1,4 +1,5 @@
# https://prettier.io/docs/en/ignore.html
# ignore this file with an intentional syntax error
# ignore these files with an intentional syntax error
packages/cli/test/dev/fixtures/edge-function-error/api/edge-error-syntax.js
packages/cli/test/fixtures/unit/commands/build/node-error/api/typescript.ts

View File

@@ -31,7 +31,7 @@
"prettier": "2.6.2",
"ts-eager": "2.0.2",
"ts-jest": "28.0.5",
"turbo": "1.3.1"
"turbo": "1.3.2-canary.1"
},
"scripts": {
"lerna": "lerna",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "5.0.1",
"version": "5.0.3",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",

View File

@@ -33,6 +33,11 @@ export class EdgeFunction {
*/
envVarsInUse?: string[];
/**
* Extra binary files to be included in the edge function
*/
assets?: { name: string; path: string }[];
constructor(params: Omit<EdgeFunction, 'type'>) {
this.type = 'EdgeFunction';
this.name = params.name;
@@ -40,5 +45,6 @@ export class EdgeFunction {
this.entrypoint = params.entrypoint;
this.files = params.files;
this.envVarsInUse = params.envVarsInUse;
this.assets = params.assets;
}
}

View File

@@ -33,9 +33,6 @@ function getHint(isAuto = false) {
: `Please set "engines": { "node": "${range}" } in your \`package.json\` file to use Node.js ${major}.`;
}
const upstreamProvider =
'This change is the result of a decision made by an upstream infrastructure provider (AWS).';
export function getLatestNodeVersion() {
return allOptions[0];
}
@@ -75,7 +72,7 @@ export async function getSupportedNodeVersion(
throw new NowBuildError({
code: 'BUILD_UTILS_NODE_VERSION_DISCONTINUED',
link: 'http://vercel.link/node-version',
message: `${intro} ${getHint(isAuto)} ${upstreamProvider}`,
message: `${intro} ${getHint(isAuto)}`,
});
}
@@ -86,9 +83,9 @@ export async function getSupportedNodeVersion(
console.warn(
`Error: Node.js version ${
selection.range
} is deprecated. Deployments created on or after ${d} will fail to build. ${getHint(
} has reached End-of-Life. Deployments created on or after ${d} will fail to build. ${getHint(
isAuto
)} ${upstreamProvider}`
)}`
);
}

View File

@@ -1,6 +1,7 @@
import ms from 'ms';
import path from 'path';
import fs, { readlink } from 'fs-extra';
import retry from 'async-retry';
import { strict as assert, strictEqual } from 'assert';
import { createZip } from '../src/lambda';
import { getSupportedNodeVersion } from '../src/fs/node-version';
@@ -386,10 +387,10 @@ it('should warn for deprecated versions, soon to be discontinued', async () => {
12
);
expect(warningMessages).toStrictEqual([
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
]);
global.Date.now = realDateNow;
@@ -494,28 +495,43 @@ it('should only invoke `runNpmInstall()` once per `package.json` file (serial)',
const meta: Meta = {};
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
const apiDir = path.join(fixture, 'api');
const run1 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run1).toEqual(true);
expect(
(meta.runNpmInstallSet as Set<string>).has(
path.join(fixture, 'package.json')
)
).toEqual(true);
const run2 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run2).toEqual(false);
const run3 = await runNpmInstall(fixture, [], undefined, meta);
expect(run3).toEqual(false);
const retryOpts = { maxRetryTime: 1000 };
let run1, run2, run3;
await retry(async () => {
run1 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run1).toEqual(true);
expect(
(meta.runNpmInstallSet as Set<string>).has(
path.join(fixture, 'package.json')
)
).toEqual(true);
}, retryOpts);
await retry(async () => {
run2 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run2).toEqual(false);
}, retryOpts);
await retry(async () => {
run3 = await runNpmInstall(fixture, [], undefined, meta);
expect(run3).toEqual(false);
}, retryOpts);
});
it('should only invoke `runNpmInstall()` once per `package.json` file (parallel)', async () => {
const meta: Meta = {};
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
const apiDir = path.join(fixture, 'api');
const [run1, run2, run3] = await Promise.all([
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(fixture, [], undefined, meta),
]);
let results: [boolean, boolean, boolean] | undefined;
await retry(
async () => {
results = await Promise.all([
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(fixture, [], undefined, meta),
]);
},
{ maxRetryTime: 3000 }
);
const [run1, run2, run3] = results || [];
expect(run1).toEqual(true);
expect(run2).toEqual(false);
expect(run3).toEqual(false);

View File

@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "27.0.1",
"version": "27.1.5",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -42,16 +42,16 @@
"node": ">= 14"
},
"dependencies": {
"@vercel/build-utils": "5.0.1",
"@vercel/go": "2.0.5",
"@vercel/hydrogen": "0.0.2",
"@vercel/next": "3.1.4",
"@vercel/node": "2.4.1",
"@vercel/python": "3.0.5",
"@vercel/redwood": "1.0.6",
"@vercel/remix": "1.0.7",
"@vercel/ruby": "1.3.13",
"@vercel/static-build": "1.0.5",
"@vercel/build-utils": "5.0.3",
"@vercel/go": "2.0.7",
"@vercel/hydrogen": "0.0.4",
"@vercel/next": "3.1.7",
"@vercel/node": "2.4.4",
"@vercel/python": "3.0.7",
"@vercel/redwood": "1.0.8",
"@vercel/remix": "1.0.9",
"@vercel/ruby": "1.3.15",
"@vercel/static-build": "1.0.8",
"update-notifier": "5.1.0"
},
"devDependencies": {
@@ -96,9 +96,9 @@
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@types/yauzl-promise": "2.1.0",
"@vercel/client": "12.1.0",
"@vercel/frameworks": "1.1.0",
"@vercel/fs-detectors": "1.0.1",
"@vercel/client": "12.1.2",
"@vercel/frameworks": "1.1.1",
"@vercel/fs-detectors": "2.0.1",
"@vercel/ncc": "0.24.0",
"@zeit/fun": "0.11.2",
"@zeit/source-map-support": "0.6.2",

View File

@@ -48,9 +48,17 @@ import {
} from '../util/build/write-build-result';
import { importBuilders, BuilderWithPkg } from '../util/build/import-builders';
import { initCorepack, cleanupCorepack } from '../util/build/corepack';
import { sortBuilders } from '../util/build/sort-builders';
type BuildResult = BuildResultV2 | BuildResultV3;
interface SerializedBuilder extends Builder {
error?: Error;
require?: string;
requirePath?: string;
apiVersion: number;
}
const help = () => {
return console.log(`
${chalk.bold(`${cli.logo} ${cli.name} build`)}
@@ -198,7 +206,7 @@ export default async function main(client: Client): Promise<number> {
normalizePath(relative(workPath, f))
);
const routesResult = getTransformedRoutes({ nowConfig: vercelConfig || {} });
const routesResult = getTransformedRoutes(vercelConfig || {});
if (routesResult.error) {
output.prettyError(routesResult.error);
return 1;
@@ -296,32 +304,36 @@ export default async function main(client: Client): Promise<number> {
const ops: Promise<Error | void>[] = [];
// Write the `detectedBuilders` result to output dir
ops.push(
fs.writeJSON(
join(outputDir, 'builds.json'),
{
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
target,
argv: process.argv,
builds: builds.map(build => {
const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
const { builder, pkg: builderPkg } = builderWithPkg;
return {
require: builderPkg.name,
requirePath: builderWithPkg.path,
apiVersion: builder.version,
...build,
};
}),
},
{
spaces: 2,
const buildsJsonBuilds = new Map<Builder, SerializedBuilder>(
builds.map(build => {
const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
)
const { builder, pkg: builderPkg } = builderWithPkg;
return [
build,
{
require: builderPkg.name,
requirePath: builderWithPkg.path,
apiVersion: builder.version,
...build,
},
];
})
);
const buildsJson = {
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
target,
argv: process.argv,
builds: Array.from(buildsJsonBuilds.values()),
};
const buildsJsonPath = join(outputDir, 'builds.json');
const writeBuildsJsonPromise = fs.writeJSON(buildsJsonPath, buildsJson, {
spaces: 2,
});
ops.push(writeBuildsJsonPromise);
// The `meta` config property is re-used for each Builder
// invocation so that Builders can share state between
@@ -332,65 +344,95 @@ export default async function main(client: Client): Promise<number> {
};
// Execute Builders for detected entrypoints
// TODO: parallelize builds
// TODO: parallelize builds (except for frontend)
const sortedBuilders = sortBuilders(builds);
const buildResults: Map<Builder, BuildResult> = new Map();
const overrides: PathOverride[] = [];
const repoRootPath = cwd;
const rootPackageJsonPath = repoRootPath || workPath;
const corepackShimDir = await initCorepack({ cwd, rootPackageJsonPath });
const corepackShimDir = await initCorepack({ repoRootPath });
for (const build of builds) {
for (const build of sortedBuilders) {
if (typeof build.src !== 'string') continue;
const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
const { builder, pkg: builderPkg } = builderWithPkg;
const buildConfig: Config = {
outputDirectory: project.settings.outputDirectory ?? undefined,
...build.config,
projectSettings: project.settings,
installCommand: project.settings.installCommand ?? undefined,
devCommand: project.settings.devCommand ?? undefined,
buildCommand: project.settings.buildCommand ?? undefined,
framework: project.settings.framework,
nodeVersion: project.settings.nodeVersion,
};
const buildOptions: BuildOptions = {
files: filesMap,
entrypoint: build.src,
workPath,
repoRootPath,
config: buildConfig,
meta,
};
output.debug(
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
);
const buildResult = await builder.build(buildOptions);
try {
const { builder, pkg: builderPkg } = builderWithPkg;
// Store the build result to generate the final `config.json` after
// all builds have completed
buildResults.set(build, buildResult);
const buildConfig: Config = {
outputDirectory: project.settings.outputDirectory ?? undefined,
...build.config,
projectSettings: project.settings,
installCommand: project.settings.installCommand ?? undefined,
devCommand: project.settings.devCommand ?? undefined,
buildCommand: project.settings.buildCommand ?? undefined,
framework: project.settings.framework,
nodeVersion: project.settings.nodeVersion,
};
const buildOptions: BuildOptions = {
files: filesMap,
entrypoint: build.src,
workPath,
repoRootPath,
config: buildConfig,
meta,
};
output.debug(
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
);
const buildResult = await builder.build(buildOptions);
// Start flushing the file outputs to the filesystem asynchronously
ops.push(
writeBuildResult(
outputDir,
buildResult,
build,
builder,
builderPkg,
vercelConfig?.cleanUrls
).then(
override => {
if (override) overrides.push(override);
},
err => err
)
);
// Store the build result to generate the final `config.json` after
// all builds have completed
buildResults.set(build, buildResult);
// Start flushing the file outputs to the filesystem asynchronously
ops.push(
writeBuildResult(
outputDir,
buildResult,
build,
builder,
builderPkg,
vercelConfig?.cleanUrls
).then(
override => {
if (override) overrides.push(override);
},
err => err
)
);
} catch (err: any) {
const configJson = {
version: 3,
};
const configJsonPromise = fs.writeJSON(
join(outputDir, 'config.json'),
configJson,
{ spaces: 2 }
);
await Promise.all([writeBuildsJsonPromise, configJsonPromise]);
const buildJsonBuild = buildsJsonBuilds.get(build);
if (buildJsonBuild) {
buildJsonBuild.error = {
name: err.name,
message: err.message,
stack: err.stack,
...err,
};
await fs.writeJSON(buildsJsonPath, buildsJson, {
spaces: 2,
});
}
return 1;
}
}
if (corepackShimDir) {

View File

@@ -15,6 +15,7 @@ export const help = () => `
)}
dev Start a local development server
env Manages the Environment Variables for your current Project
git Manage Git provider repository for your current Project
init [example] Initialize an example project
ls | list [app] Lists deployments
inspect [id] Displays information related to a deployment

View File

@@ -64,7 +64,7 @@ import { help } from './args';
import { getDeploymentChecks } from '../../util/deploy/get-deployment-checks';
import parseTarget from '../../util/deploy/parse-target';
import getPrebuiltJson from '../../util/deploy/get-prebuilt-json';
import { createGitMeta } from '../../util/deploy/create-git-meta';
import { createGitMeta } from '../../util/create-git-meta';
export default async (client: Client) => {
const { output } = client;
@@ -95,6 +95,7 @@ export default async (client: Client) => {
// deprecated
'--name': String,
'-n': '--name',
'--no-clipboard': Boolean,
'--target': String,
});
} catch (error) {
@@ -183,6 +184,17 @@ export default async (client: Client) => {
);
}
if (argv['--no-clipboard']) {
output.print(
`${prependEmoji(
`The ${param(
'--no-clipboard'
)} option was ignored because it is the default behavior. Please remove it.`,
emoji('warning')
)}\n`
);
}
// build `target`
const target = parseTarget(output, argv['--target'], argv['--prod']);
if (typeof target === 'number') {

View File

@@ -0,0 +1,168 @@
import chalk from 'chalk';
import { join } from 'path';
import { Org, Project } from '../../types';
import Client from '../../util/client';
import { parseGitConfig, pluckRemoteUrl } from '../../util/create-git-meta';
import confirm from '../../util/input/confirm';
import { Output } from '../../util/output';
import link from '../../util/output/link';
import { getCommandName } from '../../util/pkg-name';
import {
connectGitProvider,
disconnectGitProvider,
formatProvider,
parseRepoUrl,
} from '../../util/projects/connect-git-provider';
import validatePaths from '../../util/validate-paths';
export default async function connect(
client: Client,
argv: any,
args: string[],
project: Project | undefined,
org: Org | undefined
) {
const { output } = client;
const confirm = Boolean(argv['--confirm']);
if (args.length !== 0) {
output.error(
`Invalid number of arguments. Usage: ${chalk.cyan(
`${getCommandName('project connect')}`
)}`
);
return 2;
}
if (!project || !org) {
output.error(
`Can't find \`org\` or \`project\`. Make sure your current directory is linked to a Vercel projet by running ${getCommandName(
'link'
)}.`
);
return 1;
}
let paths = [process.cwd()];
const validate = await validatePaths(client, paths);
if (!validate.valid) {
return validate.exitCode;
}
const { path } = validate;
const gitProviderLink = project.link;
client.config.currentTeam = org.type === 'team' ? org.id : undefined;
// get project from .git
const gitConfigPath = join(path, '.git/config');
const gitConfig = await parseGitConfig(gitConfigPath, output);
if (!gitConfig) {
output.error(
`No local git repo found. Run ${chalk.cyan(
'`git clone <url>`'
)} to clone a remote Git repository first.`
);
return 1;
}
const remoteUrl = pluckRemoteUrl(gitConfig);
if (!remoteUrl) {
output.error(
`No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run ${chalk.cyan(
'`git remote --help`'
)} for more details.`
);
return 1;
}
output.log(`Identified Git remote "origin": ${link(remoteUrl)}`);
const parsedUrl = parseRepoUrl(remoteUrl);
if (!parsedUrl) {
output.error(
`Failed to parse Git repo data from the following remote URL in your Git config: ${link(
remoteUrl
)}`
);
return 1;
}
const { provider, org: gitOrg, repo } = parsedUrl;
const repoPath = `${gitOrg}/${repo}`;
let connectedRepoPath;
if (!gitProviderLink) {
const connect = await connectGitProvider(
client,
org,
project.id,
provider,
repoPath
);
if (typeof connect === 'number') {
return connect;
}
} else {
const connectedProvider = gitProviderLink.type;
const connectedOrg = gitProviderLink.org;
const connectedRepo = gitProviderLink.repo;
connectedRepoPath = `${connectedOrg}/${connectedRepo}`;
const isSameRepo =
connectedProvider === provider &&
connectedOrg === gitOrg &&
connectedRepo === repo;
if (isSameRepo) {
output.log(
`${chalk.cyan(connectedRepoPath)} is already connected to your project.`
);
return 1;
}
const shouldReplaceRepo = await confirmRepoConnect(
client,
output,
confirm,
connectedRepoPath
);
if (!shouldReplaceRepo) {
return 0;
}
await disconnectGitProvider(client, org, project.id);
const connect = await connectGitProvider(
client,
org,
project.id,
provider,
repoPath
);
if (typeof connect === 'number') {
return connect;
}
}
output.log(
`Connected ${formatProvider(provider)} repository ${chalk.cyan(repoPath)}!`
);
return 0;
}
async function confirmRepoConnect(
client: Client,
output: Output,
yes: boolean,
connectedRepoPath: string
) {
let shouldReplaceProject = yes;
if (!shouldReplaceProject) {
shouldReplaceProject = await confirm(
client,
`Looks like you already have a repository connected: ${chalk.cyan(
connectedRepoPath
)}. Do you want to replace it?`,
true
);
if (!shouldReplaceProject) {
output.log(`Aborted. Repo not connected.`);
}
}
return shouldReplaceProject;
}

View File

@@ -0,0 +1,58 @@
import chalk from 'chalk';
import { Org, Project } from '../../types';
import Client from '../../util/client';
import confirm from '../../util/input/confirm';
import { getCommandName } from '../../util/pkg-name';
import { disconnectGitProvider } from '../../util/projects/connect-git-provider';
export default async function disconnect(
client: Client,
args: string[],
project: Project | undefined,
org: Org | undefined
) {
const { output } = client;
if (args.length !== 0) {
output.error(
`Invalid number of arguments. Usage: ${chalk.cyan(
`${getCommandName('project disconnect')}`
)}`
);
return 2;
}
if (!project || !org) {
output.error('An unexpected error occurred.');
return 1;
}
if (project.link) {
const { org: linkOrg, repo } = project.link;
output.print(
`Your Vercel project will no longer create deployments when you push to this repository.\n`
);
const confirmDisconnect = await confirm(
client,
`Are you sure you want to disconnect ${chalk.cyan(
`${linkOrg}/${repo}`
)} from your project?`,
false
);
if (confirmDisconnect) {
await disconnectGitProvider(client, org, project.id);
output.log(`Disconnected ${chalk.cyan(`${linkOrg}/${repo}`)}.`);
} else {
output.log('Aborted.');
}
} else {
output.error(
`No Git repository connected. Run ${getCommandName(
'project connect'
)} to connect one.`
);
return 1;
}
return 0;
}

View File

@@ -0,0 +1,94 @@
import chalk from 'chalk';
import Client from '../../util/client';
import { ensureLink } from '../../util/ensure-link';
import getArgs from '../../util/get-args';
import getInvalidSubcommand from '../../util/get-invalid-subcommand';
import handleError from '../../util/handle-error';
import logo from '../../util/output/logo';
import { getPkgName } from '../../util/pkg-name';
import validatePaths from '../../util/validate-paths';
import connect from './connect';
import disconnect from './disconnect';
const help = () => {
console.log(`
${chalk.bold(`${logo} ${getPkgName()} git`)} <command>
${chalk.dim('Commands:')}
connect Connect your Git config "origin" remote as a Git provider to your project
disconnect Disconnect the Git provider repository from your project
${chalk.dim('Options:')}
-h, --help Output usage information
-t ${chalk.bold.underline('TOKEN')}, --token=${chalk.bold.underline(
'TOKEN'
)} Login token
${chalk.dim('Examples:')}
${chalk.gray('')} Connect a Git provider repository
${chalk.cyan(`$ ${getPkgName()} git connect`)}
${chalk.gray('')} Disconnect the Git provider repository
${chalk.cyan(`$ ${getPkgName()} git disconnect`)}
`);
};
const COMMAND_CONFIG = {
connect: ['connect'],
disconnect: ['disconnect'],
};
export default async function main(client: Client) {
let argv: any;
let subcommand: string | string[];
try {
argv = getArgs(client.argv.slice(2), {
'--confirm': Boolean,
});
} catch (error) {
handleError(error);
return 1;
}
if (argv['--help']) {
help();
return 2;
}
argv._ = argv._.slice(1);
subcommand = argv._[0];
const args = argv._.slice(1);
const confirm = Boolean(argv['--confirm']);
const { output } = client;
let paths = [process.cwd()];
const pathValidation = await validatePaths(client, paths);
if (!pathValidation.valid) {
return pathValidation.exitCode;
}
const { path } = pathValidation;
const linkedProject = await ensureLink('git', client, path, confirm);
if (typeof linkedProject === 'number') {
return linkedProject;
}
const { org, project } = linkedProject;
switch (subcommand) {
case 'connect':
return await connect(client, argv, args, project, org);
case 'disconnect':
return await disconnect(client, args, project, org);
default:
output.error(getInvalidSubcommand(COMMAND_CONFIG));
help();
return 2;
}
}

View File

@@ -14,6 +14,7 @@ export default new Map([
['domain', 'domains'],
['domains', 'domains'],
['env', 'env'],
['git', 'git'],
['help', 'help'],
['init', 'init'],
['inspect', 'inspect'],

View File

@@ -6,7 +6,6 @@ import getScope from '../../util/get-scope';
import handleError from '../../util/handle-error';
import logo from '../../util/output/logo';
import { getPkgName } from '../../util/pkg-name';
import validatePaths from '../../util/validate-paths';
import add from './add';
import list from './list';
import rm from './rm';
@@ -48,7 +47,6 @@ const COMMAND_CONFIG = {
ls: ['ls', 'list'],
add: ['add'],
rm: ['rm', 'remove'],
connect: ['connect'],
};
export default async function main(client: Client) {
@@ -59,7 +57,6 @@ export default async function main(client: Client) {
argv = getArgs(client.argv.slice(2), {
'--next': Number,
'-N': '--next',
'--yes': Boolean,
});
} catch (error) {
handleError(error);
@@ -76,12 +73,6 @@ export default async function main(client: Client) {
const args = argv._.slice(1);
const { output } = client;
let paths = [process.cwd()];
const pathValidation = await validatePaths(client, paths);
if (!pathValidation.valid) {
return pathValidation.exitCode;
}
let contextName = '';
try {

View File

@@ -173,7 +173,7 @@ const main = async () => {
const targetOrSubcommand = argv._[2];
// Currently no beta commands - add here as needed
const betaCommands: string[] = [''];
const betaCommands: string[] = [];
if (betaCommands.includes(targetOrSubcommand)) {
console.log(
`${chalk.grey(
@@ -632,6 +632,9 @@ const main = async () => {
case 'env':
func = require('./commands/env').default;
break;
case 'git':
func = require('./commands/git').default;
break;
case 'init':
func = require('./commands/init').default;
break;

View File

@@ -248,12 +248,34 @@ export interface ProjectEnvVariable {
gitBranch?: string;
}
export interface DeployHook {
createdAt: number;
id: string;
name: string;
ref: string;
url: string;
}
export interface ProjectLinkData {
type: string;
repo: string;
repoId: number;
org?: string;
gitCredentialId: string;
productionBranch?: string | null;
sourceless: boolean;
createdAt: number;
updatedAt: number;
deployHooks?: DeployHook[];
}
export interface Project extends ProjectSettings {
id: string;
name: string;
accountId: string;
updatedAt: number;
createdAt: number;
link?: ProjectLinkData;
alias?: ProjectAliasTarget[];
latestDeployments?: Partial<Deployment>[];
}

View File

@@ -6,11 +6,9 @@ import { VERCEL_DIR } from '../projects/link';
import readJSONFile from '../read-json-file';
export async function initCorepack({
cwd,
rootPackageJsonPath,
repoRootPath,
}: {
cwd: string;
rootPackageJsonPath: string;
repoRootPath: string;
}): Promise<string | null> {
if (process.env.ENABLE_EXPERIMENTAL_COREPACK !== '1') {
// Since corepack is experimental, we need to exit early
@@ -18,7 +16,7 @@ export async function initCorepack({
return null;
}
const pkg = await readJSONFile<PackageJson>(
join(rootPackageJsonPath, 'package.json')
join(repoRootPath, 'package.json')
);
if (pkg instanceof CantParseJSONFile) {
console.warn(
@@ -32,16 +30,13 @@ export async function initCorepack({
console.log(
`Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "${pkg.packageManager}" in package.json`
);
const corepackRootDir = join(cwd, VERCEL_DIR, 'cache', 'corepack');
const corepackRootDir = join(repoRootPath, VERCEL_DIR, 'cache', 'corepack');
const corepackHomeDir = join(corepackRootDir, 'home');
const corepackShimDir = join(corepackRootDir, 'shim');
await fs.mkdirp(corepackHomeDir);
await fs.mkdirp(corepackShimDir);
process.env.COREPACK_HOME = corepackHomeDir;
process.env.PATH = `${corepackShimDir}${delimiter}${process.env.PATH}`;
process.env.DEBUG = process.env.DEBUG
? `corepack,${process.env.DEBUG}`
: 'corepack';
const pkgManagerName = pkg.packageManager.split('@')[0];
// We must explicitly call `corepack enable npm` since `corepack enable`
// doesn't work with npm. See https://github.com/nodejs/corepack/pull/24
@@ -72,11 +67,4 @@ export function cleanupCorepack(corepackShimDir: string) {
''
);
}
if (process.env.DEBUG) {
if (process.env.DEBUG === 'corepack') {
delete process.env.DEBUG;
} else {
process.env.DEBUG = process.env.DEBUG.replace('corepack,', '');
}
}
}

View File

@@ -0,0 +1,12 @@
import frameworkList from '@vercel/frameworks';
export function sortBuilders<B extends { use: string }>(builds: B[]): B[] {
const frontendRuntimeSet = new Set(
frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
);
const toNumber = (build: B) => (frontendRuntimeSet.has(build.use) ? 0 : 1);
return builds.sort((build1, build2) => {
return toNumber(build1) - toNumber(build2);
});
}

View File

@@ -1,6 +1,14 @@
import fs from 'fs-extra';
import mimeTypes from 'mime-types';
import { basename, dirname, extname, join, relative, resolve } from 'path';
import {
basename,
dirname,
extname,
join,
relative,
resolve,
posix,
} from 'path';
import {
Builder,
BuildResultV2,
@@ -20,6 +28,7 @@ import pipe from 'promisepipe';
import { unzip } from './unzip';
import { VERCEL_DIR } from '../projects/link';
const { normalize } = posix;
export const OUTPUT_DIR = join(VERCEL_DIR, 'output');
export async function writeBuildResult(
@@ -67,6 +76,13 @@ export interface PathOverride {
path?: string;
}
/**
* Remove duplicate slashes as well as leading/trailing slashes.
*/
function stripDuplicateSlashes(path: string): string {
return normalize(path).replace(/(^\/|\/$)/g, '');
}
/**
* Writes the output from the `build()` return value of a v2 Builder to
* the filesystem.
@@ -84,16 +100,17 @@ async function writeBuildResultV2(
const lambdas = new Map<Lambda, string>();
const overrides: Record<string, PathOverride> = {};
for (const [path, output] of Object.entries(buildResult.output)) {
const normalizedPath = stripDuplicateSlashes(path);
if (isLambda(output)) {
await writeLambda(outputDir, output, path, lambdas);
await writeLambda(outputDir, output, normalizedPath, lambdas);
} else if (isPrerender(output)) {
await writeLambda(outputDir, output.lambda, path, lambdas);
await writeLambda(outputDir, output.lambda, normalizedPath, lambdas);
// Write the fallback file alongside the Lambda directory
let fallback = output.fallback;
if (fallback) {
const ext = getFileExtension(fallback);
const fallbackName = `${path}.prerender-fallback${ext}`;
const fallbackName = `${normalizedPath}.prerender-fallback${ext}`;
const fallbackPath = join(outputDir, 'functions', fallbackName);
const stream = fallback.toStream();
await pipe(
@@ -109,7 +126,7 @@ async function writeBuildResultV2(
const prerenderConfigPath = join(
outputDir,
'functions',
`${path}.prerender-config.json`
`${normalizedPath}.prerender-config.json`
);
const prerenderConfig = {
...output,
@@ -118,12 +135,20 @@ async function writeBuildResultV2(
};
await fs.writeJSON(prerenderConfigPath, prerenderConfig, { spaces: 2 });
} else if (isFile(output)) {
await writeStaticFile(outputDir, output, path, overrides, cleanUrls);
await writeStaticFile(
outputDir,
output,
normalizedPath,
overrides,
cleanUrls
);
} else if (isEdgeFunction(output)) {
await writeEdgeFunction(outputDir, output, path);
await writeEdgeFunction(outputDir, output, normalizedPath);
} else {
throw new Error(
`Unsupported output type: "${(output as any).type}" for ${path}`
`Unsupported output type: "${
(output as any).type
}" for ${normalizedPath}`
);
}
}
@@ -145,9 +170,9 @@ async function writeBuildResultV3(
throw new Error(`Expected "build.src" to be a string`);
}
const ext = extname(src);
const path = build.config?.zeroConfig
? src.substring(0, src.length - ext.length)
: src;
const path = stripDuplicateSlashes(
build.config?.zeroConfig ? src.substring(0, src.length - ext.length) : src
);
if (isLambda(output)) {
await writeLambda(outputDir, output, path);
} else if (isEdgeFunction(output)) {

View File

@@ -3,8 +3,8 @@ import { join } from 'path';
import ini from 'ini';
import git from 'git-last-commit';
import { exec } from 'child_process';
import { GitMetadata } from '../../types';
import { Output } from '../output';
import { GitMetadata } from '../types';
import { Output } from './output';
export function isDirty(directory: string, output: Output): Promise<boolean> {
return new Promise(resolve => {
@@ -33,21 +33,31 @@ function getLastCommit(directory: string): Promise<git.Commit> {
});
}
export async function parseGitConfig(configPath: string, output: Output) {
try {
return ini.parse(await fs.readFile(configPath, 'utf-8'));
} catch (error) {
output.debug(`Error while parsing repo data: ${error.message}`);
}
}
export function pluckRemoteUrl(gitConfig: {
[key: string]: any;
}): string | undefined {
// Assuming "origin" is the remote url that the user would want to use
return gitConfig['remote "origin"']?.url;
}
export async function getRemoteUrl(
configPath: string,
output: Output
): Promise<string | null> {
let gitConfig;
try {
gitConfig = ini.parse(await fs.readFile(configPath, 'utf-8'));
} catch (error) {
output.debug(`Error while parsing repo data: ${error.message}`);
}
let gitConfig = await parseGitConfig(configPath, output);
if (!gitConfig) {
return null;
}
const originUrl: string = gitConfig['remote "origin"']?.url;
const originUrl = pluckRemoteUrl(gitConfig);
if (originUrl) {
return originUrl;
}

View File

@@ -558,9 +558,8 @@ export default class DevServer {
]);
await this.validateVercelConfig(vercelConfig);
const { error: routeError, routes: maybeRoutes } = getTransformedRoutes({
nowConfig: vercelConfig,
});
const { error: routeError, routes: maybeRoutes } =
getTransformedRoutes(vercelConfig);
if (routeError) {
this.output.prettyError(routeError);
await this.exit();

View File

@@ -1,4 +1,3 @@
import inquirer from 'inquirer';
import Client from '../client';
import getUser from '../get-user';
import getTeams from '../teams/get-teams';
@@ -43,7 +42,7 @@ export default async function selectOrg(
return choices[defaultOrgIndex].value;
}
const answers = await inquirer.prompt({
const answers = await client.prompt({
type: 'list',
name: 'org',
message: question,

View File

@@ -0,0 +1,117 @@
import Client from '../client';
import { stringify } from 'qs';
import { Org } from '../../types';
import chalk from 'chalk';
import link from '../output/link';
export async function disconnectGitProvider(
client: Client,
org: Org,
projectId: string
) {
const fetchUrl = `/v4/projects/${projectId}/link?${stringify({
teamId: org.type === 'team' ? org.id : undefined,
})}`;
return client.fetch(fetchUrl, {
method: 'DELETE',
headers: {
'Content-Type': 'application/json',
},
});
}
export async function connectGitProvider(
client: Client,
org: Org,
projectId: string,
type: string,
repo: string
) {
const fetchUrl = `/v4/projects/${projectId}/link?${stringify({
teamId: org.type === 'team' ? org.id : undefined,
})}`;
try {
return await client.fetch(fetchUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
type,
repo,
}),
});
} catch (err) {
if (
err.meta?.action === 'Install GitHub App' ||
err.code === 'repo_not_found'
) {
client.output.error(
`Failed to link ${chalk.cyan(
repo
)}. Make sure there aren't any typos and that you have access to the repository if it's private.`
);
} else if (err.action === 'Add a Login Connection') {
client.output.error(
err.message.replace(repo, chalk.cyan(repo)) +
`\nVisit ${link(err.link)} for more information.`
);
} else {
client.output.error(
`Failed to connect the ${formatProvider(
type
)} repository ${repo}.\n${err}`
);
}
return 1;
}
}
export function formatProvider(type: string): string {
switch (type) {
case 'github':
return 'GitHub';
case 'gitlab':
return 'GitLab';
case 'bitbucket':
return 'Bitbucket';
default:
return type;
}
}
export function parseRepoUrl(originUrl: string): {
provider: string;
org: string;
repo: string;
} | null {
const isSSH = originUrl.startsWith('git@');
// Matches all characters between (// or @) and (.com or .org)
// eslint-disable-next-line prefer-named-capture-group
const provider = /(?<=(\/\/|@)).*(?=(\.com|\.org))/.exec(originUrl);
if (!provider) {
return null;
}
let org;
let repo;
if (isSSH) {
org = originUrl.split(':')[1].split('/')[0];
repo = originUrl.split('/')[1]?.replace('.git', '');
} else {
// Assume https:// or git://
org = originUrl.split('/')[3];
repo = originUrl.split('/')[4]?.replace('.git', '');
}
if (!org || !repo) {
return null;
}
return {
provider: provider[0],
org,
repo,
};
}

View File

@@ -5,6 +5,7 @@ import { join } from 'path';
export type ProjectLinkAndSettings = ProjectLink & {
settings: {
createdAt: Project['createdAt'];
installCommand: Project['installCommand'];
buildCommand: Project['buildCommand'];
devCommand: Project['devCommand'];
@@ -28,6 +29,7 @@ export async function writeProjectSettings(
projectId: project.id,
orgId: org.id,
settings: {
createdAt: project.createdAt,
framework: project.framework,
devCommand: project.devCommand,
installCommand: project.installCommand,

View File

@@ -0,0 +1,7 @@
{
"orgId": ".",
"projectId": ".",
"settings": {
"framework": null
}
}

View File

@@ -0,0 +1 @@
export default (req, res) => res.end('Vercel');

View File

@@ -0,0 +1 @@
module.exports = (req, res) => res.end('Vercel');

View File

@@ -0,0 +1 @@
export default (req, res) => res.end('Vercel');

View File

@@ -0,0 +1,4 @@
import { IncomingMessage, ServerResponse } from 'http';
// Intentional syntax error to make the build fail
export default (req: IncomingMessage, res: ServerResponse => res.end('Vercel');

View File

@@ -1,9 +1,17 @@
const { FileBlob } = require('@vercel/build-utils');
const { FileBlob, Lambda } = require('@vercel/build-utils');
exports.build = async () => {
const file = new FileBlob({
data: Buffer.from('file contents')
});
const output = { file };
const lambda = new Lambda({
files: {},
runtime: 'provided',
handler: 'example.js'
})
const output = {
file,
'withTrailingSlash/': lambda
};
return { output };
};

View File

@@ -0,0 +1 @@
!.vercel

View File

@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "bad-remote-url"
}

View File

@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = bababooey
fetch = +refs/heads/*:refs/remotes/origin/*

View File

@@ -0,0 +1 @@
!.vercel

View File

@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "existing-connection"
}

View File

@@ -0,0 +1 @@
ref: refs/heads/master

View File

@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/user2/repo2
fetch = +refs/heads/*:refs/remotes/origin/*

View File

@@ -0,0 +1 @@
Unnamed repository; edit this file 'description' to name the repository.

View File

@@ -0,0 +1 @@
!.vercel

View File

@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "invalid-repo"
}

View File

@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/laksfj/asdgklsadkl
fetch = +refs/heads/*:refs/remotes/origin/*

View File

@@ -0,0 +1 @@
!.vercel

View File

@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "new-connection"
}

View File

@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/user/repo
fetch = +refs/heads/*:refs/remotes/origin/*

View File

@@ -0,0 +1 @@
!.vercel

View File

@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "no-git-config"
}

View File

@@ -0,0 +1 @@
!.vercel

View File

@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "no-remote-url"
}

View File

@@ -0,0 +1,7 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true

View File

@@ -0,0 +1 @@
!.vercel

View File

@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "new-connection"
}

View File

@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/user/repo
fetch = +refs/heads/*:refs/remotes/origin/*

View File

@@ -0,0 +1,2 @@
!.vercel
.vercel

View File

@@ -0,0 +1 @@
ref: refs/heads/master

View File

@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/user/repo.git
fetch = +refs/heads/*:refs/remotes/origin/*

View File

@@ -0,0 +1 @@
Unnamed repository; edit this file 'description' to name the repository.

View File

@@ -5,17 +5,19 @@ export function readOutputStream(
length: number = 3
): Promise<string> {
return new Promise((resolve, reject) => {
const chunks: Buffer[] = [];
let output: string = '';
let lines = 0;
const timeout = setTimeout(() => {
reject();
}, 3000);
client.stderr.resume();
client.stderr.on('data', chunk => {
chunks.push(chunk);
if (chunks.length === length) {
output += chunk.toString();
lines++;
if (lines === length) {
clearTimeout(timeout);
resolve(chunks.toString().replace(/,/g, ''));
resolve(output);
}
});
client.stderr.on('error', reject);

View File

@@ -1,5 +1,6 @@
import { client } from './client';
import { Project } from '../../src/types';
import { formatProvider } from '../../src/util/projects/connect-git-provider';
const envs = [
{

View File

@@ -589,8 +589,6 @@ describe('build', () => {
const output = join(cwd, '.vercel/output');
try {
process.chdir(cwd);
client.stderr.pipe(process.stderr);
client.setArgv('build');
const exitCode = await build(client);
expect(exitCode).toEqual(0);
@@ -614,6 +612,40 @@ describe('build', () => {
expect(await fs.readFile(join(output, 'static/file'), 'utf8')).toEqual(
'file contents'
);
// "functions" directory has output Functions
const functions = await fs.readdir(join(output, 'functions'));
expect(functions.sort()).toEqual(['withTrailingSlash.func']);
} finally {
process.chdir(originalCwd);
delete process.env.__VERCEL_BUILD_RUNNING;
}
});
it('should store Builder error in `builds.json`', async () => {
const cwd = fixture('node-error');
const output = join(cwd, '.vercel/output');
try {
process.chdir(cwd);
const exitCode = await build(client);
expect(exitCode).toEqual(1);
// `builds.json` contains "error" build
const builds = await fs.readJSON(join(output, 'builds.json'));
expect(builds.builds).toHaveLength(4);
const errorBuilds = builds.builds.filter((b: any) => 'error' in b);
expect(errorBuilds).toHaveLength(1);
expect(errorBuilds[0].error.name).toEqual('Error');
expect(errorBuilds[0].error.message).toMatch(`TS1005`);
expect(errorBuilds[0].error.message).toMatch(`',' expected.`);
expect(errorBuilds[0].error.hideStackTrace).toEqual(true);
expect(errorBuilds[0].error.code).toEqual('NODE_TYPESCRIPT_ERROR');
// `config.json`` contains `version`
const configJson = await fs.readJSON(join(output, 'config.json'));
expect(configJson.version).toBe(3);
} finally {
process.chdir(originalCwd);
delete process.env.__VERCEL_BUILD_RUNNING;
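
For context, this is the shape the new test asserts for an errored build entry in `.vercel/output/builds.json`. The object below is reconstructed only from the expectations above and is illustrative; the real file contains additional fields per build, a top-level `builds` array (length 4 in this fixture), and a sibling `config.json` with `version: 3`.

```ts
// Illustrative only — fields taken from the assertions above.
const erroredBuild = {
  error: {
    name: 'Error',
    // The full message comes from the TypeScript compiler; only the
    // asserted fragments are shown here.
    message: `... TS1005: ',' expected. ...`,
    hideStackTrace: true,
    code: 'NODE_TYPESCRIPT_ERROR',
  },
};
```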

View File

@@ -0,0 +1,380 @@
import { join } from 'path';
import fs from 'fs-extra';
import { useUser } from '../../mocks/user';
import { useTeams } from '../../mocks/team';
import { defaultProject, useProject } from '../../mocks/project';
import { client } from '../../mocks/client';
import git from '../../../src/commands/git';
import { Project } from '../../../src/types';
describe('git', () => {
describe('connect', () => {
const originalCwd = process.cwd();
const fixture = (name: string) =>
join(__dirname, '../../fixtures/unit/commands/git/connect', name);
it('connects an unlinked project', async () => {
const cwd = fixture('unlinked');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
id: 'unlinked',
name: 'unlinked',
});
client.setArgv('projects', 'connect');
const gitPromise = git(client);
await expect(client.stderr).toOutput('Set up');
client.stdin.write('y\n');
await expect(client.stderr).toOutput(
'Which scope should contain your project?'
);
client.stdin.write('\r');
await expect(client.stderr).toOutput('Found project');
client.stdin.write('y\n');
await expect(client.stderr).toOutput(
`Identified Git remote "origin": https://github.com/user/repo.git`
);
const exitCode = await gitPromise;
await expect(client.stderr).toOutput(
'Connected GitHub repository user/repo!'
);
expect(exitCode).toEqual(0);
const project: Project = await client.fetch(`/v8/projects/unlinked`);
expect(project.link).toMatchObject({
type: 'github',
repo: 'user/repo',
repoId: 1010,
gitCredentialId: '',
sourceless: true,
createdAt: 1656109539791,
updatedAt: 1656109539791,
});
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
it('should fail when there is no git config', async () => {
const cwd = fixture('no-git-config');
try {
process.chdir(cwd);
useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
id: 'no-git-config',
name: 'no-git-config',
});
client.setArgv('projects', 'connect', '--confirm');
const exitCode = await git(client);
expect(exitCode).toEqual(1);
await expect(client.stderr).toOutput(
`Error! No local git repo found. Run \`git clone <url>\` to clone a remote Git repository first.\n`
);
} finally {
process.chdir(originalCwd);
}
});
it('should fail when there is no remote url', async () => {
const cwd = fixture('no-remote-url');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
id: 'no-remote-url',
name: 'no-remote-url',
});
client.setArgv('projects', 'connect', '--confirm');
const exitCode = await git(client);
expect(exitCode).toEqual(1);
await expect(client.stderr).toOutput(
`Error! No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run \`git remote --help\` for more details.`
);
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
it('should fail when the remote url is bad', async () => {
const cwd = fixture('bad-remote-url');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
id: 'bad-remote-url',
name: 'bad-remote-url',
});
client.setArgv('projects', 'connect', '--confirm');
const exitCode = await git(client);
expect(exitCode).toEqual(1);
await expect(client.stderr).toOutput(
`Identified Git remote "origin": bababooey`
);
await expect(client.stderr).toOutput(
`Error! Failed to parse Git repo data from the following remote URL in your Git config: bababooey\n`
);
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
it('should connect a repo to a project that is not already connected', async () => {
const cwd = fixture('new-connection');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
id: 'new-connection',
name: 'new-connection',
});
client.setArgv('projects', 'connect', '--confirm');
const gitPromise = git(client);
await expect(client.stderr).toOutput(
`Identified Git remote "origin": https://github.com/user/repo`
);
await expect(client.stderr).toOutput(
`> Connected GitHub repository user/repo!\n`
);
const exitCode = await gitPromise;
expect(exitCode).toEqual(0);
const project: Project = await client.fetch(
`/v8/projects/new-connection`
);
expect(project.link).toMatchObject({
type: 'github',
repo: 'user/repo',
repoId: 1010,
gitCredentialId: '',
sourceless: true,
createdAt: 1656109539791,
updatedAt: 1656109539791,
});
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
it('should replace an old connection with a new one', async () => {
const cwd = fixture('existing-connection');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
const project = useProject({
...defaultProject,
id: 'existing-connection',
name: 'existing-connection',
});
project.project.link = {
type: 'github',
repo: 'repo',
org: 'user',
repoId: 1010,
gitCredentialId: '',
sourceless: true,
createdAt: 1656109539791,
updatedAt: 1656109539791,
};
client.setArgv('projects', 'connect', '--confirm');
const gitPromise = git(client);
await expect(client.stderr).toOutput(
`Identified Git remote "origin": https://github.com/user2/repo2`
);
await expect(client.stderr).toOutput(
`> Connected GitHub repository user2/repo2!\n`
);
const exitCode = await gitPromise;
expect(exitCode).toEqual(0);
const newProjectData: Project = await client.fetch(
`/v8/projects/existing-connection`
);
expect(newProjectData.link).toMatchObject({
type: 'github',
repo: 'user2/repo2',
repoId: 1010,
gitCredentialId: '',
sourceless: true,
createdAt: 1656109539791,
updatedAt: 1656109539791,
});
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
it('should exit when an already-connected repo is connected', async () => {
const cwd = fixture('new-connection');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
const project = useProject({
...defaultProject,
id: 'new-connection',
name: 'new-connection',
});
project.project.link = {
type: 'github',
repo: 'repo',
org: 'user',
repoId: 1010,
gitCredentialId: '',
sourceless: true,
createdAt: 1656109539791,
updatedAt: 1656109539791,
};
client.setArgv('projects', 'connect', '--confirm');
const gitPromise = git(client);
await expect(client.stderr).toOutput(
`Identified Git remote "origin": https://github.com/user/repo`
);
await expect(client.stderr).toOutput(
`> user/repo is already connected to your project.\n`
);
const exitCode = await gitPromise;
expect(exitCode).toEqual(1);
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
it('should fail when it cannot find the repository', async () => {
const cwd = fixture('invalid-repo');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
id: 'invalid-repo',
name: 'invalid-repo',
});
client.setArgv('projects', 'connect', '--confirm');
const gitPromise = git(client);
await expect(client.stderr).toOutput(
`Identified Git remote "origin": https://github.com/laksfj/asdgklsadkl`
);
await expect(client.stderr).toOutput(
`Failed to link laksfj/asdgklsadkl. Make sure there aren't any typos and that you have access to the repository if it's private.`
);
const exitCode = await gitPromise;
expect(exitCode).toEqual(1);
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
});
describe('disconnect', () => {
const originalCwd = process.cwd();
const fixture = (name: string) =>
join(__dirname, '../../fixtures/unit/commands/git/connect', name);
it('should disconnect a repository', async () => {
const cwd = fixture('new-connection');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
const project = useProject({
...defaultProject,
id: 'new-connection',
name: 'new-connection',
});
project.project.link = {
type: 'github',
repo: 'repo',
org: 'user',
repoId: 1010,
gitCredentialId: '',
sourceless: true,
createdAt: 1656109539791,
updatedAt: 1656109539791,
};
client.setArgv('git', 'disconnect');
const gitPromise = git(client);
await expect(client.stderr).toOutput(
`Are you sure you want to disconnect user/repo from your project?`
);
client.stdin.write('y\n');
await expect(client.stderr).toOutput('Disconnected user/repo.');
const newProjectData: Project = await client.fetch(
`/v8/projects/new-connection`
);
expect(newProjectData.link).toBeUndefined();
const exitCode = await gitPromise;
expect(exitCode).toEqual(0);
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
it('should fail if there is no repository to disconnect', async () => {
const cwd = fixture('new-connection');
try {
process.chdir(cwd);
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
id: 'new-connection',
name: 'new-connection',
});
client.setArgv('git', 'disconnect');
const gitPromise = git(client);
await expect(client.stderr).toOutput(
'No Git repository connected. Run `vercel project connect` to connect one.'
);
const exitCode = await gitPromise;
expect(exitCode).toEqual(1);
} finally {
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
process.chdir(originalCwd);
}
});
});
});
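
Every test above repeats the same setup/teardown dance: fixtures keep their Git metadata in a plain `git/` directory (presumably so the fixture is not treated as a real nested repository), and each test renames it to `.git` for the duration of the test. A hypothetical helper extracting that pattern, for readability only — it is not part of the suite:

```ts
import fs from 'fs-extra';
import { join } from 'path';

// Rename the fixture's `git/` directory to `.git`, run the test body,
// and always rename it back, even if the body throws.
async function withGitDir<T>(cwd: string, run: () => Promise<T>): Promise<T> {
  await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
  try {
    return await run();
  } finally {
    await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
  }
}
```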

View File

@@ -101,7 +101,9 @@ describe('pull', () => {
Object {
"orgId": "team_dummy",
"projectId": "vercel-pull-next",
"settings": Object {},
"settings": Object {
"createdAt": 1555413045188,
},
}
`);
} finally {

View File

@@ -0,0 +1,51 @@
import { sortBuilders } from '../../../../src/util/build/sort-builders';
describe('sortBuilders()', () => {
test.each([
{
name: 'should sort @vercel/next from middle to beginning',
input: ['@vercel/node', '@vercel/next', '@vercel/python'],
output: ['@vercel/next', '@vercel/node', '@vercel/python'],
},
{
name: 'should sort @vercel/static-build from middle to beginning',
input: ['@vercel/node', '@vercel/static-build', '@vercel/python'],
output: ['@vercel/static-build', '@vercel/node', '@vercel/python'],
},
{
name: 'should sort @vercel/remix from end to beginning',
input: ['@vercel/python', '@vercel/node', '@vercel/remix'],
output: ['@vercel/remix', '@vercel/python', '@vercel/node'],
},
{
name: 'should sort @vercel/redwood from beginning to beginning',
input: ['@vercel/redwood', '@vercel/python', '@vercel/ruby'],
output: ['@vercel/redwood', '@vercel/python', '@vercel/ruby'],
},
{
name: 'should sort @vercel/hydrogen from end to beginning',
input: ['@vercel/python', '@vercel/hydrogen'],
output: ['@vercel/hydrogen', '@vercel/python'],
},
{
name: 'should sort @vercel/static-build to beginning with many @vercel/node',
input: [
'@vercel/node',
'@vercel/node',
'@vercel/node',
'@vercel/static-build',
'@vercel/node',
],
output: [
'@vercel/static-build',
'@vercel/node',
'@vercel/node',
'@vercel/node',
'@vercel/node',
],
},
])('$name', ({ input, output }) => {
const builders = sortBuilders(input.map(use => ({ use })));
expect(builders.map(b => b.use)).toEqual(output);
});
});
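
A minimal sketch that satisfies every case above — the actual `sortBuilders` in `src/util/build/sort-builders` may be implemented differently: builders whose `use` names a framework that produces the frontend output are moved to the front, and relative order is otherwise preserved.

```ts
interface Builder {
  use: string;
}

// Builders expected to run first because they produce the frontend output.
const FRONTEND_BUILDERS = new Set([
  '@vercel/next',
  '@vercel/static-build',
  '@vercel/remix',
  '@vercel/redwood',
  '@vercel/hydrogen',
]);

// Stable partition: frontend builders first, everything else after,
// keeping the original relative order within each group.
function sortBuildersSketch(builders: Builder[]): Builder[] {
  const front = builders.filter(b => FRONTEND_BUILDERS.has(b.use));
  const rest = builders.filter(b => !FRONTEND_BUILDERS.has(b.use));
  return [...front, ...rest];
}
```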

View File

@@ -6,8 +6,10 @@ import {
createGitMeta,
getRemoteUrl,
isDirty,
} from '../../../../src/util/deploy/create-git-meta';
} from '../../../../src/util/create-git-meta';
import { client } from '../../../mocks/client';
import { parseRepoUrl } from '../../../../src/util/projects/connect-git-provider';
import { readOutputStream } from '../../../helpers/read-output-stream';
const fixture = (name: string) =>
join(__dirname, '../../../fixtures/unit/create-git-meta', name);
@@ -27,6 +29,97 @@ describe('getRemoteUrl', () => {
});
});
describe('parseRepoUrl', () => {
it('should be null when a url does not match the regex', () => {
const parsedUrl = parseRepoUrl('https://examplecom/foo');
expect(parsedUrl).toBeNull();
});
it('should be null when a url does not contain org and repo data', () => {
const parsedUrl = parseRepoUrl('https://github.com/borked');
expect(parsedUrl).toBeNull();
});
it('should parse url with a period in the repo name', () => {
const parsedUrl = parseRepoUrl('https://github.com/vercel/next.js');
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('github');
expect(parsedUrl?.org).toEqual('vercel');
expect(parsedUrl?.repo).toEqual('next.js');
});
it('should parse url that ends with .git', () => {
const parsedUrl = parseRepoUrl('https://github.com/vercel/next.js.git');
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('github');
expect(parsedUrl?.org).toEqual('vercel');
expect(parsedUrl?.repo).toEqual('next.js');
});
it('should parse github https url', () => {
const parsedUrl = parseRepoUrl('https://github.com/vercel/vercel.git');
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('github');
expect(parsedUrl?.org).toEqual('vercel');
expect(parsedUrl?.repo).toEqual('vercel');
});
it('should parse github https url without the .git suffix', () => {
const parsedUrl = parseRepoUrl('https://github.com/vercel/vercel');
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('github');
expect(parsedUrl?.org).toEqual('vercel');
expect(parsedUrl?.repo).toEqual('vercel');
});
it('should parse github git url', () => {
const parsedUrl = parseRepoUrl('git://github.com/vercel/vercel.git');
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('github');
expect(parsedUrl?.org).toEqual('vercel');
expect(parsedUrl?.repo).toEqual('vercel');
});
it('should parse github ssh url', () => {
const parsedUrl = parseRepoUrl('git@github.com:vercel/vercel.git');
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('github');
expect(parsedUrl?.org).toEqual('vercel');
expect(parsedUrl?.repo).toEqual('vercel');
});
it('should parse gitlab https url', () => {
const parsedUrl = parseRepoUrl(
'https://gitlab.com/gitlab-examples/knative-kotlin-app.git'
);
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('gitlab');
expect(parsedUrl?.org).toEqual('gitlab-examples');
expect(parsedUrl?.repo).toEqual('knative-kotlin-app');
});
it('should parse gitlab ssh url', () => {
const parsedUrl = parseRepoUrl(
'git@gitlab.com:gitlab-examples/knative-kotlin-app.git'
);
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('gitlab');
expect(parsedUrl?.org).toEqual('gitlab-examples');
expect(parsedUrl?.repo).toEqual('knative-kotlin-app');
});
it('should parse bitbucket https url', () => {
const parsedUrl = parseRepoUrl(
'https://bitbucket.org/atlassianlabs/maven-project-example.git'
);
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('bitbucket');
expect(parsedUrl?.org).toEqual('atlassianlabs');
expect(parsedUrl?.repo).toEqual('maven-project-example');
});
it('should parse bitbucket ssh url', () => {
const parsedUrl = parseRepoUrl(
'git@bitbucket.org:atlassianlabs/maven-project-example.git'
);
expect(parsedUrl).toBeDefined();
expect(parsedUrl?.provider).toEqual('bitbucket');
expect(parsedUrl?.org).toEqual('atlassianlabs');
expect(parsedUrl?.repo).toEqual('maven-project-example');
});
});
describe('createGitMeta', () => {
it('is undefined when it does not receive a remote url', async () => {
const directory = fixture('no-origin');
@@ -138,10 +231,12 @@ describe('createGitMeta', () => {
client.output.debugEnabled = true;
const data = await createGitMeta(tmpDir, client.output);
await expect(client.stderr).toOutput(
const output = await readOutputStream(client, 2);
expect(output).toContain(
`Failed to get last commit. The directory is likely not a Git repo, there are no latest commits, or it is corrupted.`
);
await expect(client.stderr).toOutput(
expect(output).toContain(
`Failed to determine if Git repo has been modified:`
);
expect(data).toBeUndefined();
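
For reference, here is a compact parser that passes every `parseRepoUrl` case above. It is a sketch under the assumptions that the host contains a dot and the path has the form `org/repo`; the real `parseRepoUrl` in `src/util/projects/connect-git-provider` may be implemented differently.

```ts
interface ParsedRepoUrl {
  provider: string;
  org: string;
  repo: string;
}

// Accepts https://, git:// and git@host: remotes; strips a trailing `.git`.
function parseRepoUrlSketch(url: string): ParsedRepoUrl | null {
  const match = url.match(
    /^(?:https?:\/\/|git:\/\/|git@)([^/:]+\.[^/:]+)[/:]([^/]+)\/(.+?)(?:\.git)?$/
  );
  if (!match) return null;
  const [, host, org, repo] = match;
  // "github.com" -> "github", "gitlab.com" -> "gitlab", "bitbucket.org" -> "bitbucket"
  return { provider: host.split('.')[0], org, repo };
}
```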

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/client",
"version": "12.1.0",
"version": "12.1.2",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"homepage": "https://vercel.com",
@@ -42,8 +42,8 @@
]
},
"dependencies": {
"@vercel/build-utils": "5.0.1",
"@vercel/routing-utils": "1.13.5",
"@vercel/build-utils": "5.0.3",
"@vercel/routing-utils": "2.0.0",
"@zeit/fetch": "5.2.0",
"async-retry": "1.2.3",
"async-sema": "3.0.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/frameworks",
"version": "1.1.0",
"version": "1.1.1",
"main": "./dist/frameworks.js",
"types": "./dist/frameworks.d.ts",
"files": [
@@ -21,7 +21,7 @@
"@types/js-yaml": "3.12.1",
"@types/node": "12.0.4",
"@types/node-fetch": "2.5.8",
"@vercel/routing-utils": "1.13.5",
"@vercel/routing-utils": "2.0.0",
"ajv": "6.12.2",
"typescript": "4.3.4"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/fs-detectors",
"version": "1.0.1",
"version": "2.0.1",
"description": "Vercel filesystem detectors",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -20,8 +20,8 @@
"test-unit": "yarn test"
},
"dependencies": {
"@vercel/frameworks": "1.1.0",
"@vercel/routing-utils": "1.13.5",
"@vercel/frameworks": "1.1.1",
"@vercel/routing-utils": "2.0.0",
"glob": "8.0.3",
"js-yaml": "4.1.0",
"minimatch": "3.0.4",

View File

@@ -1,7 +1,7 @@
import minimatch from 'minimatch';
import { valid as validSemver } from 'semver';
import { parse as parsePath, extname } from 'path';
import type { Route, Source } from '@vercel/routing-utils';
import type { Route, RouteWithSrc } from '@vercel/routing-utils';
import frameworkList, { Framework } from '@vercel/frameworks';
import type {
PackageJson,
@@ -155,8 +155,8 @@ export async function detectBuilders(
let fallbackEntrypoint: string | null = null;
const apiRoutes: Source[] = [];
const dynamicRoutes: Source[] = [];
const apiRoutes: RouteWithSrc[] = [];
const dynamicRoutes: RouteWithSrc[] = [];
// API
for (const fileName of sortedFiles) {
@@ -692,7 +692,7 @@ function getApiRoute(
options: Options,
absolutePathCache: Map<string, string>
): {
apiRoute: Source | null;
apiRoute: RouteWithSrc | null;
isDynamic: boolean;
routeError: ErrorResponse | null;
} {
@@ -886,7 +886,7 @@ function createRouteFromPath(
filePath: string,
featHandleMiss: boolean,
cleanUrls: boolean
): { route: Source; isDynamic: boolean } {
): { route: RouteWithSrc; isDynamic: boolean } {
const parts = filePath.split('/');
let counter = 1;
@@ -932,7 +932,7 @@ function createRouteFromPath(
? `^/${srcParts.slice(0, -1).join('/')}${srcParts.slice(-1)[0]}$`
: `^/${srcParts.join('/')}$`;
let route: Source;
let route: RouteWithSrc;
if (featHandleMiss) {
const extensionless = ext ? filePath.slice(0, -ext.length) : filePath;
@@ -959,8 +959,8 @@ interface LimitedRoutes {
function getRouteResult(
pkg: PackageJson | undefined | null,
apiRoutes: Source[],
dynamicRoutes: Source[],
apiRoutes: RouteWithSrc[],
dynamicRoutes: RouteWithSrc[],
outputDirectory: string,
apiBuilders: Builder[],
frontendBuilder: Builder | null,

View File

@@ -13,8 +13,6 @@ interface Metadata {
hasMiddleware: boolean;
}
const enableFileSystemApiFrameworks = new Set(['solidstart']);
/**
* If the Deployment can be built with the new File System API,
* return the new Builder. Otherwise an "Exclusion Condition"
@@ -61,11 +59,7 @@ export async function detectFileSystemAPI({
hasMiddleware,
};
const isEnabled =
enableFlag ||
hasMiddleware ||
hasDotOutput ||
enableFileSystemApiFrameworks.has(framework);
const isEnabled = enableFlag || hasMiddleware || hasDotOutput;
if (!isEnabled) {
return { metadata, fsApiBuilder: null, reason: 'Flag not enabled.' };
}

View File

@@ -80,11 +80,13 @@ export async function detectFramework({
fs,
frameworkList,
}: DetectFrameworkOptions): Promise<string | null> {
for (const framework of frameworkList) {
if (await matches(fs, framework)) {
return framework.slug;
}
}
return null;
const result = await Promise.all(
frameworkList.map(async frameworkMatch => {
if (await matches(fs, frameworkMatch)) {
return frameworkMatch.slug;
}
return null;
})
);
return result.find(res => res !== null) ?? null;
}
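
The change above distilled: every framework matcher now runs concurrently via `Promise.all`, but `result.find` still picks the first match in `frameworkList` order, so earlier entries keep precedence — the new test in the following file asserts that Next.js wins over Gatsby when both appear in `package.json`. A generic form of the pattern:

```ts
// Evaluate all async matchers concurrently, but resolve ties by the
// original list order rather than by which promise settles first.
async function firstMatchInOrder<T>(
  items: T[],
  matches: (item: T) => Promise<boolean>
): Promise<T | null> {
  const results = await Promise.all(
    items.map(async item => ((await matches(item)) ? item : null))
  );
  return results.find(item => item !== null) ?? null;
}
```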

View File

@@ -1,4 +1,8 @@
import type { Source, Route, Handler } from '@vercel/routing-utils';
import type {
Route,
RouteWithHandle as Handler,
RouteWithSrc as Source,
} from '@vercel/routing-utils';
import {
detectBuilders,
detectOutputDirectory,

View File

@@ -252,6 +252,19 @@ describe('DetectorFilesystem', () => {
expect(await detectFramework({ fs, frameworkList })).toBe('nextjs');
});
it('Detect frameworks based on ascending order in framework list', async () => {
const fs = new VirtualFilesystem({
'package.json': JSON.stringify({
dependencies: {
next: '9.0.0',
gatsby: '4.18.0',
},
}),
});
expect(await detectFramework({ fs, frameworkList })).toBe('nextjs');
});
it('Detect Nuxt.js', async () => {
const fs = new VirtualFilesystem({
'package.json': JSON.stringify({

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/go",
"version": "2.0.5",
"version": "2.0.7",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
@@ -25,7 +25,7 @@
"@types/fs-extra": "^5.0.5",
"@types/node-fetch": "^2.3.0",
"@types/tar": "^4.0.0",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"@vercel/ncc": "0.24.0",
"async-retry": "1.3.1",
"execa": "^1.0.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/hydrogen",
"version": "0.0.2",
"version": "0.0.4",
"license": "MIT",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",
@@ -22,7 +22,7 @@
"devDependencies": {
"@types/jest": "27.5.1",
"@types/node": "*",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"typescript": "4.6.4"
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/next",
"version": "3.1.4",
"version": "3.1.7",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
@@ -45,9 +45,9 @@
"@types/semver": "6.0.0",
"@types/text-table": "0.2.1",
"@types/webpack-sources": "3.2.0",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"@vercel/nft": "0.20.1",
"@vercel/routing-utils": "1.13.5",
"@vercel/routing-utils": "2.0.0",
"async-sema": "3.0.1",
"buffer-crc32": "0.2.13",
"cheerio": "1.0.0-rc.10",

View File

@@ -24,7 +24,7 @@ import {
NodejsLambda,
BuildResultV2Typical as BuildResult,
} from '@vercel/build-utils';
import { Handler, Route, Source } from '@vercel/routing-utils';
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
import {
convertHeaders,
convertRedirects,
@@ -399,6 +399,7 @@ export const build: BuildV2 = async ({
const env: typeof process.env = { ...spawnOpts.env };
const memoryToConsume = Math.floor(os.totalmem() / 1024 ** 2) - 128;
env.NODE_OPTIONS = `--max_old_space_size=${memoryToConsume}`;
env.NEXT_EDGE_RUNTIME_PROVIDER = 'vercel';
if (target) {
// Since version v10.0.8-canary.15 of Next.js the NEXT_PRIVATE_TARGET env
@@ -895,7 +896,7 @@ export const build: BuildV2 = async ({
...(output[path.join('./', entryDirectory, '404')] ||
output[path.join('./', entryDirectory, '404/index')]
? [
{ handle: 'error' } as Handler,
{ handle: 'error' } as RouteWithHandle,
{
status: 404,
@@ -927,7 +928,7 @@ export const build: BuildV2 = async ({
let trailingSlash = false;
redirects = redirects.filter(_redir => {
const redir = _redir as Source;
const redir = _redir as RouteWithSrc;
// detect the trailing slash redirect and make sure it's
// kept above the wildcard mapping to prevent erroneous redirects
// since non-continue routes come after continue the $wildcard
@@ -1145,7 +1146,7 @@ export const build: BuildV2 = async ({
continue;
}
const route: Source & { dest: string } = {
const route: RouteWithSrc & { dest: string } = {
src: (
dataRoute.namedDataRouteRegex || dataRoute.dataRouteRegex
).replace(/^\^/, `^${appMountPrefixNoTrailingSlash}`),
@@ -1174,7 +1175,7 @@ export const build: BuildV2 = async ({
if (isOmittedRoute && isServerMode) {
// only match this route when in preview mode so
// preview works for non-prerender fallback: false pages
(route as Source).has = [
(route as RouteWithSrc).has = [
{
type: 'cookie',
key: '__prerender_bypass',
@@ -2453,7 +2454,7 @@ export const build: BuildV2 = async ({
? []
: [
// Custom Next.js 404 page
{ handle: 'error' } as Handler,
{ handle: 'error' } as RouteWithHandle,
...(i18n && (static404Page || hasIsr404Page)
? [

View File

@@ -14,7 +14,7 @@ import {
Files,
BuildResultV2Typical as BuildResult,
} from '@vercel/build-utils';
import { Handler, Route, Source } from '@vercel/routing-utils';
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
import { MAX_AGE_ONE_YEAR } from '.';
import {
NextRequiredServerFilesManifest,
@@ -56,6 +56,7 @@ import prettyBytes from 'pretty-bytes';
const CORRECT_NOT_FOUND_ROUTES_VERSION = 'v12.0.1';
const CORRECT_MIDDLEWARE_ORDER_VERSION = 'v12.1.7-canary.29';
const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';
const EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION = 'v12.2.0';
export async function serverBuild({
dynamicPages,
@@ -133,6 +134,10 @@ export async function serverBuild({
const lambdaPageKeys = Object.keys(lambdaPages);
const internalPages = ['_app.js', '_error.js', '_document.js'];
const pageBuildTraces = await glob('**/*.js.nft.json', pagesDir);
const isEmptyAllowQueryForPrendered = semver.gte(
nextVersion,
EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION
);
const isCorrectNotFoundRoutes = semver.gte(
nextVersion,
CORRECT_NOT_FOUND_ROUTES_VERSION
@@ -756,6 +761,7 @@ export async function serverBuild({
static404Page,
hasPages404: routesManifest.pages404,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
});
Object.keys(prerenderManifest.staticRoutes).forEach(route =>
@@ -822,7 +828,7 @@ export async function serverBuild({
const { staticFiles, publicDirectoryFiles, staticDirectoryFiles } =
await getStaticFiles(entryPath, entryDirectory, outputDirectory);
const notFoundPreviewRoutes: Source[] = [];
const notFoundPreviewRoutes: RouteWithSrc[] = [];
if (prerenderManifest.notFoundRoutes?.length > 0 && canUsePreviewMode) {
// we combine routes into one src here to reduce the number of needed
@@ -1378,7 +1384,7 @@ export async function serverBuild({
},
// error handling
{ handle: 'error' } as Handler,
{ handle: 'error' } as RouteWithHandle,
// Custom Next.js 404 page
...(i18n && (static404Page || hasIsr404Page || lambdaPages['404.js'])

View File

@@ -16,7 +16,7 @@ import {
EdgeFunction,
} from '@vercel/build-utils';
import { NodeFileTraceReasons } from '@vercel/nft';
import { Header, Rewrite, Route, Source } from '@vercel/routing-utils';
import { Header, Rewrite, Route, RouteWithSrc } from '@vercel/routing-utils';
import { Sema } from 'async-sema';
import crc32 from 'buffer-crc32';
import fs, { lstat, stat } from 'fs-extra';
@@ -273,8 +273,8 @@ export async function getDynamicRoutes(
canUsePreviewMode?: boolean,
bypassToken?: string,
isServerMode?: boolean,
dynamicMiddlewareRouteMap?: Map<string, Source>
): Promise<Source[]> {
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>
): Promise<RouteWithSrc[]> {
if (routesManifest) {
switch (routesManifest.version) {
case 1:
@@ -307,7 +307,7 @@ export async function getDynamicRoutes(
}
const { page, namedRegex, regex, routeKeys } = params;
const route: Source = {
const route: RouteWithSrc = {
src: namedRegex || regex,
dest: `${!isDev ? path.join('/', entryDirectory, page) : page}${
routeKeys
@@ -400,7 +400,7 @@ export async function getDynamicRoutes(
matcher: getRouteRegex && getRouteRegex(pageName).re,
}));
const routes: Source[] = [];
const routes: RouteWithSrc[] = [];
pageMatchers.forEach(pageMatcher => {
// in `vercel dev` we don't need to prefix the destination
const dest = !isDev
@@ -419,7 +419,7 @@ export async function getDynamicRoutes(
}
export function localizeDynamicRoutes(
dynamicRoutes: Source[],
dynamicRoutes: RouteWithSrc[],
dynamicPrefix: string,
entryDirectory: string,
staticPages: Files,
@@ -427,8 +427,8 @@ export function localizeDynamicRoutes(
routesManifest?: RoutesManifest,
isServerMode?: boolean,
isCorrectLocaleAPIRoutes?: boolean
): Source[] {
return dynamicRoutes.map((route: Source) => {
): RouteWithSrc[] {
return dynamicRoutes.map((route: RouteWithSrc) => {
// i18n is already handled for middleware
if (route.middleware !== undefined || route.middlewarePath !== undefined)
return route;
@@ -1665,6 +1665,7 @@ type OnPrerenderRouteArgs = {
pageLambdaMap: { [key: string]: string };
routesManifest?: RoutesManifest;
isCorrectNotFoundRoutes?: boolean;
isEmptyAllowQueryForPrendered?: boolean;
};
let prerenderGroup = 1;
@@ -1698,6 +1699,7 @@ export const onPrerenderRoute =
pageLambdaMap,
routesManifest,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
} = prerenderRouteArgs;
if (isBlocking && isFallback) {
@@ -1901,7 +1903,6 @@ export const onPrerenderRoute =
// a given path. All other query keys will be striped. We can automatically
// detect this for prerender (ISR) pages by reading the routes manifest file.
const pageKey = srcRoute || routeKey;
const isDynamic = isDynamicRoute(pageKey);
const route = routesManifest?.dynamicRoutes.find(
(r): r is RoutesManifestRoute =>
r.page === pageKey && !('isMiddleware' in r)
@@ -1911,14 +1912,33 @@ export const onPrerenderRoute =
// we have sufficient information to set it
let allowQuery: string[] | undefined;
if (routeKeys) {
// if we have routeKeys in the routes-manifest we use those
// for allowQuery for dynamic routes
allowQuery = Object.values(routeKeys);
} else if (!isDynamic) {
// for non-dynamic routes we use an empty array since
// no query values bust the cache for non-dynamic prerenders
allowQuery = [];
if (isEmptyAllowQueryForPrendered) {
const isDynamic = isDynamicRoute(routeKey);
if (!isDynamic) {
// for non-dynamic routes we use an empty array since
// no query values bust the cache for non-dynamic prerenders
// prerendered paths also do not pass allowQuery as they match
// during handle: 'filesystem' so should not cache differently
// by query values
allowQuery = [];
} else if (routeKeys) {
// if we have routeKeys in the routes-manifest we use those
// for allowQuery for dynamic routes
allowQuery = Object.values(routeKeys);
}
} else {
const isDynamic = isDynamicRoute(pageKey);
if (routeKeys) {
// if we have routeKeys in the routes-manifest we use those
// for allowQuery for dynamic routes
allowQuery = Object.values(routeKeys);
} else if (!isDynamic) {
// for non-dynamic routes we use an empty array since
// no query values bust the cache for non-dynamic prerenders
allowQuery = [];
}
}
prerenders[outputPathPage] = new Prerender({
@@ -2148,6 +2168,7 @@ interface EdgeFunctionInfo {
page: string;
regexp: string;
wasm?: { filePath: string; name: string }[];
assets?: { filePath: string; name: string }[];
}
export async function getMiddlewareBundle({
@@ -2234,6 +2255,23 @@ export async function getMiddlewareBundle({
{}
);
const assetFiles = (edgeFunction.assets ?? []).reduce(
(acc: Files, { filePath, name }) => {
const fullFilePath = path.join(
entryPath,
outputDirectory,
filePath
);
acc[`assets/${name}`] = new FileFsRef({
mode: 0o644,
contentType: 'application/octet-stream',
fsPath: fullFilePath,
});
return acc;
},
{}
);
return new EdgeFunction({
deploymentTarget: 'v8-worker',
name: edgeFunction.name,
@@ -2251,9 +2289,16 @@ export async function getMiddlewareBundle({
}),
}),
...wasmFiles,
...assetFiles,
},
entrypoint: 'index.js',
envVarsInUse: edgeFunction.env,
assets: (edgeFunction.assets ?? []).map(({ name }) => {
return {
name,
path: `assets/${name}`,
};
}),
});
})(),
routeSrc: getRouteSrc(edgeFunction, routesManifest),
@@ -2267,7 +2312,7 @@ export async function getMiddlewareBundle({
const source: {
staticRoutes: Route[];
dynamicRouteMap: Map<string, Source>;
dynamicRouteMap: Map<string, RouteWithSrc>;
edgeFunctions: Record<string, EdgeFunction>;
} = {
staticRoutes: [],
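
The `allowQuery` branching added to `onPrerenderRoute` earlier in this file's diff, distilled into a standalone function for readability. Parameter names mirror the diff (including the `isEmptyAllowQueryForPrendered` spelling); the surrounding builder context is omitted, and the new path derives `isDynamic` from `routeKey` while the old path derives it from `pageKey`.

```ts
function resolveAllowQuery(opts: {
  isEmptyAllowQueryForPrendered: boolean; // Next.js >= v12.2.0
  isDynamic: boolean;
  routeKeys?: Record<string, string>;
}): string[] | undefined {
  const { isEmptyAllowQueryForPrendered, isDynamic, routeKeys } = opts;
  if (isEmptyAllowQueryForPrendered) {
    if (!isDynamic) {
      // Build-time prerendered paths match during `handle: 'filesystem'`,
      // so no query value should produce a separate cache entry.
      return [];
    }
    if (routeKeys) {
      // Dynamic routes still allow their route keys through.
      return Object.values(routeKeys);
    }
    return undefined;
  }
  // Previous behavior: route keys win, otherwise non-dynamic routes
  // ignore the query entirely.
  if (routeKeys) {
    return Object.values(routeKeys);
  }
  if (!isDynamic) {
    return [];
  }
  return undefined;
}
```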

View File

@@ -1,8 +1,144 @@
/* eslint-env jest */
const path = require('path');
const { deployAndTest } = require('../../utils');
const cheerio = require('cheerio');
const { deployAndTest, check, waitFor } = require('../../utils');
const fetch = require('../../../../../test/lib/deployment/fetch-retry');
async function checkForChange(url, initialValue, getNewValue) {
return check(async () => {
const res = await fetch(url);
if (res.status !== 200) {
throw new Error(`Invalid status code ${res.status}`);
}
const newValue = await getNewValue(res);
return initialValue !== newValue
? 'success'
: JSON.stringify({ initialValue, newValue });
}, 'success');
}
const ctx = {};
describe(`${__dirname.split(path.sep).pop()}`, () => {
it('should deploy and pass probe checks', async () => {
await deployAndTest(__dirname);
const info = await deployAndTest(__dirname);
Object.assign(ctx, info);
});
it.each([
{
title: 'should update content for prerendered path correctly',
pathsToCheck: [
{ urlPath: '/fallback-blocking/first' },
{ urlPath: '/fallback-blocking/first', query: '?slug=first' },
{ urlPath: '/fallback-blocking/first', query: '?slug=random' },
{ urlPath: '/fallback-blocking/first', query: '?another=value' },
],
},
{
title: 'should update content for non-prerendered path correctly',
pathsToCheck: [
{ urlPath: '/fallback-blocking/on-demand-2' },
{
urlPath: '/fallback-blocking/on-demand-2',
query: '?slug=on-demand-2',
},
{ urlPath: '/fallback-blocking/on-demand-2', query: '?slug=random' },
{ urlPath: '/fallback-blocking/on-demand-2', query: '?another=value' },
],
},
])('$title', async ({ pathsToCheck }) => {
let initialRandom;
let initialRandomData;
let preRevalidateRandom;
let preRevalidateRandomData;
const checkPaths = async pathsToCheck => {
for (const { urlPath, query } of pathsToCheck) {
console.log('checking', {
urlPath,
query,
initialRandom,
preRevalidateRandom,
});
if (preRevalidateRandom) {
// wait for change as cache may take a little to propagate
const initialUrl = `${ctx.deploymentUrl}${urlPath}${query || ''}`;
await checkForChange(initialUrl, preRevalidateRandom, async () => {
const res = await fetch(initialUrl);
const $ = cheerio.load(await res.text());
return JSON.parse($('#props').text()).random;
});
}
const res = await fetch(`${ctx.deploymentUrl}${urlPath}${query || ''}`);
expect(res.status).toBe(200);
const $ = await cheerio.load(await res.text());
const props = JSON.parse($('#props').text());
if (initialRandom) {
// for fallback paths the initial value is generated
// in the foreground and then a revalidation is kicked off
// in the background so the initial value will be replaced
if (initialRandom !== props.random && urlPath.includes('on-demand')) {
initialRandom = props.random;
} else {
expect(initialRandom).toBe(props.random);
}
} else {
initialRandom = props.random;
}
expect(isNaN(initialRandom)).toBe(false);
const dataRes = await fetch(
`${ctx.deploymentUrl}/_next/data/testing-build-id${urlPath}.json${
query || ''
}`
);
expect(dataRes.status).toBe(200);
const { pageProps: dataProps } = await dataRes.json();
if (initialRandomData) {
// for fallback paths the initial value is generated
// in the foreground and then a revalidation is kicked off
// in the background so the initial value will be replaced
if (
initialRandomData !== dataProps.random &&
urlPath.includes('on-demand-2')
) {
initialRandomData = dataProps.random;
} else {
expect(initialRandomData).toBe(dataProps.random);
}
} else {
initialRandomData = dataProps.random;
}
expect(isNaN(initialRandomData)).toBe(false);
}
};
await checkPaths(pathsToCheck);
preRevalidateRandom = initialRandom;
preRevalidateRandomData = initialRandomData;
initialRandom = undefined;
initialRandomData = undefined;
const revalidateRes = await fetch(
`${ctx.deploymentUrl}/api/revalidate?urlPath=${pathsToCheck[0].urlPath}`
);
expect(revalidateRes.status).toBe(200);
expect((await revalidateRes.json()).revalidated).toBe(true);
await checkPaths(pathsToCheck);
expect(preRevalidateRandom).toBeDefined();
expect(preRevalidateRandomData).toBeDefined();
});
});
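
The test above leans on the suite's `check(fn, expected)` utility to wait for the ISR cache to update. A hedged sketch of what such a helper typically does — the real util in `../../utils` may behave differently: poll the callback until it returns the expected value or a timeout elapses.

```ts
// Hypothetical poller: resolves once fn() returns `expected`,
// otherwise retries on an interval until the deadline.
async function checkSketch<T>(
  fn: () => Promise<T>,
  expected: T,
  { timeout = 30_000, interval = 1_000 } = {}
): Promise<T> {
  const deadline = Date.now() + timeout;
  let last: T | undefined;
  while (Date.now() < deadline) {
    last = await fn();
    if (last === expected) return last;
    await new Promise(resolve => setTimeout(resolve, interval));
  }
  throw new Error(`check timed out; last value: ${JSON.stringify(last)}`);
}
```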

View File

@@ -0,0 +1,10 @@
export default async function handler(req, res) {
try {
console.log('revalidating', req.query.urlPath);
await res.revalidate(req.query.urlPath);
return res.json({ revalidated: true });
} catch (err) {
console.error(err);
return res.json({ revalidated: false });
}
}
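
This is the on-demand revalidation endpoint the e2e test earlier in this diff drives via `/api/revalidate?urlPath=...`. A sketch of that call from the client side, assuming a global `fetch` (Node 18+ or the suite's fetch-retry wrapper):

```ts
// Trigger res.revalidate() for a page path and report success.
async function triggerRevalidate(
  deploymentUrl: string,
  urlPath: string
): Promise<boolean> {
  const res = await fetch(`${deploymentUrl}/api/revalidate?urlPath=${urlPath}`);
  if (res.status !== 200) return false;
  const { revalidated } = await res.json();
  return revalidated === true;
}
```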

View File

@@ -16,7 +16,7 @@ export const getStaticProps = ({ params }) => {
export const getStaticPaths = () => {
return {
paths: ['/fallback-blocking/first'],
paths: ['/fallback-blocking/first', '/fallback-blocking/on-demand-1'],
fallback: 'blocking',
};
};

View File

@@ -78,8 +78,20 @@ it('should build using server build', async () => {
expect(output['dynamic/[slug]'].maxDuration).toBe(5);
expect(output['fallback/[slug]'].type).toBe('Prerender');
expect(output['fallback/[slug]'].allowQuery).toEqual(['slug']);
expect(output['_next/data/testing-build-id/fallback/[slug].json'].type).toBe(
'Prerender'
);
expect(
output['_next/data/testing-build-id/fallback/[slug].json'].allowQuery
).toEqual(['slug']);
expect(output['fallback/first'].type).toBe('Prerender');
expect(output['fallback/first'].allowQuery).toEqual(['slug']);
expect(output['fallback/first'].allowQuery).toEqual([]);
expect(output['_next/data/testing-build-id/fallback/first.json'].type).toBe(
'Prerender'
);
expect(
output['_next/data/testing-build-id/fallback/first.json'].allowQuery
).toEqual([]);
expect(output['api'].type).toBe('Lambda');
expect(output['api'].allowQuery).toBe(undefined);
expect(output['api'].memory).toBe(128);

View File

@@ -1,3 +1,6 @@
module.exports = (phase, { defaultConfig }) => ({
pageExtensions: [...defaultConfig.pageExtensions, 'hello.js'],
generateBuildId() {
return 'testing-build-id';
},
});

View File

@@ -129,7 +129,7 @@ export async function deployAndTest(fixtureDir) {
};
}
async function waitFor(milliseconds) {
export async function waitFor(milliseconds) {
return new Promise(resolve => {
setTimeout(resolve, milliseconds);
});

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/node",
"version": "2.4.1",
"version": "2.4.4",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -31,7 +31,7 @@
},
"dependencies": {
"@types/node": "*",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"@vercel/node-bridge": "3.0.0",
"@vercel/static-config": "2.0.1",
"edge-runtime": "1.0.1",

View File

@@ -193,7 +193,7 @@ async function compileUserCode(entrypoint: string) {
let edgeHandler = module.exports.default;
if (!edgeHandler) {
throw new Error('No default export was found. Add a default export to handle requests.');
throw new Error('No default export was found. Add a default export to handle requests. Learn more: https://vercel.link/creating-edge-middleware');
}
let response = await edgeHandler(event.request, event);
@@ -305,7 +305,7 @@ function parseRuntime(
throw new Error(
`Invalid function runtime "${runtime}" for "${entrypoint}". Valid runtimes are: ${JSON.stringify(
validRuntimes
)}`
)}. Learn more: https://vercel.link/creating-edge-functions`
);
}
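
The updated error message points users at adding a default export. A minimal handler shape that avoids the "No default export was found" error above — a sketch assuming the web-standard `Request`/`Response` types available in the Edge runtime:

```ts
// The module's default export receives the incoming Request and
// returns a Response.
export default async function handler(request: Request): Promise<Response> {
  const { pathname } = new URL(request.url);
  return new Response(`Hello from ${pathname}`, {
    headers: { 'content-type': 'text/plain' },
  });
}
```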

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/python",
"version": "3.0.5",
"version": "3.0.7",
"main": "./dist/index.js",
"license": "MIT",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",
@@ -23,7 +23,7 @@
"devDependencies": {
"@types/execa": "^0.9.0",
"@types/jest": "27.4.1",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"@vercel/ncc": "0.24.0",
"execa": "^1.0.0",
"typescript": "4.3.4"

View File

@@ -25,9 +25,6 @@ const allOptions: PythonVersion[] = [
},
];
const upstreamProvider =
'This change is the result of a decision made by an upstream infrastructure provider (AWS)';
function getDevPythonVersion(): PythonVersion {
// Use the system-installed version of `python3` when running `vercel dev`
return {
@@ -75,14 +72,14 @@ export function getSupportedPythonVersion({
throw new NowBuildError({
code: 'BUILD_UTILS_PYTHON_VERSION_DISCONTINUED',
link: 'http://vercel.link/python-version',
message: `Python version "${selection.version}" detected in Pipfile.lock is discontinued and must be upgraded. ${upstreamProvider}.`,
message: `Python version "${selection.version}" detected in Pipfile.lock is discontinued and must be upgraded.`,
});
}
if (selection.discontinueDate) {
const d = selection.discontinueDate.toISOString().split('T')[0];
console.warn(
`Error: Python version "${selection.version}" detected in Pipfile.lock is deprecated. Deployments created on or after ${d} will fail to build. ${upstreamProvider}. http://vercel.link/python-version`
`Error: Python version "${selection.version}" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after ${d} will fail to build. http://vercel.link/python-version`
);
}

View File

@@ -5,7 +5,7 @@
{
"path": "/",
"mustContain": "wsgi:RANDOMNESS_PLACEHOLDER",
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build"
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after 2022-07-18 will fail to build"
}
]
}

View File

@@ -8,7 +8,7 @@
{
"path": "/",
"mustContain": "RANDOMNESS_PLACEHOLDER:env",
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build"
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after 2022-07-18 will fail to build"
}
]
}

View File

@@ -58,7 +58,7 @@ it('should throw for discontinued versions', async () => {
expect(() =>
getSupportedPythonVersion({ pipLockPythonVersion: '3.6' })
).toThrow(
'Python version "3.6" detected in Pipfile.lock is discontinued and must be upgraded. This change is the result of a decision made by an upstream infrastructure provider (AWS).'
'Python version "3.6" detected in Pipfile.lock is discontinued and must be upgraded.'
);
expect(warningMessages).toStrictEqual([]);
});
@@ -70,6 +70,6 @@ it('should warn for deprecated versions, soon to be discontinued', async () => {
getSupportedPythonVersion({ pipLockPythonVersion: '3.6' })
).toHaveProperty('runtime', 'python3.6');
expect(warningMessages).toStrictEqual([
'Error: Python version "3.6" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build. This change is the result of a decision made by an upstream infrastructure provider (AWS). http://vercel.link/python-version',
'Error: Python version "3.6" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after 2022-07-18 will fail to build. http://vercel.link/python-version',
]);
});

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/redwood",
"version": "1.0.6",
"version": "1.0.8",
"main": "./dist/index.js",
"license": "MIT",
"homepage": "https://vercel.com/docs",
@@ -21,13 +21,13 @@
},
"dependencies": {
"@vercel/nft": "0.20.1",
"@vercel/routing-utils": "1.13.5",
"@vercel/routing-utils": "2.0.0",
"semver": "6.1.1"
},
"devDependencies": {
"@types/aws-lambda": "8.10.19",
"@types/node": "*",
"@types/semver": "6.0.0",
"@vercel/build-utils": "5.0.1"
"@vercel/build-utils": "5.0.3"
}
}

View File

@@ -283,12 +283,10 @@ export const build: BuildV2 = async ({
: '/index';
const defaultRoutesConfig = getTransformedRoutes({
nowConfig: {
// this makes sure we send back 200.html for unprerendered pages
rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }],
cleanUrls: true,
trailingSlash: false,
},
// this makes sure we send back 200.html for unprerendered pages
rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }],
cleanUrls: true,
trailingSlash: false,
});
if (defaultRoutesConfig.error) {

View File

@@ -1,6 +1,5 @@
const fs = require('fs');
const path = require('path');
const { version } = require('../package.json');
const {
packAndDeploy,
@@ -9,10 +8,15 @@ const {
jest.setTimeout(12 * 60 * 1000);
let buildUtilsUrl;
let builderUrl;
const buildUtilsUrl = version.includes('canary') ? '@canary' : undefined;
beforeAll(async () => {
if (!buildUtilsUrl) {
const buildUtilsPath = path.resolve(__dirname, '..', '..', 'build-utils');
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
console.log('buildUtilsUrl', buildUtilsUrl);
}
const builderPath = path.resolve(__dirname, '..');
builderUrl = await packAndDeploy(builderPath);
console.log('builderUrl', builderUrl);

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/remix",
"version": "1.0.7",
"version": "1.0.9",
"license": "MIT",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",
@@ -26,7 +26,7 @@
"devDependencies": {
"@types/jest": "27.5.1",
"@types/node": "*",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"typescript": "4.6.4"
}
}

View File

@@ -1,5 +1,5 @@
import { promises as fs } from 'fs';
import { dirname, join } from 'path';
import { dirname, join, relative } from 'path';
import {
debug,
download,
@@ -192,11 +192,9 @@ export const build: BuildV2 = async ({
// If we are, prepend the app root directory from config onto the build path.
// e.g. `/apps/my-remix-app/api/index.js`
const isMonorepo = repoRootPath && repoRootPath !== workPath;
if (isMonorepo && config.projectSettings?.rootDirectory) {
serverBuildPath = join(
config.projectSettings.rootDirectory,
serverBuildPath
);
if (isMonorepo) {
const rootDirectory = relative(repoRootPath, workPath);
serverBuildPath = join(rootDirectory, serverBuildPath);
}
} catch (err: any) {
// Ignore error if `remix.config.js` does not exist
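
An illustration of the path math in the change above, with hypothetical values: in a monorepo, the server build path is now prefixed with the app's directory relative to the repo root, instead of relying on `config.projectSettings.rootDirectory` being set.

```ts
import { join, relative } from 'path';

const repoRootPath = '/repo'; // hypothetical
const workPath = '/repo/apps/my-remix-app'; // hypothetical
let serverBuildPath = 'api/index.js';

if (repoRootPath && repoRootPath !== workPath) {
  const rootDirectory = relative(repoRootPath, workPath); // 'apps/my-remix-app'
  serverBuildPath = join(rootDirectory, serverBuildPath); // 'apps/my-remix-app/api/index.js'
}
```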

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/routing-utils",
"version": "1.13.5",
"version": "2.0.0",
"description": "Vercel routing utilities",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",

View File

@@ -1,28 +1,27 @@
import { parse as parseUrl } from 'url';
export * from './schemas';
export * from './types';
import {
Route,
Handler,
NormalizedRoutes,
GetRoutesProps,
RouteApiError,
Redirect,
HasField,
} from './types';
import {
collectHasSegments,
convertCleanUrls,
convertRewrites,
convertRedirects,
convertHeaders,
convertRedirects,
convertRewrites,
convertTrailingSlash,
sourceToRegex,
collectHasSegments,
} from './superstatic';
export { getCleanUrls } from './superstatic';
export { mergeRoutes } from './merge';
import {
GetRoutesProps,
HasField,
NormalizedRoutes,
Redirect,
Route,
RouteApiError,
RouteWithHandle,
} from './types';
export { appendRoutesToPhase } from './append';
export { mergeRoutes } from './merge';
export * from './schemas';
export { getCleanUrls } from './superstatic';
export * from './types';
const VALID_HANDLE_VALUES = [
'filesystem',
@@ -35,8 +34,8 @@ const VALID_HANDLE_VALUES = [
const validHandleValues = new Set<string>(VALID_HANDLE_VALUES);
export type HandleValue = typeof VALID_HANDLE_VALUES[number];
export function isHandler(route: Route): route is Handler {
return typeof (route as Handler).handle !== 'undefined';
export function isHandler(route: Route): route is RouteWithHandle {
return typeof (route as RouteWithHandle).handle !== 'undefined';
}
export function isValidHandleValue(handle: string): handle is HandleValue {
@@ -249,11 +248,12 @@ function notEmpty<T>(value: T | null | undefined): value is T {
return value !== null && value !== undefined;
}
export function getTransformedRoutes({
nowConfig,
}: GetRoutesProps): NormalizedRoutes {
const { cleanUrls, rewrites, redirects, headers, trailingSlash } = nowConfig;
let { routes = null } = nowConfig;
export function getTransformedRoutes(
vercelConfig: GetRoutesProps
): NormalizedRoutes {
const { cleanUrls, rewrites, redirects, headers, trailingSlash } =
vercelConfig;
let { routes = null } = vercelConfig;
if (routes) {
const hasNewProperties =
typeof cleanUrls !== 'undefined' ||
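
For consumers, the practical effect of this hunk is the call-site change shown in the updated tests later in this diff: the config is no longer wrapped in a `nowConfig` property.

```ts
import { getTransformedRoutes } from '@vercel/routing-utils';

// Before (1.x): getTransformedRoutes({ nowConfig: { cleanUrls: true } })
// After (2.0.0): pass the config object directly.
const { routes, error } = getTransformedRoutes({
  cleanUrls: true,
  trailingSlash: false,
  rewrites: [{ source: '/v1', destination: '/v2/api.py' }],
});

if (error) {
  console.error(error.code, error.message);
} else {
  console.log(routes);
}
```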

View File

@@ -21,7 +21,7 @@ export type HasField = Array<
}
>;
export type Source = {
export type RouteWithSrc = {
src: string;
dest?: string;
headers?: { [name: string]: string };
@@ -49,14 +49,14 @@ export type Source = {
middleware?: number;
};
export type Handler = {
export type RouteWithHandle = {
handle: HandleValue;
src?: string;
dest?: string;
status?: number;
};
export type Route = Source | Handler;
export type Route = RouteWithSrc | RouteWithHandle;
export type NormalizedRoutes = {
routes: Route[] | null;
@@ -64,7 +64,12 @@ export type NormalizedRoutes = {
};
export interface GetRoutesProps {
nowConfig: VercelConfig;
routes?: Route[];
cleanUrls?: boolean;
rewrites?: Rewrite[];
redirects?: Redirect[];
headers?: Header[];
trailingSlash?: boolean;
}
export interface MergeRoutesProps {
@@ -78,17 +83,6 @@ export interface Build {
routes?: Route[];
}
export interface VercelConfig {
name?: string;
version?: number;
routes?: Route[];
cleanUrls?: boolean;
rewrites?: Rewrite[];
redirects?: Redirect[];
headers?: Header[];
trailingSlash?: boolean;
}
export interface Rewrite {
source: string;
destination: string;
@@ -131,18 +125,3 @@ export interface AppendRoutesToPhaseProps {
*/
phase: HandleValue;
}
/** @deprecated Use VercelConfig instead. */
export type NowConfig = VercelConfig;
/** @deprecated Use Rewrite instead. */
export type NowRewrite = Rewrite;
/** @deprecated Use Redirect instead. */
export type NowRedirect = Redirect;
/** @deprecated Use Header instead. */
export type NowHeader = Header;
/** @deprecated Use HeaderKeyValue instead. */
export type NowHeaderKeyValue = HeaderKeyValue;
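
One migration path for consumers of the renamed types is to alias them at the import site so the rest of the file keeps the old names — the `@vercel/fs-detectors` change earlier in this diff does exactly that. A small example of the new names in use:

```ts
import type {
  Route,
  RouteWithHandle as Handler,
  RouteWithSrc as Source,
} from '@vercel/routing-utils';

const errorHandle: Handler = { handle: 'error' };
const rewrite: Source = { src: '^/old$', dest: '/new' };
const routes: Route[] = [errorHandle, rewrite];
```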

View File

@@ -775,24 +775,24 @@ describe('normalizeRoutes', () => {
});
describe('getTransformedRoutes', () => {
test('should normalize nowConfig.routes', () => {
const nowConfig = { routes: [{ src: '/page', dest: '/page.html' }] };
const actual = getTransformedRoutes({ nowConfig });
const expected = normalizeRoutes(nowConfig.routes);
test('should normalize vercelConfig.routes', () => {
const vercelConfig = { routes: [{ src: '/page', dest: '/page.html' }] };
const actual = getTransformedRoutes(vercelConfig);
const expected = normalizeRoutes(vercelConfig.routes);
assert.deepEqual(actual, expected);
assertValid(actual.routes);
});
test('should not error when routes is null and cleanUrls is true', () => {
const nowConfig = { cleanUrls: true, routes: null };
const vercelConfig = { cleanUrls: true, routes: null };
// @ts-expect-error intentionally passing invalid `routes: null` here
const actual = getTransformedRoutes({ nowConfig });
const actual = getTransformedRoutes(vercelConfig);
assert.equal(actual.error, null);
assertValid(actual.routes);
});
test('should not error when has segment is used in destination', () => {
const nowConfig = {
const vercelConfig = {
redirects: [
{
source: '/redirect',
@@ -809,17 +809,17 @@ describe('getTransformedRoutes', () => {
};
// @ts-expect-error not sure if this one is an error or not…
const actual = getTransformedRoutes({ nowConfig });
const actual = getTransformedRoutes(vercelConfig);
assert.equal(actual.error, null);
assertValid(actual.routes);
});
test('should error when routes is defined and cleanUrls is true', () => {
const nowConfig = {
const vercelConfig = {
cleanUrls: true,
routes: [{ src: '/page', dest: '/file.html' }],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_mixed_routes');
assert.equal(
@@ -831,10 +831,10 @@ describe('getTransformedRoutes', () => {
});
test('should error when redirects is invalid regex', () => {
const nowConfig = {
const vercelConfig = {
redirects: [{ source: '^/(*.)\\.html$', destination: '/file.html' }],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_redirect');
assert.equal(
@@ -846,10 +846,10 @@ describe('getTransformedRoutes', () => {
});
test('should error when redirects is invalid pattern', () => {
const nowConfig = {
const vercelConfig = {
redirects: [{ source: '/:?', destination: '/file.html' }],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_redirect');
assert.equal(
@@ -861,7 +861,7 @@ describe('getTransformedRoutes', () => {
});
test('should error when redirects defines both permanent and statusCode', () => {
const nowConfig = {
const vercelConfig = {
redirects: [
{
source: '^/both$',
@@ -871,7 +871,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_redirect');
assert.equal(
@@ -883,7 +883,7 @@ describe('getTransformedRoutes', () => {
});
test('should error when headers is invalid regex', () => {
const nowConfig = {
const vercelConfig = {
headers: [
{
source: '^/(*.)\\.html$',
@@ -896,7 +896,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_header');
assert.equal(
@@ -908,12 +908,12 @@ describe('getTransformedRoutes', () => {
});
test('should error when headers is invalid pattern', () => {
const nowConfig = {
const vercelConfig = {
headers: [
{ source: '/:?', headers: [{ key: 'x-hello', value: 'world' }] },
],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_header');
assert.equal(
@@ -925,10 +925,10 @@ describe('getTransformedRoutes', () => {
});
test('should error when rewrites is invalid regex', () => {
const nowConfig = {
const vercelConfig = {
rewrites: [{ source: '^/(*.)\\.html$', destination: '/file.html' }],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_rewrite');
assert.equal(
@@ -940,10 +940,10 @@ describe('getTransformedRoutes', () => {
});
test('should error when rewrites is invalid pattern', () => {
const nowConfig = {
const vercelConfig = {
rewrites: [{ source: '/:?', destination: '/file.html' }],
};
const { error } = getTransformedRoutes({ nowConfig });
const { error } = getTransformedRoutes(vercelConfig);
assert.notEqual(error, null);
assert.equal(error?.code, 'invalid_rewrite');
assert.equal(
@@ -955,7 +955,7 @@ describe('getTransformedRoutes', () => {
});
test('should normalize all redirects before rewrites', () => {
const nowConfig = {
const vercelConfig = {
cleanUrls: true,
rewrites: [{ source: '/v1', destination: '/v2/api.py' }],
redirects: [
@@ -967,7 +967,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const { error, routes } = getTransformedRoutes({ nowConfig });
const { error, routes } = getTransformedRoutes(vercelConfig);
const expected = [
{
src: '^/(?:(.+)/)?index(?:\\.html)?/?$',
@@ -998,7 +998,7 @@ describe('getTransformedRoutes', () => {
});
test('should validate schemas', () => {
const nowConfig = {
const vercelConfig = {
cleanUrls: true,
rewrites: [
{ source: '/page', destination: '/page.html' },
@@ -1071,22 +1071,22 @@ describe('getTransformedRoutes', () => {
],
trailingSlashSchema: false,
};
assertValid(nowConfig.cleanUrls, cleanUrlsSchema);
assertValid(nowConfig.rewrites, rewritesSchema);
assertValid(nowConfig.redirects, redirectsSchema);
assertValid(nowConfig.headers, headersSchema);
assertValid(nowConfig.trailingSlashSchema, trailingSlashSchema);
assertValid(vercelConfig.cleanUrls, cleanUrlsSchema);
assertValid(vercelConfig.rewrites, rewritesSchema);
assertValid(vercelConfig.redirects, redirectsSchema);
assertValid(vercelConfig.headers, headersSchema);
assertValid(vercelConfig.trailingSlashSchema, trailingSlashSchema);
});
test('should return null routes if no transformations are performed', () => {
const nowConfig = { routes: null };
const vercelConfig = { routes: null };
// @ts-expect-error intentionally passing invalid `routes: null`
const { routes } = getTransformedRoutes({ nowConfig });
const { routes } = getTransformedRoutes(vercelConfig);
assert.equal(routes, null);
});
test('should error when segment is defined in `destination` but not `source`', () => {
const nowConfig = {
const vercelConfig = {
redirects: [
{
source: '/iforgot/:id',
@@ -1094,7 +1094,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const { routes, error } = getTransformedRoutes({ nowConfig });
const { routes, error } = getTransformedRoutes(vercelConfig);
assert.deepEqual(routes, null);
assert.ok(
error?.message.includes(
@@ -1105,7 +1105,7 @@ describe('getTransformedRoutes', () => {
});
test('should error when segment is defined in HTTPS `destination` but not `source`', () => {
const nowConfig = {
const vercelConfig = {
redirects: [
{
source: '/iforgot/:id',
@@ -1113,7 +1113,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const { routes, error } = getTransformedRoutes({ nowConfig });
const { routes, error } = getTransformedRoutes(vercelConfig);
assert.deepEqual(routes, null);
assert.ok(
error?.message.includes(
@@ -1124,7 +1124,7 @@ describe('getTransformedRoutes', () => {
});
test('should error when segment is defined in `destination` query string but not `source`', () => {
const nowConfig = {
const vercelConfig = {
redirects: [
{
source: '/iforgot/:id',
@@ -1132,7 +1132,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const { routes, error } = getTransformedRoutes({ nowConfig });
const { routes, error } = getTransformedRoutes(vercelConfig);
assert.deepEqual(routes, null);
assert.ok(
error?.message.includes(
@@ -1143,7 +1143,7 @@ describe('getTransformedRoutes', () => {
});
test('should error when segment is defined in HTTPS `destination` query string but not `source`', () => {
const nowConfig = {
const vercelConfig = {
redirects: [
{
source: '/iforgot/:id',
@@ -1151,7 +1151,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const { routes, error } = getTransformedRoutes({ nowConfig });
const { routes, error } = getTransformedRoutes(vercelConfig);
assert.deepEqual(routes, null);
assert.ok(
error?.message.includes(
@@ -1162,7 +1162,7 @@ describe('getTransformedRoutes', () => {
});
test('should work with content-security-policy header containing URL', () => {
const nowConfig = {
const vercelConfig = {
headers: [
{
source: '/(.*)',
@@ -1201,7 +1201,7 @@ describe('getTransformedRoutes', () => {
},
],
};
const actual = getTransformedRoutes({ nowConfig });
const actual = getTransformedRoutes(vercelConfig);
assert.deepEqual(actual.routes, [
{
continue: true,

Some files were not shown because too many files have changed in this diff.