mirror of
https://github.com/LukeHagar/vercel.git
synced 2025-12-24 19:00:03 +00:00
Compare commits
49 Commits
@vercel/no
...
@vercel/no
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
0d112c848a | ||
|
|
d17abf463a | ||
|
|
440ef3ba98 | ||
|
|
38c5e93625 | ||
|
|
4d51d777fe | ||
|
|
1fee87e76f | ||
|
|
ea0e9aeaec | ||
|
|
7910f2f307 | ||
|
|
670441620f | ||
|
|
bfc01fd98f | ||
|
|
6d74b9b61a | ||
|
|
9483d49f72 | ||
|
|
6740f9b155 | ||
|
|
b663f813e1 | ||
|
|
e318a0eea5 | ||
|
|
644721a90d | ||
|
|
e109e3325a | ||
|
|
92b2fbe372 | ||
|
|
e50fe2b37c | ||
|
|
678ebbe525 | ||
|
|
10e200e0bf | ||
|
|
24c3dd282d | ||
|
|
142a397d8e | ||
|
|
0dd9a27859 | ||
|
|
b6ed28b9b1 | ||
|
|
0d034b6820 | ||
|
|
05c8be1a6d | ||
|
|
d21bb9f87e | ||
|
|
ab24444660 | ||
|
|
20080d4ae7 | ||
|
|
c32a909afc | ||
|
|
abaa700cea | ||
|
|
8ba0ce9324 | ||
|
|
4027a18337 | ||
|
|
3bad73401b | ||
|
|
50e135ea47 | ||
|
|
d05e41eeaf | ||
|
|
de63e35622 | ||
|
|
4d1ab422d3 | ||
|
|
a03cfa1040 | ||
|
|
eaae86d776 | ||
|
|
77bc00f92e | ||
|
|
19a373288f | ||
|
|
322c88536d | ||
|
|
1f259d5eb9 | ||
|
|
3759da57ab | ||
|
|
30ba68edf9 | ||
|
|
62ca2efa73 | ||
|
|
2b71ee6b42 |
64
.github/dependabot.yml
vendored
Normal file
64
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
- schedule:
|
||||
interval: 'daily'
|
||||
open-pull-requests-limit: 1
|
||||
reviewers:
|
||||
- 'trek'
|
||||
- 'TooTallNate'
|
||||
- 'EndangeredMassa'
|
||||
commit-message:
|
||||
prefix: '[framework-fixtures]'
|
||||
package-ecosystem: 'npm'
|
||||
directory: /packages/static-build/test/fixtures/angular-v17
|
||||
allow:
|
||||
- dependency-name: '@angular*'
|
||||
groups:
|
||||
core:
|
||||
patterns:
|
||||
- '@angular*'
|
||||
update-types:
|
||||
- 'minor'
|
||||
|
||||
- schedule:
|
||||
interval: 'daily'
|
||||
open-pull-requests-limit: 1
|
||||
reviewers:
|
||||
- 'trek'
|
||||
- 'TooTallNate'
|
||||
- 'EndangeredMassa'
|
||||
commit-message:
|
||||
prefix: '[framework-fixtures]'
|
||||
package-ecosystem: 'npm'
|
||||
directory: /packages/static-build/test/fixtures/astro-v4
|
||||
allow:
|
||||
- dependency-name: 'astro*'
|
||||
groups:
|
||||
core:
|
||||
patterns:
|
||||
- 'astro*'
|
||||
update-types:
|
||||
- 'minor'
|
||||
|
||||
- schedule:
|
||||
interval: 'daily'
|
||||
open-pull-requests-limit: 1
|
||||
reviewers:
|
||||
- 'trek'
|
||||
- 'TooTallNate'
|
||||
- 'EndangeredMassa'
|
||||
commit-message:
|
||||
prefix: '[framework-fixtures]'
|
||||
package-ecosystem: 'npm'
|
||||
directory: /packages/static-build/test/fixtures/hydrogen-v2023
|
||||
allow:
|
||||
- dependency-name: '@remix-run*'
|
||||
- dependency-name: '@shopify*'
|
||||
groups:
|
||||
core:
|
||||
patterns:
|
||||
- '@remix-run*'
|
||||
- '@shopify*'
|
||||
update-types:
|
||||
- 'minor'
|
||||
@@ -1,5 +1,19 @@
|
||||
# @vercel-internals/types
|
||||
|
||||
## 1.0.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
|
||||
## 1.0.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
|
||||
- @vercel/build-utils@7.6.0
|
||||
|
||||
## 1.0.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
7
internals/types/index.d.ts
vendored
7
internals/types/index.d.ts
vendored
@@ -5,7 +5,12 @@ import type { Route } from '@vercel/routing-utils';
|
||||
import { PROJECT_ENV_TARGET } from '@vercel-internals/constants';
|
||||
|
||||
export type ProjectEnvTarget = typeof PROJECT_ENV_TARGET[number];
|
||||
export type ProjectEnvType = 'plain' | 'secret' | 'encrypted' | 'system';
|
||||
export type ProjectEnvType =
|
||||
| 'plain'
|
||||
| 'secret'
|
||||
| 'encrypted'
|
||||
| 'system'
|
||||
| 'sensitive';
|
||||
|
||||
export type ProjectSettings = import('@vercel/build-utils').ProjectSettings;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "@vercel-internals/types",
|
||||
"version": "1.0.21",
|
||||
"version": "1.0.23",
|
||||
"types": "index.d.ts",
|
||||
"main": "index.d.ts",
|
||||
"files": [
|
||||
@@ -10,7 +10,7 @@
|
||||
"dependencies": {
|
||||
"@types/node": "14.14.31",
|
||||
"@vercel-internals/constants": "1.0.4",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/routing-utils": "3.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"source-map-support": "0.5.12",
|
||||
"ts-eager": "2.0.2",
|
||||
"ts-jest": "29.1.0",
|
||||
"turbo": "1.11.3",
|
||||
"turbo": "1.12.4",
|
||||
"typescript": "4.9.5"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,25 @@
|
||||
# @vercel/build-utils
|
||||
|
||||
## 7.7.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Revert "Revert "Default ruby to only currently supported version (3.2.0)"" ([#11137](https://github.com/vercel/vercel/pull/11137))
|
||||
|
||||
## 7.6.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Revert "Default ruby to only currently supported version (3.2.0)" ([#11135](https://github.com/vercel/vercel/pull/11135))
|
||||
|
||||
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
|
||||
|
||||
- [build-utils] change default package manager when no lockfile detected from `yarn` to `npm` (gated behind feature flag) ([#11131](https://github.com/vercel/vercel/pull/11131))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Update internal type for variants ([#11111](https://github.com/vercel/vercel/pull/11111))
|
||||
|
||||
## 7.5.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/build-utils",
|
||||
"version": "7.5.1",
|
||||
"version": "7.7.0",
|
||||
"license": "Apache-2.0",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.js",
|
||||
|
||||
@@ -302,7 +302,7 @@ export async function scanParentDirs(
|
||||
});
|
||||
let lockfilePath: string | undefined;
|
||||
let lockfileVersion: number | undefined;
|
||||
let cliType: CliType = 'yarn';
|
||||
let cliType: CliType;
|
||||
|
||||
const [hasYarnLock, packageLockJson, pnpmLockYaml, bunLockBin] =
|
||||
await Promise.all([
|
||||
@@ -338,6 +338,12 @@ export async function scanParentDirs(
|
||||
lockfilePath = bunLockPath;
|
||||
// TODO: read "bun-lockfile-format-v0"
|
||||
lockfileVersion = 0;
|
||||
} else {
|
||||
if (process.env.VERCEL_ENABLE_NPM_DEFAULT === '1') {
|
||||
cliType = 'npm';
|
||||
} else {
|
||||
cliType = 'yarn';
|
||||
}
|
||||
}
|
||||
|
||||
const packageJsonPath = pkgJsonPath || undefined;
|
||||
@@ -414,6 +420,14 @@ export async function runNpmInstall(
|
||||
destPath
|
||||
);
|
||||
|
||||
if (!packageJsonPath) {
|
||||
debug(
|
||||
`Skipping dependency installation because no package.json was found for ${destPath}`
|
||||
);
|
||||
runNpmInstallSema.release();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Only allow `runNpmInstall()` to run once per `package.json`
|
||||
// when doing a default install (no additional args)
|
||||
if (meta && packageJsonPath && args.length === 0) {
|
||||
|
||||
@@ -440,7 +440,9 @@ export interface Cron {
|
||||
schedule: string;
|
||||
}
|
||||
|
||||
// TODO: Proper description once complete
|
||||
/**
|
||||
* @deprecated Replaced by Variants. Remove once fully replaced.
|
||||
*/
|
||||
export interface Flag {
|
||||
key: string;
|
||||
defaultValue?: unknown;
|
||||
@@ -471,7 +473,9 @@ export interface BuildResultV2Typical {
|
||||
framework?: {
|
||||
version: string;
|
||||
};
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags?: Flag[];
|
||||
variants?: Record<string, VariantDefinition>;
|
||||
}
|
||||
|
||||
export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;
|
||||
@@ -491,3 +495,28 @@ export type ShouldServe = (
|
||||
export type StartDevServer = (
|
||||
options: StartDevServerOptions
|
||||
) => Promise<StartDevServerResult>;
|
||||
|
||||
/**
|
||||
* TODO: The following types will eventually be exported by a more
|
||||
* relevant package.
|
||||
*/
|
||||
type VariantJSONArray = ReadonlyArray<VariantJSONValue>;
|
||||
|
||||
type VariantJSONValue =
|
||||
| string
|
||||
| boolean
|
||||
| number
|
||||
| null
|
||||
| VariantJSONArray
|
||||
| { [key: string]: VariantJSONValue };
|
||||
|
||||
type VariantOption = {
|
||||
value: VariantJSONValue;
|
||||
label?: string;
|
||||
};
|
||||
|
||||
export interface VariantDefinition {
|
||||
options?: VariantOption[];
|
||||
origin?: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
4
packages/build-utils/test/fixtures/07-cross-install/api/yarn.lock
vendored
Normal file
4
packages/build-utils/test/fixtures/07-cross-install/api/yarn.lock
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
4
packages/build-utils/test/fixtures/07-cross-install/lib/yarn.lock
vendored
Normal file
4
packages/build-utils/test/fixtures/07-cross-install/lib/yarn.lock
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
4
packages/build-utils/test/fixtures/08-yarn-npm/with-yarn/yarn.lock
vendored
Normal file
4
packages/build-utils/test/fixtures/08-yarn-npm/with-yarn/yarn.lock
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
9
packages/build-utils/test/fixtures/40-no-lockfile/package.json
vendored
Normal file
9
packages/build-utils/test/fixtures/40-no-lockfile/package.json
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"build": "mkdir -p public && (printf \"npm version: \" && npm -v) > public/index.txt"
|
||||
},
|
||||
"dependencies": {
|
||||
"sharp": "0.33.2"
|
||||
}
|
||||
}
|
||||
3
packages/build-utils/test/fixtures/40-no-lockfile/probes.json
vendored
Normal file
3
packages/build-utils/test/fixtures/40-no-lockfile/probes.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"probes": [{ "path": "/", "mustContain": "npm version: 8" }]
|
||||
}
|
||||
7
packages/build-utils/test/fixtures/40-no-lockfile/vercel.json
vendored
Normal file
7
packages/build-utils/test/fixtures/40-no-lockfile/vercel.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"build": {
|
||||
"env": {
|
||||
"VERCEL_ENABLE_NPM_DEFAULT": "1"
|
||||
}
|
||||
}
|
||||
}
|
||||
46
packages/build-utils/test/unit.test.ts
vendored
46
packages/build-utils/test/unit.test.ts
vendored
@@ -576,6 +576,52 @@ it(
|
||||
ms('1m')
|
||||
);
|
||||
|
||||
it('should return cliType npm when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is set', async () => {
|
||||
const originalRepoLockfilePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'pnpm-lock.yaml'
|
||||
);
|
||||
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
|
||||
await fs.remove(originalRepoLockfilePath);
|
||||
try {
|
||||
process.env.VERCEL_ENABLE_NPM_DEFAULT = '1';
|
||||
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
|
||||
const result = await scanParentDirs(fixture);
|
||||
expect(result.cliType).toEqual('npm');
|
||||
expect(result.lockfileVersion).toEqual(undefined);
|
||||
expect(result.lockfilePath).toEqual(undefined);
|
||||
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
|
||||
} finally {
|
||||
delete process.env.VERCEL_ENABLE_NPM_DEFAULT;
|
||||
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
|
||||
}
|
||||
});
|
||||
|
||||
it('should return cliType yarn when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is not set', async () => {
|
||||
const originalRepoLockfilePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'pnpm-lock.yaml'
|
||||
);
|
||||
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
|
||||
await fs.remove(originalRepoLockfilePath);
|
||||
try {
|
||||
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
|
||||
const result = await scanParentDirs(fixture);
|
||||
expect(result.cliType).toEqual('yarn');
|
||||
expect(result.lockfileVersion).toEqual(undefined);
|
||||
expect(result.lockfilePath).toEqual(undefined);
|
||||
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
|
||||
} finally {
|
||||
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
|
||||
}
|
||||
});
|
||||
|
||||
it('should return cliType bun and correct lock file for bun v1', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '31-bun-v1-with-yarn-lock');
|
||||
const result = await scanParentDirs(fixture);
|
||||
|
||||
@@ -1,5 +1,69 @@
|
||||
# vercel
|
||||
|
||||
## 33.5.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`e109e3325`](https://github.com/vercel/vercel/commit/e109e3325ab5299da0903034175fabe72d486a4e), [`d17abf463`](https://github.com/vercel/vercel/commit/d17abf463acabf9e1e43b91200f18efd34e91f62), [`644721a90`](https://github.com/vercel/vercel/commit/644721a90da8cf98414d272be9da0a821a2ce217), [`ea0e9aeae`](https://github.com/vercel/vercel/commit/ea0e9aeaec8ddddb5a726be0d252df9cdbd84808), [`e318a0eea`](https://github.com/vercel/vercel/commit/e318a0eea55c9b8536b0874f66cfd03aca6f0adf), [`1fee87e76`](https://github.com/vercel/vercel/commit/1fee87e76f18d2f5e5524247cfce615fa1832e49), [`bfc01fd98`](https://github.com/vercel/vercel/commit/bfc01fd98f760a008d0d2e6c52b5216503b44b75), [`7910f2f30`](https://github.com/vercel/vercel/commit/7910f2f3070ff69742e845e795d4db77d598c181), [`440ef3ba9`](https://github.com/vercel/vercel/commit/440ef3ba98af8f05e7714c86c67c36dbda11e85c)]:
|
||||
- @vercel/remix-builder@2.0.20
|
||||
- @vercel/next@4.1.2
|
||||
- @vercel/node@3.0.20
|
||||
- @vercel/redwood@2.0.8
|
||||
|
||||
## 33.5.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- build: upgrade edge-runtime ([#11148](https://github.com/vercel/vercel/pull/11148))
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab), [`10e200e0b`](https://github.com/vercel/vercel/commit/10e200e0bf8f692b6740e098e0572b4e7de83850), [`678ebbe52`](https://github.com/vercel/vercel/commit/678ebbe5255766656bf2dddc574e86b2999f11c8)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
- @vercel/static-build@2.4.0
|
||||
- @vercel/node@3.0.19
|
||||
|
||||
## 33.5.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`c32a909af`](https://github.com/vercel/vercel/commit/c32a909afcedf0ee55777d5dcaecc0c8383dd8c8), [`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`d21bb9f87`](https://github.com/vercel/vercel/commit/d21bb9f87e1d837666fe8104d4e199b2590725d6), [`4027a1833`](https://github.com/vercel/vercel/commit/4027a1833718a92be74b2b3c5a4df23745d19a36), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c), [`3bad73401`](https://github.com/vercel/vercel/commit/3bad73401b4ec1f61e515965732cde8dcc052b17)]:
|
||||
- @vercel/next@4.1.1
|
||||
- @vercel/node@3.0.18
|
||||
- @vercel/redwood@2.0.7
|
||||
- @vercel/remix-builder@2.0.19
|
||||
- @vercel/build-utils@7.6.0
|
||||
- @vercel/static-build@2.3.0
|
||||
|
||||
## 33.4.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`d05e41eea`](https://github.com/vercel/vercel/commit/d05e41eeaf97a024157d2bd843782c95c39389be), [`de63e3562`](https://github.com/vercel/vercel/commit/de63e356223467447cda539ddc435a892303afc7)]:
|
||||
- @vercel/static-build@2.2.0
|
||||
|
||||
## 33.4.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Added a new option to add a sensitive environment variable ([#11033](https://github.com/vercel/vercel/pull/11033))
|
||||
|
||||
## 33.3.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Emit "filePathMap" in `vc-config.json` for `FileFsRef` instances ([#11060](https://github.com/vercel/vercel/pull/11060))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Update `vc dev` to support `Lambda` instances without `zipBuffer` ([#11080](https://github.com/vercel/vercel/pull/11080))
|
||||
|
||||
- Updated dependencies [[`322c88536`](https://github.com/vercel/vercel/commit/322c88536dfa0ba3892eb580858ee54f6b04ed3f), [`62ca2efa7`](https://github.com/vercel/vercel/commit/62ca2efa731c4df46d586b94078b2dcb1c0bb934)]:
|
||||
- @vercel/ruby@2.0.5
|
||||
- @vercel/python@4.1.1
|
||||
|
||||
## 33.2.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "33.2.0",
|
||||
"version": "33.5.2",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -31,22 +31,22 @@
|
||||
"node": ">= 16"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/fun": "1.1.0",
|
||||
"@vercel/go": "3.0.5",
|
||||
"@vercel/hydrogen": "1.0.2",
|
||||
"@vercel/next": "4.1.0",
|
||||
"@vercel/node": "3.0.17",
|
||||
"@vercel/python": "4.1.0",
|
||||
"@vercel/redwood": "2.0.6",
|
||||
"@vercel/remix-builder": "2.0.18",
|
||||
"@vercel/ruby": "2.0.4",
|
||||
"@vercel/static-build": "2.1.0",
|
||||
"@vercel/next": "4.1.2",
|
||||
"@vercel/node": "3.0.20",
|
||||
"@vercel/python": "4.1.1",
|
||||
"@vercel/redwood": "2.0.8",
|
||||
"@vercel/remix-builder": "2.0.20",
|
||||
"@vercel/ruby": "2.0.5",
|
||||
"@vercel/static-build": "2.4.0",
|
||||
"chokidar": "3.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@alex_neo/jest-expect-message": "1.0.5",
|
||||
"@edge-runtime/node-utils": "2.2.2",
|
||||
"@edge-runtime/node-utils": "2.3.0",
|
||||
"@next/env": "11.1.2",
|
||||
"@sentry/node": "5.5.0",
|
||||
"@sindresorhus/slugify": "0.11.0",
|
||||
@@ -88,8 +88,8 @@
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel-internals/constants": "1.0.4",
|
||||
"@vercel-internals/get-package-json": "1.0.0",
|
||||
"@vercel-internals/types": "1.0.21",
|
||||
"@vercel/client": "13.0.14",
|
||||
"@vercel-internals/types": "1.0.23",
|
||||
"@vercel/client": "13.1.3",
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/frameworks": "2.0.6",
|
||||
"@vercel/fs-detectors": "5.1.6",
|
||||
|
||||
@@ -22,6 +22,7 @@ import {
|
||||
Cron,
|
||||
validateNpmrc,
|
||||
Flag,
|
||||
VariantDefinition,
|
||||
} from '@vercel/build-utils';
|
||||
import {
|
||||
detectBuilders,
|
||||
@@ -95,7 +96,9 @@ interface BuildOutputConfig {
|
||||
version: string;
|
||||
};
|
||||
crons?: Cron[];
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags?: Flag[];
|
||||
variants?: Record<string, VariantDefinition>;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -553,6 +556,7 @@ async function doBuild(
|
||||
// Start flushing the file outputs to the filesystem asynchronously
|
||||
ops.push(
|
||||
writeBuildResult(
|
||||
repoRootPath,
|
||||
outputDir,
|
||||
buildResult,
|
||||
build,
|
||||
@@ -677,10 +681,13 @@ async function doBuild(
|
||||
overrides: mergedOverrides,
|
||||
framework,
|
||||
crons: mergedCrons,
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags: mergedFlags,
|
||||
};
|
||||
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });
|
||||
|
||||
await writeVariantsJson(client, buildResults.values(), outputDir);
|
||||
|
||||
const relOutputDir = relative(cwd, outputDir);
|
||||
output.print(
|
||||
`${prependEmoji(
|
||||
@@ -825,6 +832,51 @@ function mergeFlags(
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes the build output and writes all the variants into the `variants.json`
|
||||
* file. It'll skip variants that already exist.
|
||||
*/
|
||||
async function writeVariantsJson(
|
||||
{ output }: Client,
|
||||
buildResults: Iterable<BuildResult | BuildOutputConfig>,
|
||||
outputDir: string
|
||||
): Promise<void> {
|
||||
const variantsFilePath = join(outputDir, 'variants.json');
|
||||
|
||||
let hasVariants = true;
|
||||
|
||||
const variants = (await fs.readJSON(variantsFilePath).catch(error => {
|
||||
if (error.code === 'ENOENT') {
|
||||
hasVariants = false;
|
||||
return { definitions: {} };
|
||||
}
|
||||
|
||||
throw error;
|
||||
})) as { definitions: Record<string, VariantDefinition> };
|
||||
|
||||
for (const result of buildResults) {
|
||||
if (!('variants' in result) || !result.variants) continue;
|
||||
|
||||
for (const [key, defintion] of Object.entries(result.variants)) {
|
||||
if (result.variants[key]) {
|
||||
output.warn(
|
||||
`The variant "${key}" was found multiple times. Only its first occurrence will be considered.`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
hasVariants = true;
|
||||
variants.definitions[key] = defintion;
|
||||
}
|
||||
}
|
||||
|
||||
// Only create the file when there are variants to write,
|
||||
// or when the file already exists.
|
||||
if (hasVariants) {
|
||||
await fs.writeJSON(variantsFilePath, variants, { spaces: 2 });
|
||||
}
|
||||
}
|
||||
|
||||
async function writeBuildJson(buildsJson: BuildsManifest, outputDir: string) {
|
||||
await fs.writeJSON(join(outputDir, 'builds.json'), buildsJson, { spaces: 2 });
|
||||
}
|
||||
|
||||
5
packages/cli/src/commands/env/add.ts
vendored
5
packages/cli/src/commands/env/add.ts
vendored
@@ -19,6 +19,7 @@ import { isAPIError } from '../../util/errors-ts';
|
||||
|
||||
type Options = {
|
||||
'--debug': boolean;
|
||||
'--sensitive': boolean;
|
||||
};
|
||||
|
||||
export default async function add(
|
||||
@@ -144,6 +145,8 @@ export default async function add(
|
||||
envGitBranch = inputValue || '';
|
||||
}
|
||||
|
||||
const type = opts['--sensitive'] ? 'sensitive' : 'encrypted';
|
||||
|
||||
const addStamp = stamp();
|
||||
try {
|
||||
output.spinner('Saving');
|
||||
@@ -151,7 +154,7 @@ export default async function add(
|
||||
output,
|
||||
client,
|
||||
project.id,
|
||||
'encrypted',
|
||||
type,
|
||||
envName,
|
||||
envValue,
|
||||
envTargets,
|
||||
|
||||
15
packages/cli/src/commands/env/command.ts
vendored
15
packages/cli/src/commands/env/command.ts
vendored
@@ -34,7 +34,16 @@ export const envCommand: Command = {
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
options: [],
|
||||
options: [
|
||||
{
|
||||
name: 'sensitive',
|
||||
description: 'Add a sensitive Environment Variable',
|
||||
shorthand: null,
|
||||
type: 'string',
|
||||
deprecated: false,
|
||||
multi: false,
|
||||
},
|
||||
],
|
||||
examples: [],
|
||||
},
|
||||
{
|
||||
@@ -117,6 +126,10 @@ export const envCommand: Command = {
|
||||
`${packageName} env add DB_PASS production`,
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Add a sensitive Environment Variable',
|
||||
value: `${packageName} env add API_TOKEN --sensitive`,
|
||||
},
|
||||
{
|
||||
name: 'Add a new variable for a specific Environment and Git Branch',
|
||||
value: [
|
||||
|
||||
1
packages/cli/src/commands/env/index.ts
vendored
1
packages/cli/src/commands/env/index.ts
vendored
@@ -34,6 +34,7 @@ export default async function main(client: Client) {
|
||||
'-y': '--yes',
|
||||
'--environment': String,
|
||||
'--git-branch': String,
|
||||
'--sensitive': Boolean,
|
||||
});
|
||||
} catch (error) {
|
||||
handleError(error);
|
||||
|
||||
@@ -14,6 +14,7 @@ import {
|
||||
BuildResultV2,
|
||||
BuildResultV3,
|
||||
File,
|
||||
Files,
|
||||
FileFsRef,
|
||||
BuilderV2,
|
||||
BuilderV3,
|
||||
@@ -45,6 +46,7 @@ interface FunctionConfiguration {
|
||||
}
|
||||
|
||||
export async function writeBuildResult(
|
||||
repoRootPath: string,
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV2 | BuildResultV3,
|
||||
build: Builder,
|
||||
@@ -55,6 +57,7 @@ export async function writeBuildResult(
|
||||
const { version } = builder;
|
||||
if (typeof version !== 'number' || version === 2) {
|
||||
return writeBuildResultV2(
|
||||
repoRootPath,
|
||||
outputDir,
|
||||
buildResult as BuildResultV2,
|
||||
build,
|
||||
@@ -62,6 +65,7 @@ export async function writeBuildResult(
|
||||
);
|
||||
} else if (version === 3) {
|
||||
return writeBuildResultV3(
|
||||
repoRootPath,
|
||||
outputDir,
|
||||
buildResult as BuildResultV3,
|
||||
build,
|
||||
@@ -107,6 +111,7 @@ function stripDuplicateSlashes(path: string): string {
|
||||
* the filesystem.
|
||||
*/
|
||||
async function writeBuildResultV2(
|
||||
repoRootPath: string,
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV2,
|
||||
build: Builder,
|
||||
@@ -136,6 +141,7 @@ async function writeBuildResultV2(
|
||||
const normalizedPath = stripDuplicateSlashes(path);
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(
|
||||
repoRootPath,
|
||||
outputDir,
|
||||
output,
|
||||
normalizedPath,
|
||||
@@ -150,6 +156,7 @@ async function writeBuildResultV2(
|
||||
}
|
||||
|
||||
await writeLambda(
|
||||
repoRootPath,
|
||||
outputDir,
|
||||
output.lambda,
|
||||
normalizedPath,
|
||||
@@ -210,6 +217,7 @@ async function writeBuildResultV2(
|
||||
);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(
|
||||
repoRootPath,
|
||||
outputDir,
|
||||
output,
|
||||
normalizedPath,
|
||||
@@ -231,6 +239,7 @@ async function writeBuildResultV2(
|
||||
* the filesystem.
|
||||
*/
|
||||
async function writeBuildResultV3(
|
||||
repoRootPath: string,
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV3,
|
||||
build: Builder,
|
||||
@@ -254,9 +263,15 @@ async function writeBuildResultV3(
|
||||
build.config?.zeroConfig ? src.substring(0, src.length - ext.length) : src
|
||||
);
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(outputDir, output, path, functionConfiguration);
|
||||
await writeLambda(
|
||||
repoRootPath,
|
||||
outputDir,
|
||||
output,
|
||||
path,
|
||||
functionConfiguration
|
||||
);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(outputDir, output, path);
|
||||
await writeEdgeFunction(repoRootPath, outputDir, output, path);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unsupported output type: "${(output as any).type}" for ${build.src}`
|
||||
@@ -364,6 +379,7 @@ async function writeFunctionSymlink(
|
||||
* @param existingFunctions (optional) Map of `Lambda`/`EdgeFunction` instances that have previously been written
|
||||
*/
|
||||
async function writeEdgeFunction(
|
||||
repoRootPath: string,
|
||||
outputDir: string,
|
||||
edgeFunction: EdgeFunction,
|
||||
path: string,
|
||||
@@ -387,12 +403,17 @@ async function writeEdgeFunction(
|
||||
|
||||
await fs.mkdirp(dest);
|
||||
const ops: Promise<any>[] = [];
|
||||
ops.push(download(edgeFunction.files, dest));
|
||||
const { files, filePathMap } = filesWithoutFsRefs(
|
||||
edgeFunction.files,
|
||||
repoRootPath
|
||||
);
|
||||
ops.push(download(files, dest));
|
||||
|
||||
const config = {
|
||||
runtime: 'edge',
|
||||
...edgeFunction,
|
||||
entrypoint: normalizePath(edgeFunction.entrypoint),
|
||||
filePathMap,
|
||||
files: undefined,
|
||||
type: undefined,
|
||||
};
|
||||
@@ -415,6 +436,7 @@ async function writeEdgeFunction(
|
||||
* @param existingFunctions (optional) Map of `Lambda`/`EdgeFunction` instances that have previously been written
|
||||
*/
|
||||
async function writeLambda(
|
||||
repoRootPath: string,
|
||||
outputDir: string,
|
||||
lambda: Lambda,
|
||||
path: string,
|
||||
@@ -434,9 +456,12 @@ async function writeLambda(
|
||||
|
||||
await fs.mkdirp(dest);
|
||||
const ops: Promise<any>[] = [];
|
||||
let filePathMap: Record<string, string> | undefined;
|
||||
if (lambda.files) {
|
||||
// `files` is defined
|
||||
ops.push(download(lambda.files, dest));
|
||||
const f = filesWithoutFsRefs(lambda.files, repoRootPath);
|
||||
filePathMap = f.filePathMap;
|
||||
ops.push(download(f.files, dest));
|
||||
} else if (lambda.zipBuffer) {
|
||||
// Builders that use the deprecated `createLambda()` might only have `zipBuffer`
|
||||
ops.push(unzip(lambda.zipBuffer, dest));
|
||||
@@ -452,6 +477,7 @@ async function writeLambda(
|
||||
handler: normalizePath(lambda.handler),
|
||||
memory,
|
||||
maxDuration,
|
||||
filePathMap,
|
||||
type: undefined,
|
||||
files: undefined,
|
||||
zipBuffer: undefined,
|
||||
@@ -559,3 +585,25 @@ export async function* findDirs(
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes the `FileFsRef` instances from the `Files` object
|
||||
* and returns them in a JSON serializable map of repo root
|
||||
* relative paths to Lambda destination paths.
|
||||
*/
|
||||
function filesWithoutFsRefs(
|
||||
files: Files,
|
||||
repoRootPath: string
|
||||
): { files: Files; filePathMap?: Record<string, string> } {
|
||||
let filePathMap: Record<string, string> | undefined;
|
||||
const out: Files = {};
|
||||
for (const [path, file] of Object.entries(files)) {
|
||||
if (file.type === 'FileFsRef') {
|
||||
if (!filePathMap) filePathMap = {};
|
||||
filePathMap[path] = relative(repoRootPath, file.fsPath);
|
||||
} else {
|
||||
out[path] = file;
|
||||
}
|
||||
}
|
||||
return { files: out, filePathMap };
|
||||
}
|
||||
|
||||
@@ -42,6 +42,18 @@ async function processMessage(message) {
|
||||
// structure to JSON" errors, so delete the property...
|
||||
delete result.childProcesses;
|
||||
|
||||
if (builder.version === 3) {
|
||||
if (result.output.type === 'Lambda') {
|
||||
result.output.zipBuffer = await result.output.createZip();
|
||||
}
|
||||
} else {
|
||||
for (const output of Object.values(result.output)) {
|
||||
if (output.type === 'Lambda') {
|
||||
output.zipBuffer = await output.createZip();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
process.send({ type: 'buildResult', result });
|
||||
}
|
||||
|
||||
|
||||
@@ -361,8 +361,10 @@ export async function executeBuild(
|
||||
await oldAsset.fn.destroy();
|
||||
}
|
||||
|
||||
const ZipFile = asset.zipBuffer || (await asset.createZip());
|
||||
|
||||
asset.fn = await createFunction({
|
||||
Code: { ZipFile: asset.zipBuffer },
|
||||
Code: { ZipFile },
|
||||
Handler: asset.handler,
|
||||
Runtime: asset.runtime,
|
||||
MemorySize: asset.memory || 3008,
|
||||
|
||||
@@ -382,6 +382,9 @@ module.exports = async function prepare(session, binaryPath, tmpFixturesDir) {
|
||||
'index.html': 'Hello',
|
||||
'vercel.json': '{"builds":[{"src":"*.html","use":"@vercel/static"}]}',
|
||||
},
|
||||
'project-sensitive-env-vars': {
|
||||
'package.json': '{}',
|
||||
},
|
||||
'dev-proxy-headers-and-env': {
|
||||
'package.json': JSON.stringify({}),
|
||||
'server.js': `require('http').createServer((req, res) => {
|
||||
|
||||
49
packages/cli/test/integration-2.test.ts
vendored
49
packages/cli/test/integration-2.test.ts
vendored
@@ -681,6 +681,53 @@ test('vercel env with unknown `VERCEL_ORG_ID` or `VERCEL_PROJECT_ID` should erro
|
||||
expect(output.stderr).toContain('Project not found');
|
||||
});
|
||||
|
||||
test('add a sensitive env var', async () => {
|
||||
const dir = await setupE2EFixture('project-sensitive-env-vars');
|
||||
const projectName = `project-sensitive-env-vars-${
|
||||
Math.random().toString(36).split('.')[1]
|
||||
}`;
|
||||
|
||||
// remove previously linked project if it exists
|
||||
await remove(path.join(dir, '.vercel'));
|
||||
|
||||
const vc = execCli(binaryPath, ['link'], {
|
||||
cwd: dir,
|
||||
env: {
|
||||
FORCE_TTY: '1',
|
||||
},
|
||||
});
|
||||
|
||||
await setupProject(vc, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
await vc;
|
||||
|
||||
const link = require(path.join(dir, '.vercel/project.json'));
|
||||
|
||||
const addEnvCommand = execCli(
|
||||
binaryPath,
|
||||
['env', 'add', 'envVarName', 'production', '--sensitive'],
|
||||
{
|
||||
env: {
|
||||
VERCEL_ORG_ID: link.orgId,
|
||||
VERCEL_PROJECT_ID: link.projectId,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
await waitForPrompt(addEnvCommand, /What’s the value of [^?]+\?/);
|
||||
addEnvCommand.stdin?.write('test\n');
|
||||
|
||||
const output = await addEnvCommand;
|
||||
|
||||
expect(output.exitCode, formatOutput(output)).toBe(0);
|
||||
expect(output.stderr).toContain(
|
||||
'Added Environment Variable envVarName to Project'
|
||||
);
|
||||
});
|
||||
|
||||
test('whoami with `VERCEL_ORG_ID` should favor `--scope` and should error', async () => {
|
||||
if (!token) {
|
||||
throw new Error('Shared state "token" not set.');
|
||||
@@ -857,7 +904,7 @@ test('deploy pnpm twice using pnp and symlink=false', async () => {
|
||||
page = await fetch(stdout);
|
||||
text = await page.text();
|
||||
|
||||
expect(text).toBe('cache exists\n');
|
||||
expect(text).toContain('cache exists\n');
|
||||
});
|
||||
|
||||
test('reject deploying with wrong team .vercel config', async () => {
|
||||
|
||||
@@ -1,5 +1,31 @@
|
||||
# @vercel/client
|
||||
|
||||
## 13.1.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
|
||||
## 13.1.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
|
||||
- @vercel/build-utils@7.6.0
|
||||
|
||||
## 13.1.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- More helpful error message when `vc deploy --prebuilt` has missing files ([#11105](https://github.com/vercel/vercel/pull/11105))
|
||||
|
||||
## 13.1.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Upload files referenced by "filePathMap" during `vc deploy --prebuilt` ([#11077](https://github.com/vercel/vercel/pull/11077))
|
||||
|
||||
## 13.0.14
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "13.0.14",
|
||||
"version": "13.1.3",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -37,7 +37,8 @@
|
||||
"typescript": "4.9.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { lstatSync } from 'fs-extra';
|
||||
import { isAbsolute, join, relative } from 'path';
|
||||
import { isAbsolute, join, relative, sep } from 'path';
|
||||
import { hash, hashes, mapToObject } from './utils/hashes';
|
||||
import { upload } from './upload';
|
||||
import { buildFileTree, createDebug } from './utils';
|
||||
import { DeploymentError } from './errors';
|
||||
import { isErrnoException } from '@vercel/error-utils';
|
||||
import {
|
||||
VercelClientOptions,
|
||||
DeploymentOptions,
|
||||
@@ -90,27 +91,43 @@ export default function buildCreateDeployment() {
|
||||
|
||||
let files;
|
||||
|
||||
if (clientOptions.archive === 'tgz') {
|
||||
debug('Packing tarball');
|
||||
const tarStream = tar
|
||||
.pack(workPath, {
|
||||
entries: fileList.map(file => relative(workPath, file)),
|
||||
})
|
||||
.pipe(createGzip());
|
||||
const tarBuffer = await streamToBuffer(tarStream);
|
||||
debug('Packed tarball');
|
||||
files = new Map([
|
||||
[
|
||||
hash(tarBuffer),
|
||||
{
|
||||
names: [join(workPath, '.vercel/source.tgz')],
|
||||
data: tarBuffer,
|
||||
mode: 0o666,
|
||||
},
|
||||
],
|
||||
]);
|
||||
} else {
|
||||
files = await hashes(fileList);
|
||||
try {
|
||||
if (clientOptions.archive === 'tgz') {
|
||||
debug('Packing tarball');
|
||||
const tarStream = tar
|
||||
.pack(workPath, {
|
||||
entries: fileList.map(file => relative(workPath, file)),
|
||||
})
|
||||
.pipe(createGzip());
|
||||
const tarBuffer = await streamToBuffer(tarStream);
|
||||
debug('Packed tarball');
|
||||
files = new Map([
|
||||
[
|
||||
hash(tarBuffer),
|
||||
{
|
||||
names: [join(workPath, '.vercel/source.tgz')],
|
||||
data: tarBuffer,
|
||||
mode: 0o666,
|
||||
},
|
||||
],
|
||||
]);
|
||||
} else {
|
||||
files = await hashes(fileList);
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
if (
|
||||
clientOptions.prebuilt &&
|
||||
isErrnoException(err) &&
|
||||
err.code === 'ENOENT' &&
|
||||
err.path
|
||||
) {
|
||||
const errPath = relative(workPath, err.path);
|
||||
err.message = `File does not exist: "${relative(workPath, errPath)}"`;
|
||||
if (errPath.split(sep).includes('node_modules')) {
|
||||
err.message = `Please ensure project dependencies have been installed:\n${err.message}`;
|
||||
}
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
|
||||
debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`);
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { FilesMap } from './hashes';
|
||||
import { FetchOptions } from '@zeit/fetch';
|
||||
import { nodeFetch, zeitFetch } from './fetch';
|
||||
import { join, sep, relative } from 'path';
|
||||
import { join, sep, relative, basename } from 'path';
|
||||
import { URL } from 'url';
|
||||
import ignore from 'ignore';
|
||||
import { pkgVersion } from '../pkg';
|
||||
@@ -109,6 +109,29 @@ export async function buildFileTree(
|
||||
return ignored;
|
||||
};
|
||||
fileList = await readdir(path, [ignores]);
|
||||
|
||||
if (prebuilt) {
|
||||
// Traverse over the `.vc-config.json` files and include
|
||||
// the files referenced by the "filePathMap" properties
|
||||
const refs = new Set<string>();
|
||||
const vcConfigFilePaths = fileList.filter(
|
||||
file => basename(file) === '.vc-config.json'
|
||||
);
|
||||
await Promise.all(
|
||||
vcConfigFilePaths.map(async p => {
|
||||
const configJson = await readFile(p, 'utf8');
|
||||
const config = JSON.parse(configJson);
|
||||
if (!config.filePathMap) return;
|
||||
for (const v of Object.values(config.filePathMap) as string[]) {
|
||||
refs.add(join(path, v));
|
||||
}
|
||||
})
|
||||
);
|
||||
if (refs.size > 0) {
|
||||
fileList = fileList.concat(Array.from(refs));
|
||||
}
|
||||
}
|
||||
|
||||
debug(`Found ${fileList.length} files in the specified directory`);
|
||||
} else if (Array.isArray(path)) {
|
||||
// Array of file paths
|
||||
|
||||
1
packages/client/tests/fixtures/file-system-api/.gitignore
vendored
Normal file
1
packages/client/tests/fixtures/file-system-api/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
!/.vercel
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"filePathMap": {
|
||||
"node_modules/another/index.js": "node_modules/another/index.js"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"filePathMap": {
|
||||
"node_modules/example/index.js": "node_modules/example/index.js"
|
||||
}
|
||||
}
|
||||
@@ -109,7 +109,7 @@ describe('buildFileTree()', () => {
|
||||
normalizeWindowsPaths(fileList).sort()
|
||||
);
|
||||
|
||||
const expectedIgnoreList = ['.vercel'];
|
||||
const expectedIgnoreList = ['.gitignore', '.vercel'];
|
||||
expect(normalizeWindowsPaths(expectedIgnoreList).sort()).toEqual(
|
||||
normalizeWindowsPaths(ignoreList).sort()
|
||||
);
|
||||
@@ -124,14 +124,18 @@ describe('buildFileTree()', () => {
|
||||
);
|
||||
|
||||
const expectedFileList = toAbsolutePaths(cwd, [
|
||||
'.vercel/output/functions/api/another.func/.vc-config.json',
|
||||
'.vercel/output/functions/api/example.func/.vc-config.json',
|
||||
'.vercel/output/static/baz.txt',
|
||||
'.vercel/output/static/sub/qux.txt',
|
||||
'node_modules/another/index.js',
|
||||
'node_modules/example/index.js',
|
||||
]);
|
||||
expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual(
|
||||
normalizeWindowsPaths(fileList).sort()
|
||||
);
|
||||
|
||||
const expectedIgnoreList = ['foo.txt', 'sub'];
|
||||
const expectedIgnoreList = ['.gitignore', 'foo.txt', 'sub'];
|
||||
expect(normalizeWindowsPaths(expectedIgnoreList).sort()).toEqual(
|
||||
normalizeWindowsPaths(ignoreList).sort()
|
||||
);
|
||||
|
||||
@@ -37,7 +37,7 @@
|
||||
"@types/minimatch": "3.0.5",
|
||||
"@types/node": "14.18.33",
|
||||
"@types/semver": "7.3.10",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"jest-junit": "16.0.0",
|
||||
"typescript": "4.9.5"
|
||||
}
|
||||
|
||||
@@ -1,5 +1,21 @@
|
||||
# @vercel/gatsby-plugin-vercel-builder
|
||||
|
||||
## 2.0.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
|
||||
## 2.0.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [gatsby-plugin-vercel-builder] use --keep-names esbuild flag ([#11117](https://github.com/vercel/vercel/pull/11117))
|
||||
|
||||
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
|
||||
- @vercel/build-utils@7.6.0
|
||||
|
||||
## 2.0.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/gatsby-plugin-vercel-builder",
|
||||
"version": "2.0.16",
|
||||
"version": "2.0.18",
|
||||
"main": "dist/index.js",
|
||||
"files": [
|
||||
"dist",
|
||||
@@ -20,7 +20,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@sinclair/typebox": "0.25.24",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"esbuild": "0.14.47",
|
||||
"etag": "1.8.1",
|
||||
|
||||
@@ -35,6 +35,9 @@ export const writeHandler = async ({
|
||||
platform: 'node',
|
||||
bundle: true,
|
||||
minify: true,
|
||||
// prevents renaming edge cases from causing failures like:
|
||||
// https://github.com/node-fetch/node-fetch/issues/784
|
||||
keepNames: true,
|
||||
define: {
|
||||
'process.env.NODE_ENV': "'production'",
|
||||
vercel_pathPrefix: JSON.stringify(prefix),
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "6.1.5",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"async-retry": "1.3.3",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "14.18.33",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"execa": "3.2.0",
|
||||
"fs-extra": "11.1.0",
|
||||
"jest-junit": "16.0.0"
|
||||
|
||||
@@ -1,5 +1,31 @@
|
||||
# @vercel/next
|
||||
|
||||
## 4.1.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Update experimental bundle flag ([#11187](https://github.com/vercel/vercel/pull/11187))
|
||||
|
||||
- [next] Add flag for experimental grouping ([#11177](https://github.com/vercel/vercel/pull/11177))
|
||||
|
||||
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
|
||||
|
||||
- fix: missing experimental field ([#11184](https://github.com/vercel/vercel/pull/11184))
|
||||
|
||||
## 4.1.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
|
||||
|
||||
- Load common chunks on module initialization ([#11126](https://github.com/vercel/vercel/pull/11126))
|
||||
|
||||
- Fix index normalizing for app outputs ([#11099](https://github.com/vercel/vercel/pull/11099))
|
||||
|
||||
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
|
||||
|
||||
- Fix rewrite RSC handling with trailingSlash ([#11107](https://github.com/vercel/vercel/pull/11107))
|
||||
|
||||
## 4.1.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "4.1.0",
|
||||
"version": "4.1.2",
|
||||
"license": "Apache-2.0",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -23,7 +23,7 @@
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@vercel/nft": "0.26.2"
|
||||
"@vercel/nft": "0.26.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/aws-lambda": "8.10.19",
|
||||
@@ -40,7 +40,7 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
|
||||
@@ -511,7 +511,7 @@ export const build: BuildV2 = async ({
|
||||
entryPath,
|
||||
outputDirectory
|
||||
);
|
||||
const omittedPrerenderRoutes = new Set(
|
||||
const omittedPrerenderRoutes: ReadonlySet<string> = new Set(
|
||||
Object.keys(prerenderManifest.omittedRoutes)
|
||||
);
|
||||
|
||||
|
||||
@@ -47,7 +47,7 @@ import {
|
||||
UnwrapPromise,
|
||||
getOperationType,
|
||||
FunctionsConfigManifestV1,
|
||||
VariantsManifest,
|
||||
VariantsManifestLegacy,
|
||||
RSC_CONTENT_TYPE,
|
||||
RSC_PREFETCH_SUFFIX,
|
||||
normalizePrefetches,
|
||||
@@ -70,6 +70,30 @@ const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';
|
||||
const EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION = 'v12.2.0';
|
||||
const CORRECTED_MANIFESTS_VERSION = 'v12.2.0';
|
||||
|
||||
// Ideally this should be in a Next.js manifest so we can change it in
|
||||
// the future but this also allows us to improve existing versions
|
||||
const PRELOAD_CHUNKS = {
|
||||
APP_ROUTER_PAGES: [
|
||||
'.next/server/webpack-runtime.js',
|
||||
'next/dist/client/components/action-async-storage.external.js',
|
||||
'next/dist/client/components/request-async-storage.external.js',
|
||||
'next/dist/client/components/static-generation-async-storage.external.js',
|
||||
'next/dist/compiled/next-server/app-page.runtime.prod.js',
|
||||
],
|
||||
APP_ROUTER_HANDLER: [
|
||||
'.next/server/webpack-runtime.js',
|
||||
'next/dist/compiled/next-server/app-route.runtime.prod.js',
|
||||
],
|
||||
PAGES_ROUTER_PAGES: [
|
||||
'.next/server/webpack-runtime.js',
|
||||
'next/dist/compiled/next-server/pages.runtime.prod.js',
|
||||
],
|
||||
PAGES_ROUTER_API: [
|
||||
'.next/server/webpack-api-runtime.js',
|
||||
'next/dist/compiled/next-server/pages-api.runtime.prod.js',
|
||||
],
|
||||
};
|
||||
|
||||
// related PR: https://github.com/vercel/next.js/pull/52997
|
||||
// and https://github.com/vercel/next.js/pull/56318
|
||||
const BUNDLED_SERVER_NEXT_VERSION = 'v13.5.4';
|
||||
@@ -127,7 +151,7 @@ export async function serverBuild({
|
||||
pagesDir: string;
|
||||
baseDir: string;
|
||||
canUsePreviewMode: boolean;
|
||||
omittedPrerenderRoutes: Set<string>;
|
||||
omittedPrerenderRoutes: ReadonlySet<string>;
|
||||
localePrefixed404: boolean;
|
||||
staticPages: { [key: string]: FileFsRef };
|
||||
lambdaAppPaths: { [key: string]: FileFsRef };
|
||||
@@ -158,10 +182,14 @@ export async function serverBuild({
|
||||
imagesManifest?: NextImagesManifest;
|
||||
prerenderManifest: NextPrerenderedRoutes;
|
||||
requiredServerFilesManifest: NextRequiredServerFilesManifest;
|
||||
variantsManifest: VariantsManifest | null;
|
||||
variantsManifest: VariantsManifestLegacy | null;
|
||||
}): Promise<BuildResult> {
|
||||
lambdaPages = Object.assign({}, lambdaPages, lambdaAppPaths);
|
||||
|
||||
const experimentalAllowBundling = Boolean(
|
||||
process.env.NEXT_EXPERIMENTAL_FUNCTION_BUNDLING
|
||||
);
|
||||
|
||||
const lambdas: { [key: string]: Lambda } = {};
|
||||
const prerenders: { [key: string]: Prerender } = {};
|
||||
const lambdaPageKeys = Object.keys(lambdaPages);
|
||||
@@ -217,7 +245,7 @@ export async function serverBuild({
|
||||
for (const rewrite of afterFilesRewrites) {
|
||||
if (rewrite.src && rewrite.dest) {
|
||||
rewrite.src = rewrite.src.replace(
|
||||
'(?:/)?',
|
||||
/\/?\(\?:\/\)\?/,
|
||||
'(?<rscsuff>(\\.prefetch)?\\.rsc)?(?:/)?'
|
||||
);
|
||||
let destQueryIndex = rewrite.dest.indexOf('?');
|
||||
@@ -336,7 +364,7 @@ export async function serverBuild({
|
||||
experimentalPPRRoutes.add(route);
|
||||
}
|
||||
|
||||
const prerenderRoutes = new Set<string>([
|
||||
const prerenderRoutes: ReadonlySet<string> = new Set<string>([
|
||||
...(canUsePreviewMode ? omittedPrerenderRoutes : []),
|
||||
...Object.keys(prerenderManifest.blockingFallbackRoutes),
|
||||
...Object.keys(prerenderManifest.fallbackRoutes),
|
||||
@@ -744,7 +772,7 @@ export async function serverBuild({
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
let traceResult: NodeFileTraceResult | undefined;
|
||||
let parentFilesMap: Map<string, Set<string>> | undefined;
|
||||
let parentFilesMap: ReadonlyMap<string, Set<string>> | undefined;
|
||||
|
||||
if (pathsToTrace.length > 0) {
|
||||
traceResult = await nodeFileTrace(pathsToTrace, {
|
||||
@@ -859,6 +887,7 @@ export async function serverBuild({
|
||||
const pageExtensions = requiredServerFilesManifest.config?.pageExtensions;
|
||||
|
||||
const pageLambdaGroups = await getPageLambdaGroups({
|
||||
experimentalAllowBundling,
|
||||
entryPath: projectDir,
|
||||
config,
|
||||
functionsConfigManifest,
|
||||
@@ -880,6 +909,7 @@ export async function serverBuild({
|
||||
}
|
||||
|
||||
const appRouterLambdaGroups = await getPageLambdaGroups({
|
||||
experimentalAllowBundling,
|
||||
entryPath: projectDir,
|
||||
config,
|
||||
functionsConfigManifest,
|
||||
@@ -898,6 +928,7 @@ export async function serverBuild({
|
||||
});
|
||||
|
||||
const appRouteHandlersLambdaGroups = await getPageLambdaGroups({
|
||||
experimentalAllowBundling,
|
||||
entryPath: projectDir,
|
||||
config,
|
||||
functionsConfigManifest,
|
||||
@@ -1068,9 +1099,58 @@ export async function serverBuild({
|
||||
}
|
||||
}
|
||||
|
||||
let launcherData = group.isAppRouter ? appLauncher : launcher;
|
||||
let preloadChunks: string[] = [];
|
||||
|
||||
if (process.env.VERCEL_NEXT_PRELOAD_COMMON === '1') {
|
||||
const nextPackageDir = path.dirname(
|
||||
resolveFrom(projectDir, 'next/package.json')
|
||||
);
|
||||
|
||||
if (group.isPages) {
|
||||
preloadChunks = PRELOAD_CHUNKS.PAGES_ROUTER_PAGES;
|
||||
} else if (group.isApiLambda) {
|
||||
preloadChunks = PRELOAD_CHUNKS.PAGES_ROUTER_API;
|
||||
} else if (group.isAppRouter && !group.isAppRouteHandler) {
|
||||
preloadChunks = PRELOAD_CHUNKS.APP_ROUTER_PAGES;
|
||||
} else if (group.isAppRouteHandler) {
|
||||
preloadChunks = PRELOAD_CHUNKS.APP_ROUTER_HANDLER;
|
||||
}
|
||||
const normalizedPreloadChunks: string[] = [];
|
||||
|
||||
for (const preloadChunk of preloadChunks) {
|
||||
const absoluteChunk = preloadChunk.startsWith('.next')
|
||||
? path.join(projectDir, preloadChunk)
|
||||
: path.join(nextPackageDir, '..', preloadChunk);
|
||||
|
||||
// ensure the chunks are actually in this layer
|
||||
if (
|
||||
group.pseudoLayer[
|
||||
path.join('.', path.relative(baseDir, absoluteChunk))
|
||||
]
|
||||
) {
|
||||
normalizedPreloadChunks.push(
|
||||
// relative files need to be prefixed with ./ for require
|
||||
preloadChunk.startsWith('.next')
|
||||
? `./${preloadChunk}`
|
||||
: preloadChunk
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (normalizedPreloadChunks.length > 0) {
|
||||
launcherData = launcherData.replace(
|
||||
'// @preserve next-server-preload-target',
|
||||
normalizedPreloadChunks
|
||||
.map(name => `require('${name}');`)
|
||||
.join('\n')
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const launcherFiles: { [name: string]: FileFsRef | FileBlob } = {
|
||||
[path.join(path.relative(baseDir, projectDir), '___next_launcher.cjs')]:
|
||||
new FileBlob({ data: group.isAppRouter ? appLauncher : launcher }),
|
||||
new FileBlob({ data: launcherData }),
|
||||
};
|
||||
const operationType = getOperationType({ group, prerenderManifest });
|
||||
|
||||
@@ -1090,6 +1170,7 @@ export async function serverBuild({
|
||||
maxDuration: group.maxDuration,
|
||||
isStreaming: group.isStreaming,
|
||||
nextVersion,
|
||||
experimentalAllowBundling,
|
||||
};
|
||||
|
||||
const lambda = await createLambdaFromPseudoLayers(options);
|
||||
@@ -1128,11 +1209,11 @@ export async function serverBuild({
|
||||
);
|
||||
});
|
||||
}
|
||||
let outputName = path.posix.join(entryDirectory, pageNoExt);
|
||||
|
||||
let outputName = normalizeIndexOutput(
|
||||
path.posix.join(entryDirectory, pageNoExt),
|
||||
true
|
||||
);
|
||||
if (!group.isAppRouter && !group.isAppRouteHandler) {
|
||||
outputName = normalizeIndexOutput(outputName, true);
|
||||
}
|
||||
|
||||
// If this is a PPR page, then we should prefix the output name.
|
||||
if (isPPR) {
|
||||
@@ -1443,9 +1524,10 @@ export async function serverBuild({
|
||||
continue;
|
||||
}
|
||||
|
||||
const pathname = normalizeIndexOutput(
|
||||
path.posix.join('./', entryDirectory, route === '/' ? '/index' : route),
|
||||
true
|
||||
const pathname = path.posix.join(
|
||||
'./',
|
||||
entryDirectory,
|
||||
route === '/' ? '/index' : route
|
||||
);
|
||||
|
||||
if (lambdas[pathname]) {
|
||||
|
||||
@@ -23,6 +23,8 @@ if (process.env.NODE_ENV !== 'production' && region !== 'dev1') {
|
||||
// eslint-disable-next-line
|
||||
const NextServer = require('__NEXT_SERVER_PATH__').default;
|
||||
|
||||
// @preserve next-server-preload-target
|
||||
|
||||
// __NEXT_CONFIG__ value is injected
|
||||
declare const __NEXT_CONFIG__: any;
|
||||
const conf = __NEXT_CONFIG__;
|
||||
|
||||
@@ -307,14 +307,14 @@ export async function getRoutesManifest(
export async function getDynamicRoutes(
entryPath: string,
entryDirectory: string,
dynamicPages: string[],
dynamicPages: ReadonlyArray<string>,
isDev?: boolean,
routesManifest?: RoutesManifest,
omittedRoutes?: Set<string>,
omittedRoutes?: ReadonlySet<string>,
canUsePreviewMode?: boolean,
bypassToken?: string,
isServerMode?: boolean,
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>,
dynamicMiddlewareRouteMap?: ReadonlyMap<string, RouteWithSrc>,
experimentalPPR?: boolean
): Promise<RouteWithSrc[]> {
if (routesManifest) {

@@ -442,7 +442,9 @@ export async function getDynamicRoutes(
let getRouteRegex: ((pageName: string) => { re: RegExp }) | undefined =
undefined;

let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;
let getSortedRoutes:
| ((normalizedPages: ReadonlyArray<string>) => string[])
| undefined;

try {
const resolved = require_.resolve('next-server/dist/lib/router/utils', {

@@ -645,10 +647,10 @@ export function filterStaticPages(
}

export function getFilesMapFromReasons(
fileList: Set<string>,
fileList: ReadonlySet<string>,
reasons: NodeFileTraceReasons,
ignoreFn?: (file: string, parent?: string) => boolean
) {
): ReadonlyMap<string, Set<string>> {
// this uses the reasons tree to collect files specific to a
// certain parent allowing us to not have to trace each parent
// separately

@@ -804,6 +806,7 @@ export interface CreateLambdaFromPseudoLayersOptions
layers: PseudoLayer[];
isStreaming?: boolean;
nextVersion?: string;
experimentalAllowBundling?: boolean;
}

// measured with 1, 2, 5, 10, and `os.cpus().length || 5`

@@ -815,6 +818,7 @@ export async function createLambdaFromPseudoLayers({
layers,
isStreaming,
nextVersion,
experimentalAllowBundling,
...lambdaOptions
}: CreateLambdaFromPseudoLayersOptions) {
await createLambdaSema.acquire();

@@ -862,6 +866,7 @@ export async function createLambdaFromPseudoLayers({
slug: 'nextjs',
version: nextVersion,
},
experimentalAllowBundling,
});
}

@@ -1361,7 +1366,7 @@ async function getSourceFilePathFromPage({
|
||||
}: {
|
||||
workPath: string;
|
||||
page: string;
|
||||
pageExtensions?: string[];
|
||||
pageExtensions?: ReadonlyArray<string>;
|
||||
}) {
|
||||
const usesSrcDir = await usesSrcDirectory(workPath);
|
||||
const extensionsToTry = pageExtensions || ['js', 'jsx', 'ts', 'tsx'];
|
||||
@@ -1502,13 +1507,14 @@ export async function getPageLambdaGroups({
|
||||
internalPages,
|
||||
pageExtensions,
|
||||
inversedAppPathManifest,
|
||||
experimentalAllowBundling,
|
||||
}: {
|
||||
entryPath: string;
|
||||
config: Config;
|
||||
functionsConfigManifest?: FunctionsConfigManifestV1;
|
||||
pages: string[];
|
||||
prerenderRoutes: Set<string>;
|
||||
experimentalPPRRoutes: Set<string> | undefined;
|
||||
pages: ReadonlyArray<string>;
|
||||
prerenderRoutes: ReadonlySet<string>;
|
||||
experimentalPPRRoutes: ReadonlySet<string> | undefined;
|
||||
pageTraces: {
|
||||
[page: string]: {
|
||||
[key: string]: FileFsRef;
|
||||
@@ -1521,9 +1527,10 @@ export async function getPageLambdaGroups({
|
||||
initialPseudoLayer: PseudoLayerResult;
|
||||
initialPseudoLayerUncompressed: number;
|
||||
lambdaCompressedByteLimit: number;
|
||||
internalPages: string[];
|
||||
pageExtensions?: string[];
|
||||
internalPages: ReadonlyArray<string>;
|
||||
pageExtensions?: ReadonlyArray<string>;
|
||||
inversedAppPathManifest?: Record<string, string>;
|
||||
experimentalAllowBundling?: boolean;
|
||||
}) {
|
||||
const groups: Array<LambdaGroup> = [];
|
||||
|
||||
@@ -1563,42 +1570,46 @@ export async function getPageLambdaGroups({
|
||||
opts = { ...vercelConfigOpts, ...opts };
|
||||
}
|
||||
|
||||
let matchingGroup = groups.find(group => {
|
||||
const matches =
|
||||
group.maxDuration === opts.maxDuration &&
|
||||
group.memory === opts.memory &&
|
||||
group.isPrerenders === isPrerenderRoute &&
|
||||
group.isExperimentalPPR === isExperimentalPPR;
|
||||
let matchingGroup = experimentalAllowBundling
|
||||
? undefined
|
||||
: groups.find(group => {
|
||||
const matches =
|
||||
group.maxDuration === opts.maxDuration &&
|
||||
group.memory === opts.memory &&
|
||||
group.isPrerenders === isPrerenderRoute &&
|
||||
group.isExperimentalPPR === isExperimentalPPR;
|
||||
|
||||
if (matches) {
|
||||
let newTracedFilesSize = group.pseudoLayerBytes;
|
||||
let newTracedFilesUncompressedSize = group.pseudoLayerUncompressedBytes;
|
||||
if (matches) {
|
||||
let newTracedFilesSize = group.pseudoLayerBytes;
|
||||
let newTracedFilesUncompressedSize =
|
||||
group.pseudoLayerUncompressedBytes;
|
||||
|
||||
for (const newPage of newPages) {
|
||||
Object.keys(pageTraces[newPage] || {}).map(file => {
|
||||
if (!group.pseudoLayer[file]) {
|
||||
const item = tracedPseudoLayer[file] as PseudoFile;
|
||||
for (const newPage of newPages) {
|
||||
Object.keys(pageTraces[newPage] || {}).map(file => {
|
||||
if (!group.pseudoLayer[file]) {
|
||||
const item = tracedPseudoLayer[file] as PseudoFile;
|
||||
|
||||
newTracedFilesSize += item.compBuffer?.byteLength || 0;
|
||||
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
|
||||
newTracedFilesSize += item.compBuffer?.byteLength || 0;
|
||||
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
|
||||
}
|
||||
});
|
||||
newTracedFilesSize +=
|
||||
compressedPages[newPage].compBuffer.byteLength;
|
||||
newTracedFilesUncompressedSize +=
|
||||
compressedPages[newPage].uncompressedSize;
|
||||
}
|
||||
});
|
||||
newTracedFilesSize += compressedPages[newPage].compBuffer.byteLength;
|
||||
newTracedFilesUncompressedSize +=
|
||||
compressedPages[newPage].uncompressedSize;
|
||||
}
|
||||
|
||||
const underUncompressedLimit =
|
||||
newTracedFilesUncompressedSize <
|
||||
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
|
||||
const underCompressedLimit =
|
||||
newTracedFilesSize <
|
||||
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;
|
||||
const underUncompressedLimit =
|
||||
newTracedFilesUncompressedSize <
|
||||
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
|
||||
const underCompressedLimit =
|
||||
newTracedFilesSize <
|
||||
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;
|
||||
|
||||
return underUncompressedLimit && underCompressedLimit;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
return underUncompressedLimit && underCompressedLimit;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
if (matchingGroup) {
|
||||
matchingGroup.pages.push(page);
|
||||
@@ -1906,7 +1917,7 @@ type OnPrerenderRouteArgs = {
|
||||
isServerMode: boolean;
|
||||
canUsePreviewMode: boolean;
|
||||
lambdas: { [key: string]: Lambda };
|
||||
experimentalStreamingLambdaPaths: Map<string, string> | undefined;
|
||||
experimentalStreamingLambdaPaths: ReadonlyMap<string, string> | undefined;
|
||||
prerenders: { [key: string]: Prerender | File };
|
||||
pageLambdaMap: { [key: string]: string };
|
||||
routesManifest?: RoutesManifest;
|
||||
@@ -2053,6 +2064,11 @@ export const onPrerenderRoute =
|
||||
|
||||
let isAppPathRoute = false;
|
||||
|
||||
// experimentalPPR signals app path route
|
||||
if (appDir && experimentalPPR) {
|
||||
isAppPathRoute = true;
|
||||
}
|
||||
|
||||
// TODO: leverage manifest to determine app paths more accurately
|
||||
if (appDir && srcRoute && (!dataRoute || dataRoute?.endsWith('.rsc'))) {
|
||||
isAppPathRoute = true;
|
||||
@@ -2184,7 +2200,6 @@ export const onPrerenderRoute =
|
||||
if (routeKey !== '/index' && routeKey.endsWith('/index')) {
|
||||
routeKey = `${routeKey}/index`;
|
||||
routeFileNoExt = routeKey;
|
||||
origRouteFileNoExt = routeKey;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2255,15 +2270,20 @@ export const onPrerenderRoute =
|
||||
const lambdaId = pageLambdaMap[outputSrcPathPage];
|
||||
lambda = lambdas[lambdaId];
|
||||
} else {
|
||||
const outputSrcPathPage = normalizeIndexOutput(
|
||||
let outputSrcPathPage =
|
||||
srcRoute == null
|
||||
? outputPathPageOrig
|
||||
: path.posix.join(
|
||||
entryDirectory,
|
||||
srcRoute === '/' ? '/index' : srcRoute
|
||||
),
|
||||
isServerMode
|
||||
);
|
||||
);
|
||||
|
||||
if (!isAppPathRoute) {
|
||||
outputSrcPathPage = normalizeIndexOutput(
|
||||
outputSrcPathPage,
|
||||
isServerMode
|
||||
);
|
||||
}
|
||||
|
||||
lambda = lambdas[outputSrcPathPage];
|
||||
}
|
||||
@@ -2464,11 +2484,18 @@ export const onPrerenderRoute =
|
||||
routesManifest,
|
||||
locale
|
||||
);
|
||||
const localeOutputPathPage = normalizeIndexOutput(
|
||||
path.posix.join(entryDirectory, localeRouteFileNoExt),
|
||||
isServerMode
|
||||
let localeOutputPathPage = path.posix.join(
|
||||
entryDirectory,
|
||||
localeRouteFileNoExt
|
||||
);
|
||||
|
||||
if (!isAppPathRoute) {
|
||||
localeOutputPathPage = normalizeIndexOutput(
|
||||
localeOutputPathPage,
|
||||
isServerMode
|
||||
);
|
||||
}
|
||||
|
||||
const origPrerenderPage = prerenders[outputPathPage];
|
||||
prerenders[localeOutputPathPage] = {
|
||||
...origPrerenderPage,
|
||||
@@ -2805,7 +2832,7 @@ export async function getMiddlewareBundle({
|
||||
appPathRoutesManifest: Record<string, string>;
|
||||
}): Promise<{
|
||||
staticRoutes: Route[];
|
||||
dynamicRouteMap: Map<string, RouteWithSrc>;
|
||||
dynamicRouteMap: ReadonlyMap<string, RouteWithSrc>;
|
||||
edgeFunctions: Record<string, EdgeFunction>;
|
||||
}> {
|
||||
const middlewareManifest = await getMiddlewareManifest(
|
||||
@@ -2969,14 +2996,17 @@ export async function getMiddlewareBundle({
|
||||
}
|
||||
|
||||
if (routesManifest?.basePath) {
|
||||
shortPath = normalizeIndexOutput(
|
||||
path.posix.join(
|
||||
'./',
|
||||
routesManifest?.basePath,
|
||||
shortPath.replace(/^\//, '')
|
||||
),
|
||||
true
|
||||
const isAppPathRoute = !!appPathRoutesManifest[shortPath];
|
||||
|
||||
shortPath = path.posix.join(
|
||||
'./',
|
||||
routesManifest?.basePath,
|
||||
shortPath.replace(/^\//, '')
|
||||
);
|
||||
|
||||
if (!isAppPathRoute) {
|
||||
shortPath = normalizeIndexOutput(shortPath, true);
|
||||
}
|
||||
}
|
||||
|
||||
worker.edgeFunction.name = shortPath;
|
||||
@@ -3238,7 +3268,8 @@ export function isApiPage(page: string | undefined) {
.match(/(serverless|server)\/pages\/api(\/|\.js$)/);
}

export type VariantsManifest = Record<
/** @deprecated */
export type VariantsManifestLegacy = Record<
string,
{
defaultValue?: unknown;

@@ -3249,7 +3280,7 @@ export type VariantsManifest = Record<
export async function getVariantsManifest(
entryPath: string,
outputDirectory: string
): Promise<null | VariantsManifest> {
): Promise<null | VariantsManifestLegacy> {
const pathVariantsManifest = path.join(
entryPath,
outputDirectory,

@@ -3263,7 +3294,7 @@ export async function getVariantsManifest(

if (!hasVariantsManifest) return null;

const variantsManifest: VariantsManifest = await fs.readJSON(
const variantsManifest: VariantsManifestLegacy = await fs.readJSON(
pathVariantsManifest
);

@@ -3285,7 +3316,7 @@ export async function getServerlessPages(params: {
glob('**/route.js', appDir),
glob('**/_not-found.js', appDir),
]).then(items => Object.assign(...items))
: Promise.resolve({}),
: Promise.resolve({} as Record<string, FileFsRef>),
getMiddlewareManifest(params.entryPath, params.outputDirectory),
]);

@@ -22,7 +22,7 @@
"redirect": "manual"
},
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
},
"headers": {
"RSC": "1"

@@ -46,7 +46,7 @@
"redirect": "manual"
},
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
},
"headers": {
"RSC": "1"

@@ -70,7 +70,7 @@
"redirect": "manual"
},
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
},
"headers": {
"RSC": "1"

packages/next/test/fixtures/00-app-dir-no-ppr/app/dynamic-index/[slug]/index/page.js (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
export default function Page() {
return (
<>
<p>dynamic-index</p>
</>
)
}

@@ -18,7 +18,22 @@
}
],
"probes": [
{
{
"path": "/dynamic-index/hello/index",
"status": 200,
"mustContain": "dynamic-index"
},
{
"path": "/dynamic-index/hello/index",
"status": 200,
"mustContain": ":",
"mustNotContain": "<html",
"headers": {
"RSC": 1,
"Next-Router-Prefetch": 1
}
},
{
"path": "/rewritten-to-dashboard",
"status": 200,
"mustContain": "html"

packages/next/test/fixtures/00-app-dir-ppr/app/dynamic-index/[slug]/index/page.js (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
|
||||
export default function Page() {
|
||||
return (
|
||||
<>
|
||||
<p>dynamic-index</p>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -18,6 +18,21 @@
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{
|
||||
"path": "/dynamic-index/hello/index",
|
||||
"status": 200,
|
||||
"mustContain": "dynamic-index"
|
||||
},
|
||||
{
|
||||
"path": "/dynamic-index/hello/index",
|
||||
"status": 200,
|
||||
"mustContain": ":",
|
||||
"mustNotContain": "<html",
|
||||
"headers": {
|
||||
"RSC": 1,
|
||||
"Next-Router-Prefetch": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/rewritten-to-dashboard",
|
||||
"status": 200,
|
||||
@@ -132,7 +147,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from /ssg",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -148,7 +163,7 @@
|
||||
"path": "/ssg",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -160,7 +175,7 @@
|
||||
"path": "/ssg",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch",
|
||||
"content-type": "text/x-component"
|
||||
},
|
||||
"headers": {
|
||||
@@ -195,14 +210,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard/deployments/[id]/settings",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/123/settings",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -215,14 +230,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "catchall",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/catchall/something",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -235,7 +250,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -255,7 +270,7 @@
|
||||
},
|
||||
"responseHeaders": {
|
||||
"content-type": "text/x-component",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -31,7 +31,7 @@
|
||||
},
|
||||
"responseHeaders": {
|
||||
"content-type": "text/x-component",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -6,7 +6,11 @@ module.exports = {
|
||||
rewrites: async () => {
|
||||
return [
|
||||
{
|
||||
source: '/rewritten-to-dashboard',
|
||||
source: '/rewritten-to-dashboard/',
|
||||
destination: '/dashboard/',
|
||||
},
|
||||
{
|
||||
source: '/:locale/t/size-chart/:chart/',
|
||||
destination: '/dashboard',
|
||||
},
|
||||
];
|
||||
|
||||
@@ -6,6 +6,30 @@
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{
|
||||
"path": "/en-us/t/size-chart/mens/",
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard"
|
||||
},
|
||||
{
|
||||
"path": "/en-us/t/size-chart/mens/",
|
||||
"status": 200,
|
||||
"headers": {
|
||||
"RSC": 1,
|
||||
"Next-Router-Prefetch": 1
|
||||
},
|
||||
"mustContain": ":{",
|
||||
"mustNotContain": ".prefetch"
|
||||
},
|
||||
{
|
||||
"path": "/en-us/t/size-chart/mens/",
|
||||
"status": 200,
|
||||
"headers": {
|
||||
"RSC": 1
|
||||
},
|
||||
"mustContain": ":{",
|
||||
"mustNotContain": ".rsc"
|
||||
},
|
||||
{
|
||||
"path": "/dynamic/category-1/id-1/",
|
||||
"status": 200,
|
||||
@@ -23,14 +47,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from /ssg",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/ssg/",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -63,14 +87,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard/deployments/[id]/settings",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/123/settings/",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -83,14 +107,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "catchall",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/catchall/something/",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -103,7 +127,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -123,7 +147,7 @@
|
||||
},
|
||||
"responseHeaders": {
|
||||
"content-type": "text/x-component",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "about",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,5 +1,31 @@
# @vercel/node

## 3.0.20

### Patch Changes

- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))

## 3.0.19

### Patch Changes

- build: upgrade edge-runtime ([#11148](https://github.com/vercel/vercel/pull/11148))

- refactor: simplify content-length check ([#11150](https://github.com/vercel/vercel/pull/11150))

- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
  - @vercel/build-utils@7.7.0

## 3.0.18

### Patch Changes

- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))

- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
  - @vercel/build-utils@7.6.0

## 3.0.17

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "@vercel/node",
"version": "3.0.17",
"version": "3.0.20",
"license": "Apache-2.0",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",

@@ -20,16 +20,18 @@
"dist"
],
"dependencies": {
"@edge-runtime/node-utils": "2.2.1",
"@edge-runtime/primitives": "4.0.5",
"@edge-runtime/vm": "3.1.7",
"@edge-runtime/node-utils": "2.3.0",
"@edge-runtime/primitives": "4.1.0",
"@edge-runtime/vm": "3.2.0",
"@types/node": "14.18.33",
"@vercel/build-utils": "7.5.1",
"@vercel/build-utils": "7.7.0",
"@vercel/error-utils": "2.0.2",
"@vercel/nft": "0.26.2",
"@vercel/nft": "0.26.4",
"@vercel/static-config": "3.0.0",
"async-listen": "3.0.0",
"edge-runtime": "2.5.7",
"cjs-module-lexer": "1.2.3",
"edge-runtime": "2.5.9",
"es-module-lexer": "1.4.1",
"esbuild": "0.14.47",
"etag": "1.8.1",
"node-fetch": "2.6.9",

@@ -11,11 +11,17 @@ import type { VercelProxyResponse } from './types.js';
import { Config } from '@vercel/build-utils';
import { createEdgeEventHandler } from './edge-functions/edge-handler.mjs';
import { createServer, IncomingMessage, ServerResponse } from 'http';
import { createServerlessEventHandler } from './serverless-functions/serverless-handler.mjs';
import {
createServerlessEventHandler,
HTTP_METHODS,
} from './serverless-functions/serverless-handler.mjs';
import { isEdgeRuntime, logError, validateConfiguredRuntime } from './utils.js';
import { init, parse as parseEsm } from 'es-module-lexer';
import { parse as parseCjs } from 'cjs-module-lexer';
import { getConfig } from '@vercel/static-config';
import { Project } from 'ts-morph';
import { listen } from 'async-listen';
import { readFile } from 'fs/promises';

const parseConfig = (entryPointPath: string) =>
getConfig(new Project(), entryPointPath);

@@ -46,12 +52,31 @@ async function createEventHandler(
);
}

const content = await readFile(entrypointPath, 'utf8');

const isStreaming =
staticConfig?.supportsResponseStreaming ||
(await hasWebHandlers(async () => parseCjs(content).exports)) ||
(await hasWebHandlers(async () =>
init.then(() => parseEsm(content)[1].map(specifier => specifier.n))
));

return createServerlessEventHandler(entrypointPath, {
mode: staticConfig?.supportsResponseStreaming ? 'streaming' : 'buffer',
mode: isStreaming ? 'streaming' : 'buffer',
shouldAddHelpers: options.shouldAddHelpers,
});
}

async function hasWebHandlers(getExports: () => Promise<string[]>) {
const exports = await getExports().catch(() => []);
for (const name of exports) {
if (HTTP_METHODS.includes(name)) {
return true;
}
}
return false;
}

let handleEvent: (request: IncomingMessage) => Promise<VercelProxyResponse>;
let handlerEventError: Error;
let onExit: (() => Promise<void>) | undefined;

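The detection above switches `vercel dev` into streaming mode whenever the entrypoint exports Web-style HTTP method handlers, not only when `supportsResponseStreaming` is set in the static config. A condensed TypeScript sketch of that check, assuming the entrypoint source has already been read into `source`; `exportsWebHandlers` and `safeExports` are illustrative names (the file above calls its helper `hasWebHandlers`), and `HTTP_METHODS` restates the list exported from serverless-handler.mjs:

import { init, parse as parseEsm } from 'es-module-lexer';
import { parse as parseCjs } from 'cjs-module-lexer';

// Mirrors the method list from serverless-handler.mjs (Next.js route-handler methods).
const HTTP_METHODS = ['GET', 'HEAD', 'OPTIONS', 'POST', 'PUT', 'DELETE', 'PATCH'];

async function exportsWebHandlers(source: string): Promise<boolean> {
  // Check both module formats; either lexer may reject the other format,
  // so a parse failure simply counts as "no exports found".
  const cjsNames = await safeExports(() => parseCjs(source).exports);
  const esmNames = await safeExports(async () => {
    await init; // es-module-lexer requires a one-time initialization
    return parseEsm(source)[1].map(specifier => specifier.n);
  });
  return [...cjsNames, ...esmNames].some(name => HTTP_METHODS.includes(name));
}

async function safeExports(getExports: () => string[] | Promise<string[]>): Promise<string[]> {
  try {
    return await getExports();
  } catch {
    return [];
  }
}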
@@ -230,8 +230,10 @@ export async function createEdgeEventHandler(
}

const body: Buffer | string | undefined = await serializeBody(request);
if (body !== undefined)

if (body !== undefined && body.length) {
request.headers['content-length'] = String(body.length);
}

const url = new URL(request.url ?? '/', server.url);
const response = await undiciRequest(url, {

@@ -1,26 +1,7 @@
import type { ServerResponse, IncomingMessage } from 'http';
import type { NodeHandler } from '@edge-runtime/node-utils';
import { buildToNodeHandler } from '@edge-runtime/node-utils';

class FetchEvent {
public request: Request;
public awaiting: Set<Promise<void>>;
public response: Response | null;

constructor(request: Request) {
this.request = request;
this.response = null;
this.awaiting = new Set();
}

respondWith(response: Response) {
this.response = response;
}

waitUntil() {
throw new Error('waitUntil is not implemented yet for Node.js');
}
}
import Edge from '@edge-runtime/primitives';

const webHandlerToNodeHandler = buildToNodeHandler(
{

@@ -32,8 +13,8 @@ const webHandlerToNodeHandler = buildToNodeHandler(
super(input, addDuplexToInit(init));
}
},
Uint8Array: Uint8Array,
FetchEvent: FetchEvent,
Uint8Array,
FetchEvent: Edge.FetchEvent,
},
{ defaultOrigin: 'https://vercel.com' }
);

@@ -27,7 +27,7 @@ type ServerlessFunctionSignature = (
const [NODE_MAJOR] = process.versions.node.split('.').map(v => Number(v));

/* https://nextjs.org/docs/app/building-your-application/routing/router-handlers#supported-http-methods */
const HTTP_METHODS = [
export const HTTP_METHODS = [
'GET',
'HEAD',
'OPTIONS',

@@ -1,10 +0,0 @@
/* global Response */

const baseUrl = ({ headers }) =>
`${headers.get('x-forwarded-proto')}://${headers.get('x-forwarded-host')}`;

export function GET(request) {
const { searchParams } = new URL(request.url, baseUrl(request));
const name = searchParams.get('name');
return new Response(`Greetings, ${name}`);
}
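For orientation, `buildToNodeHandler` produces a converter that turns a Web-style handler (`Request => Response`) into a Node.js `(req, res)` handler. A rough, hedged usage sketch: the dependency object below passes the global Web primitives plus `Edge.FetchEvent` rather than this package's exact wiring (which subclasses `Request` to add `duplex`), and the echo handler is made up for illustration:

import { createServer } from 'http';
import { buildToNodeHandler } from '@edge-runtime/node-utils';
import Edge from '@edge-runtime/primitives';

const toNodeHandler = buildToNodeHandler(
  // Assumption: these five primitives are the dependencies the converter expects.
  { Headers, ReadableStream, Request, Uint8Array, FetchEvent: Edge.FetchEvent },
  { defaultOrigin: 'https://vercel.com' }
);

// Wrap a Web handler so Node's http server can serve it.
const nodeHandler = toNodeHandler(
  (request: Request) => new Response(`Hello from ${new URL(request.url).pathname}`)
);

createServer(nodeHandler).listen(3000);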
packages/node/test/dev-fixtures/web-handlers-edge.js (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
/* global ReadableStream, TextEncoderStream, Response */
|
||||
|
||||
export const config = { runtime: 'edge' };
|
||||
|
||||
const DEFER_MS = 10;
|
||||
|
||||
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
const streaming =
|
||||
text =>
|
||||
(_, { waitUntil }) => {
|
||||
const DATA = text.split(' ');
|
||||
let index = 0;
|
||||
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
while (index < DATA.length) {
|
||||
const data = DATA[index++];
|
||||
let chunk = data;
|
||||
if (index !== DATA.length) chunk += ' ';
|
||||
controller.enqueue(chunk);
|
||||
await wait(DEFER_MS);
|
||||
}
|
||||
controller.close();
|
||||
},
|
||||
}).pipeThrough(new TextEncoderStream());
|
||||
|
||||
waitUntil(wait(DATA.length * DEFER_MS));
|
||||
|
||||
return new Response(readable, {
|
||||
headers: {
|
||||
'Content-Type': 'text/plain',
|
||||
'x-web-handler': text,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const GET = streaming('Web handler using GET');
|
||||
|
||||
export const HEAD = streaming('Web handler using HEAD');
|
||||
|
||||
export const OPTIONS = streaming('Web handler using OPTIONS');
|
||||
|
||||
export const POST = streaming('Web handler using POST');
|
||||
|
||||
export const PUT = streaming('Web handler using PUT');
|
||||
|
||||
export const DELETE = streaming('Web handler using DELETE');
|
||||
|
||||
export const PATCH = streaming('Web handler using PATCH');
|
||||
packages/node/test/dev-fixtures/web-handlers-node.js (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
/* global ReadableStream, TextEncoderStream, Response */
|
||||
|
||||
const DEFER_MS = 10;
|
||||
|
||||
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
const streaming =
|
||||
text =>
|
||||
(_, { waitUntil }) => {
|
||||
const DATA = text.split(' ');
|
||||
let index = 0;
|
||||
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
while (index < DATA.length) {
|
||||
const data = DATA[index++];
|
||||
let chunk = data;
|
||||
if (index !== DATA.length) chunk += ' ';
|
||||
controller.enqueue(chunk);
|
||||
await wait(DEFER_MS);
|
||||
}
|
||||
controller.close();
|
||||
},
|
||||
}).pipeThrough(new TextEncoderStream());
|
||||
|
||||
waitUntil(wait(DATA.length * DEFER_MS));
|
||||
|
||||
return new Response(readable, {
|
||||
headers: {
|
||||
'Content-Type': 'text/plain',
|
||||
'x-web-handler': text,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const GET = streaming('Web handler using GET');
|
||||
|
||||
export const HEAD = streaming('Web handler using HEAD');
|
||||
|
||||
export const OPTIONS = streaming('Web handler using OPTIONS');
|
||||
|
||||
export const POST = streaming('Web handler using POST');
|
||||
|
||||
export const PUT = streaming('Web handler using PUT');
|
||||
|
||||
export const DELETE = streaming('Web handler using DELETE');
|
||||
|
||||
export const PATCH = streaming('Web handler using PATCH');
|
||||
@@ -30,9 +30,9 @@ function testForkDevServer(entrypoint: string) {
|
||||
}
|
||||
|
||||
(NODE_MAJOR < 18 ? test.skip : test)(
|
||||
'runs an serverless function that exports GET',
|
||||
'web handlers for node runtime',
|
||||
async () => {
|
||||
const child = testForkDevServer('./serverless-response.js');
|
||||
const child = testForkDevServer('./web-handlers-node.js');
|
||||
try {
|
||||
const result = await readMessage(child);
|
||||
if (result.state !== 'message') {
|
||||
@@ -43,20 +43,251 @@ function testForkDevServer(entrypoint: string) {
|
||||
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/serverless-response?name=Vercel`
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'GET' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
}).toEqual({ status: 200, body: 'Greetings, Vercel' });
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using GET',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using GET',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'POST' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using POST',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using POST',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'DELETE' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using DELETE',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using DELETE',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'PUT' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PUT',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PUT',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'PATCH' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PATCH',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PATCH',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'HEAD' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
'x-web-handler': 'Web handler using HEAD',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'OPTIONS' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using OPTIONS',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using OPTIONS',
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
child.kill(9);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
(NODE_MAJOR < 18 ? test.skip : test)(
|
||||
'web handlers for edge runtime',
|
||||
async () => {
|
||||
const child = testForkDevServer('./web-handlers-edge.js');
|
||||
try {
|
||||
const result = await readMessage(child);
|
||||
if (result.state !== 'message') {
|
||||
throw new Error('Exited. error: ' + JSON.stringify(result.value));
|
||||
}
|
||||
|
||||
const { address, port } = result.value;
|
||||
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/serverless-response?name=Vercel`,
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'GET' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using GET',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using GET',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'POST' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using POST',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using POST',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'DELETE' }
|
||||
);
|
||||
|
||||
console.log(response);
|
||||
expect({
|
||||
status: response.status,
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using DELETE',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'PUT' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PUT',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PUT',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'PATCH' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PATCH',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PATCH',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'HEAD' }
|
||||
);
|
||||
expect({ status: response.status }).toEqual({ status: 405 });
|
||||
expect({
|
||||
status: response.status,
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
'x-web-handler': 'Web handler using HEAD',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'OPTIONS' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using OPTIONS',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using OPTIONS',
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
child.kill(9);
|
||||
|
||||
@@ -1,5 +1,11 @@
# @vercel/python

## 4.1.1

### Patch Changes

- Remove deprecated `createLambda()` usage ([#11080](https://github.com/vercel/vercel/pull/11080))

## 4.1.0

### Minor Changes

@@ -1,6 +1,6 @@
{
"name": "@vercel/python",
"version": "4.1.0",
"version": "4.1.1",
"main": "./dist/index.js",
"license": "Apache-2.0",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",

@@ -26,7 +26,7 @@
"@types/jest": "27.4.1",
"@types/node": "14.18.33",
"@types/which": "3.0.0",
"@vercel/build-utils": "7.5.1",
"@vercel/build-utils": "7.7.0",
"execa": "^1.0.0",
"fs-extra": "11.1.1",
"jest-junit": "16.0.0",

@@ -1,23 +1,27 @@
import { join, dirname, basename } from 'path';
import execa from 'execa';
import fs from 'fs';
import execa from 'execa';
import { promisify } from 'util';
const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);
import { join, dirname, basename } from 'path';
import {
GlobOptions,
BuildOptions,
getWriteableDirectory,
download,
glob,
createLambda,
Lambda,
FileBlob,
shouldServe,
debug,
NowBuildError,
type BuildOptions,
type GlobOptions,
type BuildV3,
type Files,
} from '@vercel/build-utils';
import { installRequirement, installRequirementsFile } from './install';
import { getLatestPythonVersion, getSupportedPythonVersion } from './version';

const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);

async function pipenvConvert(cmd: string, srcDir: string) {
debug('Running pipfile2req...');
try {

@@ -53,13 +57,13 @@ export async function downloadFilesInWorkPath({
return workPath;
}

export const build = async ({
export const build: BuildV3 = async ({
workPath,
files: originalFiles,
entrypoint,
meta = {},
config,
}: BuildOptions) => {
}) => {
let pythonVersion = getLatestPythonVersion(meta);

workPath = await downloadFilesInWorkPath({

@@ -190,12 +194,6 @@ export const build = async ({
.replace(/__VC_HANDLER_MODULE_NAME/g, moduleName)
.replace(/__VC_HANDLER_ENTRYPOINT/g, entrypointWithSuffix);

// in order to allow the user to have `server.py`, we need our `server.py` to be called
// somethig else
const handlerPyFilename = 'vc__handler__python';

await writeFile(join(workPath, `${handlerPyFilename}.py`), handlerPyContents);

const globOptions: GlobOptions = {
cwd: workPath,
ignore:

@@ -204,14 +202,22 @@ export const build = async ({
: 'node_modules/**',
};

const lambda = await createLambda({
files: await glob('**', globOptions),
const files: Files = await glob('**', globOptions);

// in order to allow the user to have `server.py`, we
// need our `server.py` to be called something else
const handlerPyFilename = 'vc__handler__python';

files[`${handlerPyFilename}.py`] = new FileBlob({ data: handlerPyContents });

const output = new Lambda({
files,
handler: `${handlerPyFilename}.vc_handler`,
runtime: pythonVersion.runtime,
environment: {},
});

return { output: lambda };
return { output };
};

export { shouldServe };

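The reworked output step above boils down to: glob the work directory, add the renamed handler module as a `FileBlob`, and construct a `Lambda` directly instead of calling the deprecated `createLambda()`. A minimal TypeScript sketch of that step in isolation; `makePythonLambda` is an illustrative name, and the hard-coded `python3.9` runtime stands in for the value the builder actually derives from `getLatestPythonVersion()`:

import { FileBlob, Lambda, glob, type Files } from '@vercel/build-utils';

async function makePythonLambda(workPath: string, handlerPyContents: string): Promise<Lambda> {
  // Collect the user's files, then inject the renamed handler module alongside them.
  const files: Files = await glob('**', { cwd: workPath, ignore: 'node_modules/**' });
  files['vc__handler__python.py'] = new FileBlob({ data: handlerPyContents });

  // Constructing Lambda directly replaces the deprecated createLambda() helper.
  return new Lambda({
    files,
    handler: 'vc__handler__python.vc_handler',
    runtime: 'python3.9', // assumption: the real value comes from the detected Python version
    environment: {},
  });
}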
@@ -1,5 +1,17 @@
# @vercel/redwood

## 2.0.8

### Patch Changes

- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))

## 2.0.7

### Patch Changes

- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))

## 2.0.6

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "@vercel/redwood",
"version": "2.0.6",
"version": "2.0.8",
"main": "./dist/index.js",
"license": "Apache-2.0",
"homepage": "https://vercel.com/docs",

@@ -20,7 +20,7 @@
"type-check": "tsc --noEmit"
},
"dependencies": {
"@vercel/nft": "0.26.2",
"@vercel/nft": "0.26.4",
"@vercel/routing-utils": "3.1.0",
"semver": "6.3.1"
},

@@ -28,7 +28,7 @@
"@types/aws-lambda": "8.10.19",
"@types/node": "14.18.33",
"@types/semver": "6.0.0",
"@vercel/build-utils": "7.5.1",
"@vercel/build-utils": "7.7.0",
"execa": "3.2.0",
"fs-extra": "11.1.0",
"jest-junit": "16.0.0"

@@ -1,5 +1,27 @@
# @vercel/remix-builder

## 2.0.20

### Patch Changes

- Don't install Remix fork when not using split configuration ([#11152](https://github.com/vercel/vercel/pull/11152))

- Add `serverBundles` post-build sanity check and fallback ([#11153](https://github.com/vercel/vercel/pull/11153))

- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))

- Update `@remix-run/dev` fork to v2.6.0 ([#11162](https://github.com/vercel/vercel/pull/11162))

- Update `@remix-run/dev` fork to v2.7.0 ([#11180](https://github.com/vercel/vercel/pull/11180))

- Update `@remix-run/dev` fork to v2.7.2 ([#11186](https://github.com/vercel/vercel/pull/11186))

## 2.0.19

### Patch Changes

- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))

## 2.0.18

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "@vercel/remix-builder",
"version": "2.0.18",
"version": "2.0.20",
"license": "Apache-2.0",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",

@@ -21,16 +21,16 @@
"defaults"
],
"dependencies": {
"@vercel/nft": "0.26.2",
"@vercel/nft": "0.26.4",
"@vercel/static-config": "3.0.0",
"ts-morph": "12.0.0"
},
"devDependencies": {
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.5.1",
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.7.2",
"@types/jest": "27.5.1",
"@types/node": "14.18.33",
"@types/semver": "7.3.13",
"@vercel/build-utils": "7.5.1",
"@vercel/build-utils": "7.7.0",
"jest-junit": "16.0.0",
"path-to-regexp": "6.2.1",
"semver": "7.5.2"

@@ -1,5 +1,5 @@
|
||||
import { Project } from 'ts-morph';
|
||||
import { readFileSync, promises as fs } from 'fs';
|
||||
import { readFileSync, promises as fs, existsSync } from 'fs';
|
||||
import { basename, dirname, extname, join, posix, relative, sep } from 'path';
|
||||
import {
|
||||
debug,
|
||||
@@ -174,9 +174,83 @@ export const build: BuildV2 = async ({
|
||||
pkg.dependencies?.['@remix-run/dev'] ||
|
||||
pkg.devDependencies?.['@remix-run/dev'];
|
||||
|
||||
// Override the official `@remix-run/dev` package with the
|
||||
// Vercel fork, which supports the `serverBundles` config
|
||||
const serverBundlesMap = new Map<string, ConfigRoute[]>();
|
||||
const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();
|
||||
|
||||
// Read the `export const config` (if any) for each route
|
||||
const project = new Project();
|
||||
const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
|
||||
for (const route of remixRoutes) {
|
||||
const routePath = join(remixConfig.appDirectory, route.file);
|
||||
let staticConfig = getConfig(project, routePath);
|
||||
if (staticConfig && isHydrogen2) {
|
||||
console.log(
|
||||
'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
|
||||
);
|
||||
staticConfig = null;
|
||||
}
|
||||
staticConfigsMap.set(route, staticConfig);
|
||||
}
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
const config = getResolvedRouteConfig(
|
||||
route,
|
||||
remixConfig.routes,
|
||||
staticConfigsMap,
|
||||
isHydrogen2
|
||||
);
|
||||
resolvedConfigsMap.set(route, config);
|
||||
}
|
||||
|
||||
// Figure out which routes belong to which server bundles
|
||||
// based on having common static config properties
|
||||
for (const route of remixRoutes) {
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const config = resolvedConfigsMap.get(route);
|
||||
if (!config) {
|
||||
throw new Error(`Expected resolved config for "${route.id}"`);
|
||||
}
|
||||
const hash = calculateRouteConfigHash(config);
|
||||
|
||||
let routesForHash = serverBundlesMap.get(hash);
|
||||
if (!Array.isArray(routesForHash)) {
|
||||
routesForHash = [];
|
||||
serverBundlesMap.set(hash, routesForHash);
|
||||
}
|
||||
|
||||
routesForHash.push(route);
|
||||
}
|
||||
|
||||
let serverBundles: ServerBundle[] = Array.from(
|
||||
serverBundlesMap.entries()
|
||||
).map(([hash, routes]) => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
|
||||
return {
|
||||
serverBuildPath: isHydrogen2
|
||||
? relative(entrypointFsDirname, remixConfig.serverBuildPath)
|
||||
: `${relative(
|
||||
entrypointFsDirname,
|
||||
dirname(remixConfig.serverBuildPath)
|
||||
)}/build-${runtime}-${hash}.js`,
|
||||
routes: routes.map(r => r.id),
|
||||
};
|
||||
});
|
||||
|
||||
// If the project is *not* relying on split configurations, then set
|
||||
// the `serverBuildPath` to the default Remix path, since the forked
|
||||
// Remix compiler will not be used
|
||||
if (!isHydrogen2 && serverBundles.length === 1) {
|
||||
// `serverBuildTarget` and `serverBuildPath` are undefined with
|
||||
// our remix config modifications, so use the default build path
|
||||
serverBundles[0].serverBuildPath = 'build/index.js';
|
||||
}
|
||||
|
||||
// If the project is relying on split configurations, then override
|
||||
// the official `@remix-run/dev` package with the Vercel fork,
|
||||
// which supports the `serverBundles` config
|
||||
if (
|
||||
serverBundles.length > 1 &&
|
||||
!isHydrogen2 &&
|
||||
remixRunDevPkg.name !== '@vercel/remix-run-dev' &&
|
||||
!remixRunDevPkgVersion?.startsWith('https:')
|
||||
@@ -266,72 +340,7 @@ export const build: BuildV2 = async ({
|
||||
? `${remixConfigPath}.original${extname(remixConfigPath)}`
|
||||
: undefined;
|
||||
|
||||
// These get populated inside the try/catch below
|
||||
let serverBundles: ServerBundle[];
|
||||
const serverBundlesMap = new Map<string, ConfigRoute[]>();
|
||||
const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();
|
||||
|
||||
try {
|
||||
// Read the `export const config` (if any) for each route
|
||||
const project = new Project();
|
||||
const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
|
||||
for (const route of remixRoutes) {
|
||||
const routePath = join(remixConfig.appDirectory, route.file);
|
||||
let staticConfig = getConfig(project, routePath);
|
||||
if (staticConfig && isHydrogen2) {
|
||||
console.log(
|
||||
'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
|
||||
);
|
||||
staticConfig = null;
|
||||
}
|
||||
staticConfigsMap.set(route, staticConfig);
|
||||
}
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
const config = getResolvedRouteConfig(
|
||||
route,
|
||||
remixConfig.routes,
|
||||
staticConfigsMap,
|
||||
isHydrogen2
|
||||
);
|
||||
resolvedConfigsMap.set(route, config);
|
||||
}
|
||||
|
||||
// Figure out which routes belong to which server bundles
|
||||
// based on having common static config properties
|
||||
for (const route of remixRoutes) {
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const config = resolvedConfigsMap.get(route);
|
||||
if (!config) {
|
||||
throw new Error(`Expected resolved config for "${route.id}"`);
|
||||
}
|
||||
const hash = calculateRouteConfigHash(config);
|
||||
|
||||
let routesForHash = serverBundlesMap.get(hash);
|
||||
if (!Array.isArray(routesForHash)) {
|
||||
routesForHash = [];
|
||||
serverBundlesMap.set(hash, routesForHash);
|
||||
}
|
||||
|
||||
routesForHash.push(route);
|
||||
}
|
||||
|
||||
serverBundles = Array.from(serverBundlesMap.entries()).map(
|
||||
([hash, routes]) => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
|
||||
return {
|
||||
serverBuildPath: isHydrogen2
|
||||
? relative(entrypointFsDirname, remixConfig.serverBuildPath)
|
||||
: `${relative(
|
||||
entrypointFsDirname,
|
||||
dirname(remixConfig.serverBuildPath)
|
||||
)}/build-${runtime}-${hash}.js`,
|
||||
routes: routes.map(r => r.id),
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
// We need to patch the `remix.config.js` file to force some values necessary
|
||||
// for a build that works on either Node.js or the Edge runtime
|
||||
if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
|
||||
@@ -468,6 +477,24 @@ module.exports = config;`;

const staticDir = join(entrypointFsDirname, 'public');

// Do a sanity check to ensure that the server bundles `serverBuildPath` was actually created.
// If it was not, then that usually means the Vercel forked Remix compiler was not used and
// thus only a singular server bundle was produced.
const serverBundlesRespected = existsSync(
join(entrypointFsDirname, serverBundles[0].serverBuildPath)
);
if (!serverBundlesRespected) {
console.warn(
'WARN: `serverBundles` configuration failed. Falling back to a singular server bundle.'
);
serverBundles = [
{
serverBuildPath: 'build/index.js',
routes: serverBundles.flatMap(b => b.routes),
},
];
}

const [staticFiles, buildAssets, ...functions] = await Promise.all([
glob('**', staticDir),
glob('**', remixConfig.assetsBuildDirectory),

@@ -476,12 +503,13 @@ module.exports = config;`;
const config = resolvedConfigsMap.get(firstRoute) ?? {
runtime: 'nodejs',
};
const serverBuildPath = join(entrypointFsDirname, bundle.serverBuildPath);

if (config.runtime === 'edge') {
return createRenderEdgeFunction(
entrypointFsDirname,
repoRootPath,
join(entrypointFsDirname, bundle.serverBuildPath),
serverBuildPath,
serverEntryPoint,
remixVersion,
config

@@ -492,7 +520,7 @@ module.exports = config;`;
nodeVersion,
entrypointFsDirname,
repoRootPath,
join(entrypointFsDirname, bundle.serverBuildPath),
serverBuildPath,
serverEntryPoint,
remixVersion,
config

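The sanity check above guards against the case where the forked compiler was not used and no per-bundle output exists on disk, collapsing every route into one default bundle. A small TypeScript sketch of the same fallback, assuming a `ServerBundle` shape with only the `serverBuildPath` and `routes` fields used in this file; `ensureServerBundles` is an illustrative name:

import { existsSync } from 'fs';
import { join } from 'path';

interface ServerBundle {
  serverBuildPath: string;
  routes: string[];
}

// If the expected per-bundle build output is missing, assume the forked compiler
// was not used and fall back to one server bundle at the default build path.
function ensureServerBundles(
  entrypointFsDirname: string,
  serverBundles: ServerBundle[]
): ServerBundle[] {
  const respected = existsSync(
    join(entrypointFsDirname, serverBundles[0].serverBuildPath)
  );
  if (respected) return serverBundles;

  console.warn(
    'WARN: `serverBundles` configuration failed. Falling back to a singular server bundle.'
  );
  return [
    {
      serverBuildPath: 'build/index.js',
      routes: serverBundles.flatMap(b => b.routes),
    },
  ];
}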
packages/remix/test/fixtures/11-hydrogen-2-js/pnpm-lock.yaml (new generated file, 9875 lines; diff suppressed because it is too large)
packages/remix/test/fixtures/14-node-linker-hoisted/.gitignore (new file, 30 lines, vendored)
@@ -0,0 +1,30 @@
|
||||
node_modules
|
||||
|
||||
/.cache
|
||||
/build
|
||||
/public/build
|
||||
/api/_build
|
||||
.eslintcache
|
||||
|
||||
tsconfig.tsbuildinfo
|
||||
.vercel
|
||||
.env
|
||||
fly.wireguard.conf
|
||||
|
||||
pc-api-8703296620103690660-58-0d0aff733697.json
|
||||
|
||||
/.vscode/**/*
|
||||
|
||||
tailwind.css
|
||||
|
||||
.cache/*
|
||||
.cache
|
||||
.data/*
|
||||
.data
|
||||
.turbo
|
||||
.turbo/*
|
||||
turbo-build.log
|
||||
turbo-*.log
|
||||
.expo
|
||||
pc-api-8703296620103690660-58-a47abfbe9daa.json
|
||||
dist
|
||||
packages/remix/test/fixtures/14-node-linker-hoisted/.npmrc (new file, 12 lines, vendored)
@@ -0,0 +1,12 @@
|
||||
# Expo doesn't play nice with pnpm by default.
|
||||
# The symbolic links of pnpm break the rules of Expo monorepos.
|
||||
# @link https://docs.expo.dev/guides/monorepos/#common-issues
|
||||
node-linker=hoisted
|
||||
|
||||
# In order to cache Prisma correctly
|
||||
public-hoist-pattern[]=*prisma*
|
||||
public-hoist-pattern[]=*uuid*
|
||||
# FIXME: @prisma/client is required by the @acme/auth,
|
||||
# but we don't want it installed there since it's already
|
||||
# installed in the @acme/db package
|
||||
strict-peer-dependencies=false
|
||||
packages/remix/test/fixtures/14-node-linker-hoisted/.prettierignore (new file, 9 lines, vendored)
@@ -0,0 +1,9 @@
|
||||
node_modules
|
||||
|
||||
/build
|
||||
/dist
|
||||
/public/build
|
||||
.env
|
||||
|
||||
/postgres-data
|
||||
|
||||
packages/remix/test/fixtures/14-node-linker-hoisted/.prettierrc (new file, 9 lines, vendored)
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"trailingComma": "all",
|
||||
"tabWidth": 2,
|
||||
"semi": false,
|
||||
"singleQuote": false,
|
||||
"printWidth": 130,
|
||||
"plugins": ["prettier-plugin-tailwindcss"],
|
||||
"tailwindFunctions": ["merge", "join", "cva"]
|
||||
}
|
||||
packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/.eslintrc.commit.js (new file, 6 lines, vendored)
@@ -0,0 +1,6 @@
|
||||
/**
|
||||
* @type {import('@types/eslint').Linter.BaseConfig}
|
||||
*/
|
||||
module.exports = {
|
||||
extends: ["@ramble/eslint-config/commit", "./.eslintrc.js"],
|
||||
}
|
||||
packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/.eslintrc.js (new file, 32 lines, vendored)
@@ -0,0 +1,32 @@
|
||||
const OFF = "off"
|
||||
const ERROR = "error"
|
||||
|
||||
/**
|
||||
* @type {import('@types/eslint').Linter.BaseConfig}
|
||||
*/
|
||||
module.exports = {
|
||||
root: true,
|
||||
parser: "@typescript-eslint/parser",
|
||||
plugins: ["@typescript-eslint"],
|
||||
extends: [
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:react/recommended",
|
||||
"plugin:react-hooks/recommended",
|
||||
"plugin:react/jsx-runtime",
|
||||
"prettier",
|
||||
],
|
||||
rules: {
|
||||
"@typescript-eslint/no-non-null-assertion": OFF,
|
||||
"@typescript-eslint/no-var-requires": ERROR,
|
||||
"@typescript-eslint/no-non-null-asserted-optional-chain": OFF,
|
||||
"react/function-component-definition": ERROR,
|
||||
"@typescript-eslint/no-unused-vars": [ERROR, { args: "none", argsIgnorePattern: "^_", varsIgnorePattern: "^_" }],
|
||||
"react/prop-types": OFF,
|
||||
"react/no-unescaped-entities": OFF,
|
||||
},
|
||||
settings: {
|
||||
react: {
|
||||
version: "detect",
|
||||
},
|
||||
},
|
||||
}
|
||||
21  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/.gitignore  (vendored, new file)
@@ -0,0 +1,21 @@
node_modules

/.cache
/build
/public/build
/api/_build
.eslintcache

tsconfig.tsbuildinfo
.vercel
.env
fly.wireguard.conf

/.vscode/**/*

tailwind.css

.cache/*
.cache
.data/*
.data
46  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/app/root.tsx  (vendored, new file)
@@ -0,0 +1,46 @@
import { Links, LiveReload, Meta, MetaFunction, Outlet, Scripts, ScrollRestoration } from "@remix-run/react"
import * as React from "react"

export const meta: MetaFunction = () => {
  return [{ title: "Hello" }, { name: "description", content: "World" }]
}

export default function App() {
  return (
    <Document>
      <Outlet />
    </Document>
  )
}

export function ErrorBoundary() {
  return (
    <Document>
      <h2>error</h2>
    </Document>
  )
}

interface DocumentProps {
  children: React.ReactNode
}

function Document({ children }: DocumentProps) {
  return (
    <html lang="en">
      <head>
        <meta charSet="utf-8" />
        <meta name="viewport" content="width=device-width, initial-scale=1" />

        <Meta />
        <Links />
      </head>
      <body>
        {children}
        <ScrollRestoration />
        <Scripts />
        <LiveReload />
      </body>
    </html>
  )
}
7  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/app/routes/_index.tsx  (vendored, new file)
@@ -0,0 +1,7 @@
export default function Home() {
  return (
    <div>
      <p>hello</p>
    </div>
  )
}
46  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/package.json  (vendored, new file)
@@ -0,0 +1,46 @@
{
  "name": "@ramble/remix",
  "version": "1.0.0",
  "description": "",
  "private": true,
  "license": "",
  "scripts": {
    "build": "remix build",
    "dev": "pnpm with-env remix dev --manual",
    "routes": "remix routes",
    "lint:fix": "eslint --config .eslintrc.commit.js --fix --cache --ext .ts,.tsx app",
    "lint": "eslint --config .eslintrc.js --ext .ts,.tsx app",
    "prettier": "prettier --check app",
    "prettier:fix": "prettier --write app",
    "format": "pnpm run lint:fix && pnpm run prettier:fix",
    "typecheck": "tsc --noEmit",
    "with-env": "dotenv -e ../../.env --"
  },
  "dependencies": {
    "@remix-run/css-bundle": "2.6.0",
    "@remix-run/node": "2.6.0",
    "@remix-run/react": "2.6.0",
    "@remix-run/serve": "2.6.0",
    "@vercel/remix": "2.5.1",
    "isbot": "^4",
    "react": "18.2.0",
    "react-dom": "18.2.0"
  },
  "devDependencies": {
    "@remix-run/dev": "2.6.0",
    "@remix-run/eslint-config": "2.6.0",
    "@types/react": "18.2.55",
    "@types/react-dom": "18.2.19",
    "@typescript-eslint/eslint-plugin": "6.21.0",
    "@typescript-eslint/parser": "6.21.0",
    "dotenv-cli": "7.3.0",
    "eslint": "8.56.0",
    "eslint-config-prettier": "9.1.0",
    "eslint-plugin-import": "2.29.1",
    "eslint-plugin-simple-import-sort": "12.0.0",
    "prettier": "3.2.5",
    "remix-flat-routes": "0.6.0",
    "typescript": "5.3.3"
  },
  "sideEffects": false
}
BIN  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/public/favicon.ico  (vendored, new file)
Binary file not shown. (Size after: 17 KiB)
7  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/remix.config.js  (vendored, new file)
@@ -0,0 +1,7 @@
/**
 * @type {import('@remix-run/dev').AppConfig}
 */
module.exports = {
  serverModuleFormat: "cjs",
  serverBuildTarget: "cjs",
}
2  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/remix.env.d.ts  (vendored, new file)
@@ -0,0 +1,2 @@
/// <reference types="@remix-run/dev" />
/// <reference types="@remix-run/node/globals" />
24  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/tsconfig.json  (vendored, new file)
@@ -0,0 +1,24 @@
{
  "include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
  "compilerOptions": {
    "lib": ["DOM", "DOM.Iterable", "ES2019"],
    "isolatedModules": true,
    "esModuleInterop": true,
    "jsx": "react-jsx",
    "noUncheckedIndexedAccess": true,
    "module": "ES2022",
    "outDir": "build",
    "moduleResolution": "Bundler",
    "resolveJsonModule": true,
    "target": "ES2019",
    "strict": true,
    "baseUrl": ".",
    "paths": {
      "~/*": ["./app/*"]
    },
    "skipLibCheck": true,
    "noEmit": true,
    "forceConsistentCasingInFileNames": true,
    "allowJs": true
  }
}
3  packages/remix/test/fixtures/14-node-linker-hoisted/apps/remix/vercel.json  (vendored, new file)
@@ -0,0 +1,3 @@
{
  "$schema": "https://openapi.vercel.sh/vercel.json"
}
30  packages/remix/test/fixtures/14-node-linker-hoisted/package.json  (vendored, new file)
@@ -0,0 +1,30 @@
{
  "name": "@ramble/root",
  "private": true,
  "dependencies": {
    "turbo": "1.12.3"
  },
  "type": "commonjs",
  "devDependencies": {
    "prettier": "3.2.5",
    "typescript": "5.3.3"
  },
  "scripts": {
    "build": "turbo run build",
    "dev": "turbo run dev",
    "ci": "turbo run typecheck lint prettier",
    "lint": "turbo run lint",
    "fix": "turbo run lint:fix prettier:fix",
    "prettier": "turbo run prettier",
    "prettier:fix": "turbo run prettier:fix",
    "lint:fix": "turbo run lint:fix",
    "format": "turbo run lint:fix && turbo run prettier:fix",
    "typecheck": "turbo run typecheck",
    "db:generate": "turbo run db:generate"
  },
  "packageManager": "pnpm@8.12.0",
  "engines": {
    "node": ">=18",
    "pnpm": ">=8"
  }
}
6737  packages/remix/test/fixtures/14-node-linker-hoisted/pnpm-lock.yaml  (generated, vendored, new file)
File diff suppressed because it is too large.
5  packages/remix/test/fixtures/14-node-linker-hoisted/pnpm-workspace.yaml  (vendored, new file)
@@ -0,0 +1,5 @@
packages:
  - apps/*
  - packages/*
  - packages/config/*
  - packages/server/*
8  packages/remix/test/fixtures/14-node-linker-hoisted/probes.json  (vendored, new file)
@@ -0,0 +1,8 @@
{
  "probes": [
    {
      "path": "/",
      "mustContain": "<p>hello</p>"
    }
  ]
}
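`probes.json` describes the fixture's smoke test: each probe is a path to request on the deployed fixture plus a string the response body must contain. Below is a minimal sketch of such a check, assuming a hypothetical `runProbes` runner and a deployment URL supplied by the caller; the repo's real test harness lives elsewhere and may work differently:

```ts
import { readFile } from 'fs/promises';

interface Probe {
  path: string;
  mustContain: string;
}

// Hypothetical runner: fetch each probed path and assert the marker string is present.
async function runProbes(baseUrl: string, probesFile: string): Promise<void> {
  const { probes } = JSON.parse(await readFile(probesFile, 'utf8')) as { probes: Probe[] };
  for (const probe of probes) {
    const res = await fetch(new URL(probe.path, baseUrl));
    const body = await res.text();
    if (!body.includes(probe.mustContain)) {
      throw new Error(`Probe failed for ${probe.path}: missing "${probe.mustContain}"`);
    }
  }
}

// e.g. runProbes('https://my-fixture-deployment.vercel.app', 'probes.json');
```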
20  packages/remix/test/fixtures/14-node-linker-hoisted/tsconfig.json  (vendored, new file)
@@ -0,0 +1,20 @@
{
  "compilerOptions": {
    "target": "es2017",
    "lib": ["dom", "dom.iterable", "esnext"],
    "allowJs": true,
    "skipLibCheck": true,
    "checkJs": true,
    "strict": true,
    "forceConsistentCasingInFileNames": true,
    "noEmit": true,
    "esModuleInterop": true,
    "module": "esnext",
    "moduleResolution": "node",
    "resolveJsonModule": true,
    "isolatedModules": true,
    "jsx": "preserve",
    "incremental": true,
    "noUncheckedIndexedAccess": true
  }
}
40  packages/remix/test/fixtures/14-node-linker-hoisted/turbo.json  (vendored, new file)
@@ -0,0 +1,40 @@
{
  "$schema": "https://turborepo.org/schema.json",
  "pipeline": {
    "build": {
      "dependsOn": ["^build"],
      "outputs": ["build/**", "public/build/**"]
    },
    "fix": {
      "outputs": []
    },
    "lint": {
      "outputs": []
    },
    "lint:fix": {
      "outputs": []
    },
    "prettier": {
      "outputs": []
    },
    "format": {
      "outputs": []
    },
    "prettier:fix": {
      "outputs": []
    },
    "dev": {
      "cache": false
    },
    "db:generate": {
      "cache": false
    },
    "db:push": {
      "cache": false
    },
    "typecheck": {
      "dependsOn": ["^build"],
      "outputs": []
    }
  }
}
7  packages/remix/test/fixtures/14-node-linker-hoisted/vercel.json  (vendored, new file)
@@ -0,0 +1,7 @@
{
  "projectSettings": {
    "framework": "remix",
    "rootDirectory": "apps/remix",
    "sourceFilesOutsideRootDirectory": true
  }
}
@@ -1,5 +1,13 @@
 # @vercel/ruby
 
+## 2.0.5
+
+### Patch Changes
+
+- add ruby3 to path during build ([#11094](https://github.com/vercel/vercel/pull/11094))
+
+- Remove deprecated `createLambda()` usage ([#11080](https://github.com/vercel/vercel/pull/11080))
+
 ## 2.0.4
 
 ### Patch Changes
@@ -1,7 +1,7 @@
 {
   "name": "@vercel/ruby",
   "author": "Nathan Cahill <nathan@nathancahill.com>",
-  "version": "2.0.4",
+  "version": "2.0.5",
   "license": "Apache-2.0",
   "main": "./dist/index",
   "homepage": "https://vercel.com/docs/runtimes#official-runtimes/ruby",
@@ -23,7 +23,7 @@
   "devDependencies": {
     "@types/fs-extra": "8.0.0",
     "@types/semver": "6.0.0",
-    "@vercel/build-utils": "7.5.1",
+    "@vercel/build-utils": "7.7.0",
     "execa": "2.0.4",
     "fs-extra": "^7.0.1",
     "jest-junit": "16.0.0",
@@ -10,14 +10,16 @@ import {
   writeFile,
 } from 'fs-extra';
 import {
-  BuildOptions,
   download,
   getWriteableDirectory,
   glob,
-  createLambda,
+  Lambda,
   debug,
   walkParentDirs,
   cloneEnv,
+  FileBlob,
+  type Files,
+  type BuildV3,
 } from '@vercel/build-utils';
 import { installBundler } from './install-ruby';
 
@@ -46,6 +48,7 @@ async function bundleInstall(
   bundlePath: string,
   bundleDir: string,
   gemfilePath: string,
+  rubyPath: string,
   runtime: string
 ) {
   debug(`running "bundle install --deployment"...`);
@@ -74,7 +77,7 @@ async function bundleInstall(
 
   const bundlerEnv = cloneEnv(process.env, {
     // Ensure the correct version of `ruby` is in front of the $PATH
-    PATH: `${dirname(bundlePath)}:${process.env.PATH}`,
+    PATH: `${dirname(rubyPath)}:${dirname(bundlePath)}:${process.env.PATH}`,
     BUNDLE_SILENCE_ROOT_WARNING: '1',
     BUNDLE_APP_CONFIG: bundleAppConfig,
     BUNDLE_JOBS: '4',
@@ -114,13 +117,13 @@ async function bundleInstall(
 
 export const version = 3;
 
-export async function build({
+export const build: BuildV3 = async ({
   workPath,
   files,
   entrypoint,
   config,
   meta = {},
-}: BuildOptions) {
+}) => {
   await download(files, workPath, meta);
   const entrypointFsDirname = join(workPath, dirname(entrypoint));
   const gemfileName = 'Gemfile';
@@ -140,10 +143,8 @@ export async function build({
   const gemfileContents = gemfilePath
     ? await readFile(gemfilePath, 'utf8')
     : '';
-  const { gemHome, bundlerPath, vendorPath, runtime } = await installBundler(
-    meta,
-    gemfileContents
-  );
+  const { gemHome, bundlerPath, vendorPath, runtime, rubyPath } =
+    await installBundler(meta, gemfileContents);
   process.env.GEM_HOME = gemHome;
   debug(`Checking existing vendor directory at "${vendorPath}"`);
   const vendorDir = join(workPath, vendorPath);
@@ -187,7 +188,13 @@ export async function build({
       } else {
         // try installing. this won't work if native extesions are required.
         // if that's the case, gems should be vendored locally before deploying.
-        await bundleInstall(bundlerPath, bundleDir, gemfilePath, runtime);
+        await bundleInstall(
+          bundlerPath,
+          bundleDir,
+          gemfilePath,
+          rubyPath,
+          runtime
+        );
       }
     }
   } else {
@@ -217,12 +224,11 @@ export async function build({
   // somethig else
   const handlerRbFilename = 'vc__handler__ruby';
 
-  await writeFile(
-    join(workPath, `${handlerRbFilename}.rb`),
-    nowHandlerRbContents
-  );
+  const outputFiles: Files = await glob('**', workPath);
 
-  const outputFiles = await glob('**', workPath);
+  outputFiles[`${handlerRbFilename}.rb`] = new FileBlob({
+    data: nowHandlerRbContents,
+  });
 
   // static analysis is impossible with ruby.
   // instead, provide `includeFiles` and `excludeFiles` config options to reduce bundle size.
@@ -253,12 +259,12 @@ export async function build({
     }
   }
 
-  const lambda = await createLambda({
+  const output = new Lambda({
     files: outputFiles,
     handler: `${handlerRbFilename}.vc__handler`,
     runtime,
     environment: {},
   });
 
-  return { output: lambda };
-}
+  return { output };
+};
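Taken together, the hunks above replace the deprecated `createLambda()` helper with the `Lambda` class, attach the generated handler as an in-memory `FileBlob`, and thread the new `rubyPath` through `bundleInstall()`. For orientation, here is a condensed, illustrative sketch of the shape the builder ends up with after this change. It is not the actual `@vercel/ruby` source; the bundler-installation step is elided, and the `runtime` value and handler contents are placeholders:

```ts
import {
  download,
  glob,
  FileBlob,
  Lambda,
  type BuildV3,
} from '@vercel/build-utils';

export const version = 3;

// Condensed sketch of the post-migration build function (details elided).
export const build: BuildV3 = async ({ files, workPath, meta = {} }) => {
  await download(files, workPath, meta);

  // ...Gemfile detection and `bundleInstall(bundlerPath, bundleDir, gemfilePath, rubyPath, runtime)`
  // happen here in the real builder (see the hunks above)...

  const handlerRbFilename = 'vc__handler__ruby';
  const outputFiles = await glob('**', workPath);

  // The generated handler is attached as an in-memory FileBlob rather than
  // being written to disk and re-globbed.
  outputFiles[`${handlerRbFilename}.rb`] = new FileBlob({
    data: '# generated handler contents (placeholder)',
  });

  // `new Lambda()` replaces the deprecated `createLambda()` call; the result
  // is returned directly as the build output.
  const output = new Lambda({
    files: outputFiles,
    handler: `${handlerRbFilename}.vc__handler`,
    runtime: 'ruby3.2', // placeholder; the real value comes from installBundler()
    environment: {},
  });

  return { output };
};
```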
Some files were not shown because too many files have changed in this diff.