Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: feature/cr...@vercel/py (11 commits)
| Author | SHA1 | Date |
|---|---|---|
| | a630e19896 | |
| | 00430eeabf | |
| | 49f453742b | |
| | bcb8d4f812 | |
| | d42a8a6588 | |
| | 301bcf58fb | |
| | 11d0091393 | |
| | 6405fb51a1 | |
| | edd477e602 | |
| | 4f8f8a5b98 | |
| | a8e66eef41 | |
.github/workflows/test.yml (4 changes, vendored)

@@ -82,11 +82,11 @@ jobs:
  - run: yarn install --network-timeout 1000000

  - name: Build ${{matrix.packageName}} and all its dependencies
-   run: node_modules/.bin/turbo run build --cache-dir=".turbo" --scope=${{matrix.packageName}} --include-dependencies --no-deps
+   run: node utils/gen.js && node_modules/.bin/turbo run build --cache-dir=".turbo" --scope=${{matrix.packageName}} --include-dependencies --no-deps
    env:
      FORCE_COLOR: '1'
  - name: Test ${{matrix.packageName}}
-   run: node_modules/.bin/turbo run test --cache-dir=".turbo" --scope=${{matrix.packageName}} --no-deps -- ${{ join(matrix.testPaths, ' ') }}
+   run: node utils/gen.js && node_modules/.bin/turbo run test --cache-dir=".turbo" --scope=${{matrix.packageName}} --no-deps -- ${{ join(matrix.testPaths, ' ') }}
    shell: bash
    env:
      VERCEL_CLI_VERSION: ${{ needs.setup.outputs.dplUrl }}/tarballs/vercel.tgz
.gitignore (1 change, vendored)

@@ -28,3 +28,4 @@ test/lib/deployment/failed-page.txt
 __pycache__
 .vercel
 .turbo
+turbo-cache-key.json
@@ -1,3 +0,0 @@ (deleted file)
{
"extends": "next/core-web-vitals"
}
@@ -1,7 +1,6 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
swcMinify: true,
}

module.exports = nextConfig
examples/nextjs/package-lock.json (4800 changes, generated)
File diff suppressed because it is too large.

@@ -9,12 +9,8 @@
 "lint": "next lint"
 },
 "dependencies": {
-"next": "13.0.0",
+"next": "13.0.1",
 "react": "18.2.0",
 "react-dom": "18.2.0"
 },
-"devDependencies": {
-"eslint": "8.26.0",
-"eslint-config-next": "13.0.0"
-}
 }
package.json (10 changes)

@@ -44,14 +44,14 @@
 "publish-canary": "git checkout main && git pull && lerna version prerelease --preid canary --message \"Publish Canary\" --exact",
 "publish-from-github": "./utils/publish.sh",
 "changelog": "node utils/changelog.js",
-"build": "turbo run build",
+"build": "node utils/gen.js && turbo run build",
 "vercel-build": "yarn build && yarn run pack && cd api && node -r ts-eager/register ./_lib/script/build.ts",
 "pre-commit": "lint-staged",
 "test": "jest --rootDir=\"test\" --testPathPattern=\"\\.test.js\"",
-"test-unit": "yarn test && turbo run test-unit",
-"test-integration-cli": "turbo run test-integration-cli",
-"test-integration-once": "turbo run test-integration-once",
-"test-integration-dev": "turbo run test-integration-dev",
+"test-unit": "yarn test && node utils/gen.js && turbo run test-unit",
+"test-integration-cli": "node utils/gen.js && turbo run test-integration-cli",
+"test-integration-once": "node utils/gen.js && turbo run test-integration-once",
+"test-integration-dev": "node utils/gen.js && turbo run test-integration-dev",
 "lint": "eslint . --ext .ts,.js",
 "prepare": "husky install",
 "pack": "cd utils && node -r ts-eager/register ./pack.ts"
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/build-utils",
-"version": "5.5.5",
+"version": "5.5.6",
 "license": "MIT",
 "main": "./dist/index.js",
 "types": "./dist/index.d.js",
@@ -3,28 +3,37 @@ import { NodeVersion } from '../types';
 import { NowBuildError } from '../errors';
 import debug from '../debug';

-const allOptions = [
-{ major: 16, range: '16.x', runtime: 'nodejs16.x' },
-{ major: 14, range: '14.x', runtime: 'nodejs14.x' },
-{
-major: 12,
-range: '12.x',
-runtime: 'nodejs12.x',
-discontinueDate: new Date('2022-10-03'),
-},
-{
-major: 10,
-range: '10.x',
-runtime: 'nodejs10.x',
-discontinueDate: new Date('2021-04-20'),
-},
-{
-major: 8,
-range: '8.10.x',
-runtime: 'nodejs8.10',
-discontinueDate: new Date('2020-01-06'),
-},
-] as const;
+function getOptions() {
+const options = [
+{ major: 16, range: '16.x', runtime: 'nodejs16.x' },
+{ major: 14, range: '14.x', runtime: 'nodejs14.x' },
+{
+major: 12,
+range: '12.x',
+runtime: 'nodejs12.x',
+discontinueDate: new Date('2022-10-03'),
+},
+{
+major: 10,
+range: '10.x',
+runtime: 'nodejs10.x',
+discontinueDate: new Date('2021-04-20'),
+},
+{
+major: 8,
+range: '8.10.x',
+runtime: 'nodejs8.10',
+discontinueDate: new Date('2020-01-06'),
+},
+] as const;
+if (process.env.VERCEL_ALLOW_NODEJS18 === '1') {
+return [
+{ major: 18, range: '18.x', runtime: 'nodejs18.x' },
+...options,
+] as const;
+}
+return options;
+}

 function getHint(isAuto = false) {
 const { major, range } = getLatestNodeVersion();
@@ -34,11 +43,11 @@ function getHint(isAuto = false) {
 }

 export function getLatestNodeVersion() {
-return allOptions[0];
+return getOptions()[0];
 }

 export function getDiscontinuedNodeVersions(): NodeVersion[] {
-return allOptions.filter(isDiscontinued);
+return getOptions().filter(isDiscontinued);
 }

 export async function getSupportedNodeVersion(
@@ -50,7 +59,7 @@ export async function getSupportedNodeVersion(
 if (engineRange) {
 const found =
 validRange(engineRange) &&
-allOptions.some(o => {
+getOptions().some(o => {
 // the array is already in order so return the first
 // match which will be the newest version of node
 selection = o;
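The net effect of the getOptions() refactor: Node 18 is only offered when VERCEL_ALLOW_NODEJS18=1 is set. A minimal sketch of that behaviour, mirroring the unit test added further down (the import path is illustrative):

```ts
import {
  getLatestNodeVersion,
  getSupportedNodeVersion,
} from './node-version'; // illustrative path inside @vercel/build-utils

async function demo() {
  // Flag unset: 16.x is still the newest selectable version.
  console.log(getLatestNodeVersion().range); // '16.x'

  // Flag set: an 18.x entry is prepended to the options list.
  process.env.VERCEL_ALLOW_NODEJS18 = '1';
  console.log(getLatestNodeVersion().range); // '18.x'
  const selected = await getSupportedNodeVersion('18.x');
  console.log(selected.runtime); // 'nodejs18.x'
}

demo().catch(console.error);
```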
@@ -23,6 +23,7 @@ export interface LambdaOptionsBase {
 regions?: string[];
 supportsMultiPayloads?: boolean;
 supportsWrapper?: boolean;
+experimentalResponseStreaming?: boolean;
 }

 export interface LambdaOptionsWithFiles extends LambdaOptionsBase {
@@ -60,6 +61,7 @@ export class Lambda {
 zipBuffer?: Buffer;
 supportsMultiPayloads?: boolean;
 supportsWrapper?: boolean;
+experimentalResponseStreaming?: boolean;

 constructor(opts: LambdaOptions) {
 const {
@@ -72,6 +74,7 @@ export class Lambda {
 regions,
 supportsMultiPayloads,
 supportsWrapper,
+experimentalResponseStreaming,
 } = opts;
 if ('files' in opts) {
 assert(typeof opts.files === 'object', '"files" must be an object');
@@ -132,6 +135,7 @@ export class Lambda {
 this.zipBuffer = 'zipBuffer' in opts ? opts.zipBuffer : undefined;
 this.supportsMultiPayloads = supportsMultiPayloads;
 this.supportsWrapper = supportsWrapper;
+this.experimentalResponseStreaming = experimentalResponseStreaming;
 }

 async createZip(): Promise<Buffer> {
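The new option is a plain passthrough on the Lambda constructor; a minimal sketch of setting it from a builder, assuming FileBlob from @vercel/build-utils and illustrative handler/runtime values:

```ts
import { FileBlob, Lambda } from '@vercel/build-utils';

// Minimal sketch (not this changeset's builder code): a Lambda whose
// response may be streamed when the platform supports it.
const fn = new Lambda({
  files: {
    'index.js': new FileBlob({ data: 'exports.handler = () => {};' }),
  },
  handler: 'index.js', // illustrative
  runtime: 'nodejs18.x', // illustrative
  experimentalResponseStreaming: true,
});

console.log(fn.experimentalResponseStreaming); // true
```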
packages/build-utils/test/unit.test.ts (17 changes, vendored)

@@ -285,6 +285,23 @@ it('should match all semver ranges', async () => {
 );
 });

+it('should only allow nodejs18.x when env var is set', async () => {
+try {
+expect(getLatestNodeVersion()).toHaveProperty('major', 16);
+expect(getSupportedNodeVersion('18.x')).rejects.toThrow();
+
+process.env.VERCEL_ALLOW_NODEJS18 = '1';
+
+expect(getLatestNodeVersion()).toHaveProperty('major', 18);
+expect(await getSupportedNodeVersion('18.x')).toHaveProperty('major', 18);
+expect(await getSupportedNodeVersion('18')).toHaveProperty('major', 18);
+expect(await getSupportedNodeVersion('18.1.0')).toHaveProperty('major', 18);
+expect(await getSupportedNodeVersion('>=16')).toHaveProperty('major', 18);
+} finally {
+delete process.env.VERCEL_ALLOW_NODEJS18;
+}
+});
+
 it('should ignore node version in vercel dev getNodeVersion()', async () => {
 expect(
 await getNodeVersion(
@@ -1,6 +1,6 @@
 {
 "name": "vercel",
-"version": "28.4.12",
+"version": "28.4.13",
 "preferGlobal": true,
 "license": "Apache-2.0",
 "description": "The command-line interface for Vercel",
@@ -41,16 +41,16 @@
 "node": ">= 14"
 },
 "dependencies": {
-"@vercel/build-utils": "5.5.5",
-"@vercel/go": "2.2.13",
-"@vercel/hydrogen": "0.0.26",
-"@vercel/next": "3.2.6",
-"@vercel/node": "2.5.26",
-"@vercel/python": "3.1.22",
-"@vercel/redwood": "1.0.31",
-"@vercel/remix": "1.0.32",
-"@vercel/ruby": "1.3.39",
-"@vercel/static-build": "1.0.32",
+"@vercel/build-utils": "5.5.6",
+"@vercel/go": "2.2.14",
+"@vercel/hydrogen": "0.0.27",
+"@vercel/next": "3.2.7",
+"@vercel/node": "2.6.0",
+"@vercel/python": "3.1.23",
+"@vercel/redwood": "1.0.32",
+"@vercel/remix": "1.0.33",
+"@vercel/ruby": "1.3.40",
+"@vercel/static-build": "1.0.33",
 "update-notifier": "5.1.0"
 },
 "devDependencies": {
@@ -95,9 +95,10 @@
 "@types/which": "1.3.2",
 "@types/write-json-file": "2.2.1",
 "@types/yauzl-promise": "2.1.0",
-"@vercel/client": "12.2.13",
-"@vercel/frameworks": "1.1.8",
-"@vercel/fs-detectors": "3.4.5",
+"@vercel/client": "12.2.14",
+"@vercel/error-utils": "1.0.1",
+"@vercel/frameworks": "1.1.9",
+"@vercel/fs-detectors": "3.4.6",
 "@vercel/fun": "1.0.4",
 "@vercel/ncc": "0.24.0",
 "@zeit/source-map-support": "0.6.2",
@@ -70,7 +70,7 @@ import getPrebuiltJson from '../../util/deploy/get-prebuilt-json';
 import { createGitMeta } from '../../util/create-git-meta';
 import { isValidArchive } from '../../util/deploy/validate-archive-format';
 import { parseEnv } from '../../util/parse-env';
-import { errorToString, isErrnoException, isError } from '../../util/is-error';
+import { errorToString, isErrnoException, isError } from '@vercel/error-utils';
 import { pickOverrides } from '../../util/projects/project-settings';

 export default async (client: Client): Promise<number> => {
@@ -15,7 +15,7 @@ import readConfig from '../../util/config/read-config';
 import readJSONFile from '../../util/read-json-file';
 import { getPkgName, getCommandName } from '../../util/pkg-name';
 import { CantParseJSONFile } from '../../util/errors-ts';
-import { isErrnoException } from '../../util/is-error';
+import { isErrnoException } from '@vercel/error-utils';

 const COMMAND_CONFIG = {
 dev: ['dev'],
@@ -11,7 +11,7 @@ import promptBool from '../../util/input/prompt-bool';
 import purchaseDomain from '../../util/domains/purchase-domain';
 import stamp from '../../util/output/stamp';
 import { getCommandName } from '../../util/pkg-name';
-import { errorToString } from '../../util/is-error';
+import { errorToString } from '@vercel/error-utils';

 type Options = {};
packages/cli/src/commands/env/pull.ts (2 changes, vendored)

@@ -18,7 +18,7 @@ import {
 buildDeltaString,
 createEnvObject,
 } from '../../util/env/diff-env-files';
-import { isErrnoException } from '../../util/is-error';
+import { isErrnoException } from '@vercel/error-utils';

 const CONTENTS_PREFIX = '# Created by Vercel CLI\n';

@@ -7,7 +7,7 @@ import handleError from '../../util/handle-error';
 import logo from '../../util/output/logo';
 import init from './init';
 import { getPkgName } from '../../util/pkg-name';
-import { isError } from '../../util/is-error';
+import { isError } from '@vercel/error-utils';

 const COMMAND_CONFIG = {
 init: ['init'],
@@ -13,7 +13,7 @@ import { getDeployment } from '../util/get-deployment';
 import { Deployment } from '@vercel/client';
 import { Build } from '../types';
 import title from 'title';
-import { isErrnoException } from '../util/is-error';
+import { isErrnoException } from '@vercel/error-utils';
 import { isAPIError } from '../util/errors-ts';
 import { URL } from 'url';

@@ -20,7 +20,7 @@ import { getLinkedProject } from '../util/projects/link';
 import { ensureLink } from '../util/link/ensure-link';
 import getScope from '../util/get-scope';
 import { isAPIError } from '../util/errors-ts';
-import { isErrnoException } from '../util/is-error';
+import { isErrnoException } from '@vercel/error-utils';

 const help = () => {
 console.log(`
@@ -56,7 +56,7 @@ const help = () => {
 ${chalk.gray('–')} List all deployments for the project ${chalk.dim(
 '`my-app`'
 )} in the team of the currently linked project

 ${chalk.cyan(`$ ${getPkgName()} ls my-app`)}

 ${chalk.gray('–')} Filter deployments by metadata
@@ -6,7 +6,7 @@ import getArgs from '../util/get-args';
 import Client from '../util/client';
 import { getCommandName, getPkgName } from '../util/pkg-name';
 import { isAPIError } from '../util/errors-ts';
-import { errorToString } from '../util/is-error';
+import { errorToString } from '@vercel/error-utils';

 const help = () => {
 console.log(`
@@ -11,7 +11,7 @@ import { getPkgName, getCommandName } from '../../util/pkg-name';
 import Client from '../../util/client';
 import createTeam from '../../util/teams/create-team';
 import patchTeam from '../../util/teams/patch-team';
-import { errorToString, isError } from '../../util/is-error';
+import { errorToString, isError } from '@vercel/error-utils';

 const validateSlugKeypress = (data: string, value: string) =>
 // TODO: the `value` here should contain the current value + the keypress
@@ -12,7 +12,7 @@ import { email as regexEmail } from '../../util/input/regexes';
 import getTeams from '../../util/teams/get-teams';
 import inviteUserToTeam from '../../util/teams/invite-user-to-team';
 import { isAPIError } from '../../util/errors-ts';
-import { errorToString, isError } from '../../util/is-error';
+import { errorToString, isError } from '@vercel/error-utils';

 const validateEmail = (data: string) =>
 regexEmail.test(data.trim()) || data.length === 0;

@@ -1,5 +1,5 @@
 #!/usr/bin/env node
-import { isErrnoException, isError, errorToString } from './util/is-error';
+import { isErrnoException, isError, errorToString } from '@vercel/error-utils';

 try {
 // Test to see if cwd has been deleted before
@@ -16,7 +16,7 @@ import { VERCEL_DIR } from '../projects/link';
 import { Output } from '../output';
 import readJSONFile from '../read-json-file';
 import { CantParseJSONFile } from '../errors-ts';
-import { errorToString, isErrnoException, isError } from '../is-error';
+import { errorToString, isErrnoException, isError } from '@vercel/error-utils';
 import cmd from '../output/cmd';
 import code from '../output/code';

@@ -2,7 +2,7 @@ import { readFileSync } from 'fs';
 import { resolve } from 'path';
 import Client from '../client';
 import { Cert } from '../../types';
-import { isErrnoException } from '../is-error';
+import { isErrnoException } from '@vercel/error-utils';
 import { isAPIError } from '../errors-ts';

 export default async function createCertFromFile(
@@ -2,7 +2,7 @@ import retry from 'async-retry';
 import { Cert } from '../../types';
 import Client from '../client';
 import { isAPIError } from '../errors-ts';
-import { isError } from '../is-error';
+import { isError } from '@vercel/error-utils';

 // When it's a configuration error we should retry because of the DNS propagation
 // otherwise we bail to handle the error in the upper level
@@ -23,7 +23,7 @@ import type {
 } from '../types';
 import { sharedPromise } from './promise';
 import { APIError } from './errors-ts';
-import { normalizeError } from './is-error';
+import { normalizeError } from '@vercel/error-utils';

 const isSAMLError = (v: any): v is SAMLError => {
 return v && v.saml;
@@ -10,7 +10,7 @@ import error from '../output/error';
 import highlight from '../output/highlight';
 import { VercelConfig } from '../dev/types';
 import { AuthConfig, GlobalConfig } from '../../types';
-import { isErrnoException, isError } from '../is-error';
+import { isErrnoException, isError } from '@vercel/error-utils';

 const VERCEL_DIR = getGlobalPathConfig();
 const CONFIG_FILE_PATH = join(VERCEL_DIR, 'config.json');

@@ -5,7 +5,7 @@ import git from 'git-last-commit';
 import { exec } from 'child_process';
 import { GitMetadata, Project } from '../types';
 import { Output } from './output';
-import { errorToString } from './is-error';
+import { errorToString } from '@vercel/error-utils';

 export async function createGitMeta(
 directory: string,
@@ -94,7 +94,7 @@ import {
 isErrnoException,
 isError,
 isSpawnError,
-} from '../is-error';
+} from '@vercel/error-utils';
 import isURL from './is-url';
 import { pickOverrides } from '../projects/project-settings';
 import { replaceLocalhost } from './parse-listen';
@@ -1448,7 +1448,9 @@ export default class DevServer {
 }
 );

-if (middlewareRes.status === 500) {
+const middlewareBody = await middlewareRes.buffer();
+
+if (middlewareRes.status === 500 && middlewareBody.byteLength === 0) {
 await this.sendError(
 req,
 res,
@@ -1493,7 +1495,6 @@ export default class DevServer {
 }

 if (!shouldContinue) {
-const middlewareBody = await middlewareRes.buffer();
 this.setResponseHeaders(res, requestId);
 if (middlewareBody.length > 0) {
 res.setHeader('content-length', middlewareBody.length);
packages/cli/src/util/env/known-error.ts (2 changes, vendored)

@@ -1,4 +1,4 @@
-import { isErrnoException } from '../is-error';
+import { isErrnoException } from '@vercel/error-utils';

 const knownErrorsCodes = new Set([
 'PAYMENT_REQUIRED',

@@ -5,7 +5,7 @@ import { NowError } from './now-error';
 import code from './output/code';
 import { getCommandName } from './pkg-name';
 import chalk from 'chalk';
-import { isError } from './is-error';
+import { isError } from '@vercel/error-utils';

 /**
 * This error is thrown when there is an API error with a payload. The error

@@ -10,7 +10,7 @@ import humanizePath from './humanize-path';
 import readJSONFile from './read-json-file';
 import { VercelConfig } from './dev/types';
 import { Output } from './output';
-import { isErrnoException } from './is-error';
+import { isErrnoException } from '@vercel/error-utils';

 let config: VercelConfig;
@@ -7,7 +7,7 @@ import executeLogin from './login';
 import Client from '../client';
 import { LoginResult } from './types';
 import { isAPIError } from '../errors-ts';
-import { errorToString } from '../is-error';
+import { errorToString } from '@vercel/error-utils';

 export default async function doEmailLogin(
 client: Client,

@@ -1,6 +1,6 @@
 import Client from '../client';
 import { InvalidEmail, AccountNotFound, isAPIError } from '../errors-ts';
-import { errorToString } from '../is-error';
+import { errorToString } from '@vercel/error-utils';
 import { LoginData } from './types';

 export default async function login(

@@ -2,7 +2,7 @@ import chalk from 'chalk';
 import renderLink from './link';
 import wait, { StopSpinner } from './wait';
 import type { WritableTTY } from '../../types';
-import { errorToString } from '../is-error';
+import { errorToString } from '@vercel/error-utils';

 const IS_TEST = process.env.NODE_ENV === 'test';

@@ -18,7 +18,7 @@ import { prependEmoji, emoji, EmojiLabel } from '../emoji';
 import { isDirectory } from '../config/global-path';
 import { NowBuildError, getPlatformEnv } from '@vercel/build-utils';
 import outputCode from '../output/code';
-import { isErrnoException, isError } from '../is-error';
+import { isErrnoException, isError } from '@vercel/error-utils';

 const readFile = promisify(fs.readFile);
 const writeFile = promisify(fs.writeFile);

@@ -1,7 +1,7 @@
 import Client from './client';
 import getScope from './get-scope';
 import getArgs from './get-args';
-import { isError } from './is-error';
+import { isError } from '@vercel/error-utils';
 import type { Team, User } from '../types';

 export default async function reportError(
@@ -6,7 +6,7 @@
 },
 "dependencies": {
 "next": "latest",
-"react": "^17.0.0",
-"react-dom": "^17.0.0"
+"react": "^18.2.0",
+"react-dom": "^18.2.0"
 }
 }

@@ -1 +1 @@
-export default () => new Response(null, { status: 500 });
+export default () => new Response('Example Error', { status: 500 });

@@ -551,7 +551,7 @@ test(
 test(
 '[vercel dev] Middleware with an explicit 500 response',
 testFixtureStdio('middleware-500-response', async (testPath: any) => {
-await testPath(500, '/', /EDGE_FUNCTION_INVOCATION_FAILED/);
+await testPath(500, '/', 'Example Error');
 })
 );
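With the dev-server change above, vercel dev only falls back to the generic EDGE_FUNCTION_INVOCATION_FAILED page when a middleware returns a 500 with an empty body; a 500 that carries a body is forwarded as-is. A minimal middleware sketch of the case the updated fixture and test cover (file name per Next.js convention; illustrative):

```ts
// middleware.ts (illustrative)
// An explicit 500 whose body is now passed through to the client instead of
// being replaced by the EDGE_FUNCTION_INVOCATION_FAILED error page.
export default function middleware() {
  return new Response('Example Error', { status: 500 });
}
```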
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/client",
-"version": "12.2.13",
+"version": "12.2.14",
 "main": "dist/index.js",
 "typings": "dist/index.d.ts",
 "homepage": "https://vercel.com",
@@ -43,8 +43,8 @@
 ]
 },
 "dependencies": {
-"@vercel/build-utils": "5.5.5",
-"@vercel/routing-utils": "2.0.2",
+"@vercel/build-utils": "5.5.6",
+"@vercel/routing-utils": "2.1.0",
 "@zeit/fetch": "5.2.0",
 "async-retry": "1.2.3",
 "async-sema": "3.0.0",

@@ -1,6 +1,6 @@
 {
 "name": "@vercel/edge",
-"version": "0.0.5",
+"version": "0.1.0",
 "license": "MIT",
 "main": "dist/index.js",
 "module": "dist/index.mjs",
packages/error-utils/jest.config.js (13 lines, new file)

@@ -0,0 +1,13 @@
/** @type {import('@ts-jest/dist/types').InitialOptionsTsJest} */
module.exports = {
preset: 'ts-jest',
testEnvironment: 'node',
coverageThreshold: {
global: {
branches: 100,
functions: 100,
lines: 100,
statements: 100,
},
},
};
packages/error-utils/package.json (26 lines, new file)

@@ -0,0 +1,26 @@
{
"name": "@vercel/error-utils",
"version": "1.0.1",
"private": true,
"description": "A collection of error utilities for vercel/vercel",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"repository": {
"type": "git",
"url": "https://github.com/vercel/vercel.git",
"directory": "packages/error-utils"
},
"scripts": {
"build": "tsc",
"test": "jest --coverage --env node --verbose",
"test-unit": "yarn test"
},
"license": "MIT",
"devDependencies": {
"@types/jest": "29.2.1",
"@types/node": "16.11.7",
"jest": "29.2.2",
"ts-jest": "29.0.3",
"typescript": "^4.8.4"
}
}
@@ -25,7 +25,6 @@ export const isError = (error: unknown): error is Error => {
 // Walk the prototype tree until we find a matching object.
 while (error) {
 if (Object.prototype.toString.call(error) === '[object Error]') return true;
 // eslint-disable-next-line no-param-reassign -- TODO: Fix eslint error following @vercel/style-guide migration
 error = Object.getPrototypeOf(error);
 }
packages/error-utils/test/index.test.ts (148 lines, new file, vendored)

@@ -0,0 +1,148 @@
import fs from 'node:fs';
import {
isObject,
isError,
isErrnoException,
isErrorLike,
normalizeError,
isSpawnError,
errorToString,
} from '../src';

const ARRAY: any[] = [];
const BIGINT = 1n;
const BOOLEAN = true;
const FUNCTION = () => {};
const NULL = null;
const NUMBER = 0;
const OBJECT = {};
const STRING = '';
const SYMBOL = Symbol('');
const UNDEFINED = undefined;

class CLASS {} // `CLASS` is a function and `new CLASS()` is an Object

test('isObject returns true for objects only', () => {
for (const item of [ARRAY, new CLASS(), OBJECT]) {
expect(isObject(item)).toBe(true);
}
for (const item of [
BIGINT,
BOOLEAN,
CLASS,
FUNCTION,
NULL,
NUMBER,
STRING,
SYMBOL,
UNDEFINED,
]) {
expect(isObject(item)).toBe(false);
}
});

test('isError returns true for Error instances only', () => {
for (const error of [
new Error(),
new EvalError(),
new RangeError(),
new ReferenceError(),
new SyntaxError(),
new TypeError(),
new URIError(),
]) {
expect(isError(error)).toBe(true);
}
for (const item of [
ARRAY,
BIGINT,
BOOLEAN,
CLASS,
new CLASS(),
FUNCTION,
NULL,
NUMBER,
OBJECT,
STRING,
SYMBOL,
UNDEFINED,
]) {
expect(isError(item)).toBe(false);
}
});

test('isError returns true for objects with a nested Error prototype', () => {
class Foo {}
const err = new Error();
Object.setPrototypeOf(err, Foo.prototype);
expect(isError(err)).toBe(true);
});

test('isErrnoException returns true for NodeJS.ErrnoException only', () => {
try {
fs.statSync('./i-definitely-do-not-exist');
fail();
} catch (err) {
expect(isErrnoException(err)).toBe(true);
}
});

test('isErrorLike returns true when object is like an error', () => {
expect(isErrorLike(new Error())).toBe(true);
expect(isErrorLike({ message: '' })).toBe(true);
expect(isErrorLike({})).toBe(false);
});

describe('errorToString', () => {
const message = 'message';
test('return `message` when first argument is an error', () => {
expect(errorToString(new Error(message))).toStrictEqual(message);
});
test('returns `message` when first argument is error like', () => {
expect(errorToString({ message })).toStrictEqual(message);
});
test('returns first argument when it is a string', () => {
expect(errorToString(message)).toStrictEqual(message);
});
test('returns second argument when first argument is not an error, error like, nor a string', () => {
expect(errorToString(null, message)).toStrictEqual(message);
});
test('returns default fallback message when first argument is not an error, error like, nor a string, and the second argument is not provided', () => {
expect(errorToString(null)).toStrictEqual('An unknown error has ocurred.');
});
});

describe('normalizeError', () => {
const message = 'message';
test('returns first argument if it is an error', () => {
expect(normalizeError(new Error(message))).toStrictEqual(
new Error(message)
);
});
test('returns a new error if argument is not error like', () => {
expect(normalizeError(message)).toStrictEqual(new Error(message));
});
test('returns a new error if argument is not error like', () => {
expect(normalizeError({ message })).toStrictEqual(new Error(message));
});
test('returns a new error with fallback message if argument is not error like nor a string.', () => {
expect(normalizeError(null)).toStrictEqual(
new Error('An unknown error has ocurred.')
);
});
test('returns an Error with the input object assigned to it', () => {
expect(normalizeError({ message, prop: 'value' })).toStrictEqual(
Object.assign(new Error(message), { prop: 'value' })
);
});
});

test('isSpawnError', () => {
const spawnError = new Error('spawn error');
Object.assign(spawnError, {
code: 'SPAWN_ERROR',
spawnargs: ['a', 'b', 'c'],
});
expect(isSpawnError(spawnError)).toBe(true);
expect(isSpawnError(new Error('not spawn error'))).toBe(false);
});
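Together with the import swaps above, these helpers replace the CLI's local util/is-error module. A small hedged sketch of a typical call site (the surrounding function is illustrative, not code from this changeset):

```ts
import {
  errorToString,
  isErrnoException,
  normalizeError,
} from '@vercel/error-utils';

// Illustrative consumer: narrow an unknown caught value before reporting it.
function describeFailure(err: unknown): string {
  if (isErrnoException(err) && err.code === 'ENOENT') {
    return `Missing file: ${err.path ?? 'unknown path'}`;
  }
  // normalizeError always hands back a real Error instance.
  return errorToString(normalizeError(err));
}

try {
  throw new Error('boom');
} catch (err) {
  console.log(describeFailure(err)); // 'boom'
}
```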
packages/error-utils/test/tsconfig.json (7 lines, new file, vendored)

@@ -0,0 +1,7 @@
{
"extends": "../tsconfig.json",
"compilerOptions": {
"types": ["node", "jest"]
},
"include": ["*.test.ts"]
}
packages/error-utils/tsconfig.json (22 lines, new file)

@@ -0,0 +1,22 @@
{
"compilerOptions": {
"declaration": true,
"declarationMap": true,
"esModuleInterop": true,
"lib": ["ES2020"],
"module": "commonjs",
"moduleResolution": "node",
"noEmitOnError": true,
"noFallthroughCasesInSwitch": true,
"noImplicitReturns": true,
"noUnusedLocals": true,
"noUnusedParameters": true,
"outDir": "./dist",
"types": ["node", "jest"],
"strict": true,
"sourceMap": true,
"target": "ES2020"
},
"include": ["src/**/*"],
"exclude": ["node_modules"]
}
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/frameworks",
-"version": "1.1.8",
+"version": "1.1.9",
 "main": "./dist/frameworks.js",
 "types": "./dist/frameworks.d.ts",
 "files": [
@@ -21,7 +21,7 @@
 "@types/js-yaml": "3.12.1",
 "@types/node": "12.0.4",
 "@types/node-fetch": "2.5.8",
-"@vercel/routing-utils": "2.0.2",
+"@vercel/routing-utils": "2.1.0",
 "ajv": "6.12.2",
 "typescript": "4.3.4"
 }
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/fs-detectors",
-"version": "3.4.5",
+"version": "3.4.6",
 "description": "Vercel filesystem detectors",
 "main": "./dist/index.js",
 "types": "./dist/index.d.ts",
@@ -19,8 +19,8 @@
 "test-unit": "yarn test"
 },
 "dependencies": {
-"@vercel/frameworks": "1.1.8",
-"@vercel/routing-utils": "2.0.2",
+"@vercel/frameworks": "1.1.9",
+"@vercel/routing-utils": "2.1.0",
 "glob": "8.0.3",
 "js-yaml": "4.1.0",
 "json5": "2.2.1",
@@ -66,7 +66,7 @@ export const monorepoManagers: Array<
 value: null,
 },
 outputDirectory: {
 placeholder: 'Nx default',
 value: null,
 },
 installCommand: {
 value: null,
@@ -93,7 +93,7 @@ export const monorepoManagers: Array<
 value: null,
 },
 outputDirectory: {
 placeholder: 'Rush default',
 value: null,
 },
 installCommand: {
 placeholder: 'Rush default',
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/go",
-"version": "2.2.13",
+"version": "2.2.14",
 "license": "MIT",
 "main": "./dist/index",
 "homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
@@ -35,7 +35,7 @@
 "@types/jest": "28.1.6",
 "@types/node-fetch": "^2.3.0",
 "@types/tar": "^4.0.0",
-"@vercel/build-utils": "5.5.5",
+"@vercel/build-utils": "5.5.6",
 "@vercel/ncc": "0.24.0",
 "async-retry": "1.3.1",
 "execa": "^1.0.0",
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/hydrogen",
-"version": "0.0.26",
+"version": "0.0.27",
 "license": "MIT",
 "main": "./dist/index.js",
 "homepage": "https://vercel.com/docs",
@@ -21,8 +21,8 @@
 "devDependencies": {
 "@types/jest": "27.5.1",
 "@types/node": "*",
-"@vercel/build-utils": "5.5.5",
-"@vercel/static-config": "2.0.3",
+"@vercel/build-utils": "5.5.6",
+"@vercel/static-config": "2.0.4",
 "typescript": "4.6.4"
 }
 }
@@ -1,6 +1,6 @@
 {
 "name": "@vercel/next",
-"version": "3.2.6",
+"version": "3.2.7",
 "license": "MIT",
 "main": "./dist/index",
 "homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
@@ -44,9 +44,9 @@
 "@types/semver": "6.0.0",
 "@types/text-table": "0.2.1",
 "@types/webpack-sources": "3.2.0",
-"@vercel/build-utils": "5.5.5",
+"@vercel/build-utils": "5.5.6",
 "@vercel/nft": "0.22.1",
-"@vercel/routing-utils": "2.0.2",
+"@vercel/routing-utils": "2.1.0",
 "async-sema": "3.0.1",
 "buffer-crc32": "0.2.13",
 "cheerio": "1.0.0-rc.10",
@@ -338,6 +338,7 @@ export async function serverBuild({

 const apiPages: string[] = [];
 const nonApiPages: string[] = [];
+const streamingPages: string[] = [];

 lambdaPageKeys.forEach(page => {
 if (
@@ -359,6 +360,8 @@ export async function serverBuild({

 if (pageMatchesApi(page)) {
 apiPages.push(page);
+} else if (appDir && lambdaAppPaths[page]) {
+streamingPages.push(page);
 } else {
 nonApiPages.push(page);
 }
@@ -546,7 +549,12 @@ export async function serverBuild({
 const compressedPages: {
 [page: string]: PseudoFile;
 } = {};
-const mergedPageKeys = [...nonApiPages, ...apiPages, ...internalPages];
+const mergedPageKeys = [
+...nonApiPages,
+...streamingPages,
+...apiPages,
+...internalPages,
+];
 const traceCache = {};

 const getOriginalPagePath = (page: string) => {
@@ -704,6 +712,27 @@ export async function serverBuild({
 pageExtensions,
 });

+const streamingPageLambdaGroups = await getPageLambdaGroups({
+entryPath: requiredServerFilesManifest.appDir || entryPath,
+config,
+pages: streamingPages,
+prerenderRoutes,
+pageTraces,
+compressedPages,
+tracedPseudoLayer: tracedPseudoLayer.pseudoLayer,
+initialPseudoLayer,
+lambdaCompressedByteLimit,
+initialPseudoLayerUncompressed: uncompressedInitialSize,
+internalPages,
+pageExtensions,
+});
+
+for (const group of streamingPageLambdaGroups) {
+if (!group.isPrerenders) {
+group.isStreaming = true;
+}
+}
+
 const apiLambdaGroups = await getPageLambdaGroups({
 entryPath: requiredServerFilesManifest.appDir || entryPath,
 config,
@@ -733,13 +762,23 @@ export async function serverBuild({
 pseudoLayerBytes: group.pseudoLayerBytes,
 uncompressedLayerBytes: group.pseudoLayerUncompressedBytes,
 })),
+streamingPageLambdaGroups: streamingPageLambdaGroups.map(group => ({
+pages: group.pages,
+isPrerender: group.isPrerenders,
+pseudoLayerBytes: group.pseudoLayerBytes,
+uncompressedLayerBytes: group.pseudoLayerUncompressedBytes,
+})),
 nextServerLayerSize: initialPseudoLayer.pseudoLayerBytes,
 },
 null,
 2
 )
 );
-const combinedGroups = [...pageLambdaGroups, ...apiLambdaGroups];
+const combinedGroups = [
+...pageLambdaGroups,
+...streamingPageLambdaGroups,
+...apiLambdaGroups,
+];

 await detectLambdaLimitExceeding(
 combinedGroups,
@@ -832,6 +871,7 @@ export async function serverBuild({
 memory: group.memory,
 runtime: nodeVersion.runtime,
 maxDuration: group.maxDuration,
+isStreaming: group.isStreaming,
 });

 for (const page of group.pages) {
@@ -748,6 +748,7 @@ export async function createPseudoLayer(files: {

 interface CreateLambdaFromPseudoLayersOptions extends LambdaOptionsWithFiles {
 layers: PseudoLayer[];
+isStreaming?: boolean;
 }

 // measured with 1, 2, 5, 10, and `os.cpus().length || 5`
@@ -757,6 +758,7 @@ const createLambdaSema = new Sema(1);
 export async function createLambdaFromPseudoLayers({
 files: baseFiles,
 layers,
+isStreaming,
 ...lambdaOptions
 }: CreateLambdaFromPseudoLayersOptions) {
 await createLambdaSema.acquire();
@@ -791,6 +793,11 @@ export async function createLambdaFromPseudoLayers({

 return new NodejsLambda({
 ...lambdaOptions,
+...(isStreaming
+? {
+experimentalResponseStreaming: true,
+}
+: {}),
 files,
 shouldAddHelpers: false,
 shouldAddSourcemapSupport: false,
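A hedged sketch of how a caller threads the new flag through createLambdaFromPseudoLayers (the import path, handler/runtime values, and the empty files map are illustrative; only the option names come from the hunks above):

```ts
import type { LambdaGroup, PseudoLayer } from './utils'; // illustrative path
import { createLambdaFromPseudoLayers } from './utils';

// Illustrative helper: one Lambda per group, streaming only when the group
// was flagged (non-prerender app-dir groups in server-build.ts above).
async function buildGroupLambda(group: LambdaGroup, layer: PseudoLayer) {
  return createLambdaFromPseudoLayers({
    files: {},                      // real callers pass the page files here
    layers: [layer],
    handler: 'index.js',            // illustrative
    runtime: 'nodejs18.x',          // illustrative
    memory: group.memory,
    maxDuration: group.maxDuration,
    isStreaming: group.isStreaming,
  });
}
```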
@@ -1273,6 +1280,7 @@ export type LambdaGroup = {
 pages: string[];
 memory?: number;
 maxDuration?: number;
+isStreaming?: boolean;
 isPrerenders?: boolean;
 pseudoLayer: PseudoLayer;
 pseudoLayerBytes: number;
@@ -2437,6 +2445,12 @@ export async function getMiddlewareBundle({
 shortPath.replace(/^app\//, '').replace(/(^|\/)page$/, '') || 'index';
 }

+if (routesManifest?.basePath) {
+shortPath = path.posix
+.join(routesManifest.basePath, shortPath)
+.replace(/^\//, '');
+}
+
 worker.edgeFunction.name = shortPath;
 source.edgeFunctions[shortPath] = worker.edgeFunction;
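A quick worked example of the new basePath normalization, using the '/test' basePath from the fixture added below:

```ts
import path from 'path';

// basePath '/test' + edge function short path 'api/hello'
// -> output key 'test/api/hello' (what the new unit test asserts).
const basePath = '/test';
const shortPath = 'api/hello';
const key = path.posix.join(basePath, shortPath).replace(/^\//, '');
console.log(key); // 'test/api/hello'
```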
@@ -1,9 +0,0 @@ (deleted file)
'use client';

export default function LazyComponent() {
return (
<>
<p>hello from lazy</p>
</>
);
}

@@ -1,10 +0,0 @@ (deleted file)
import { ClientComponent } from './test.js';

export default function DashboardIndexPage() {
return (
<>
<p>hello from app/dashboard/index</p>
<ClientComponent />
</>
);
}

@@ -1,15 +0,0 @@ (deleted file)
'use client';

import { useState, lazy } from 'react';

const Lazy = lazy(() => import('./lazy.js'));

export function ClientComponent() {
let [state] = useState('use client');
return (
<>
<Lazy />
<p className="hi">hello from modern the {state}</p>
</>
);
}

@@ -1,9 +0,0 @@ (deleted file)
'use client';

export default function LazyComponent() {
return (
<>
<p>hello from lazy</p>
</>
);
}

@@ -1,10 +0,0 @@ (deleted file)
import { ClientComponent } from './test.js';

export default function DashboardIndexPage() {
return (
<>
<p>hello from app/dashboard/index</p>
<ClientComponent />
</>
);
}

@@ -1,15 +0,0 @@ (deleted file)
'use client';

import { useState, lazy } from 'react';

const Lazy = lazy(() => import('./lazy.js'));

export function ClientComponent() {
let [state] = useState('use client');
return (
<>
<Lazy />
<p className="hi">hello from modern the {state}</p>
</>
);
}
@@ -7,77 +7,80 @@ const runBuildLambda = require('../../../../test/lib/run-build-lambda');

jest.setTimeout(360000);

it('should build with app-dir correctly', async () => {
const { buildResult } = await runBuildLambda(
path.join(__dirname, '../fixtures/00-app-dir')
);
// experimental appDir currently requires Node.js >= 16
if (parseInt(process.versions.node.split('.')[0], 10) >= 16) {
it('should build with app-dir correctly', async () => {
const { buildResult } = await runBuildLambda(
path.join(__dirname, '../fixtures/00-app-dir')
);

const lambdas = new Set();
const lambdas = new Set();

for (const key of Object.keys(buildResult.output)) {
if (buildResult.output[key].type === 'Lambda') {
lambdas.add(buildResult.output[key]);
for (const key of Object.keys(buildResult.output)) {
if (buildResult.output[key].type === 'Lambda') {
lambdas.add(buildResult.output[key]);
}
}
}

expect(lambdas.size).toBe(2);
expect(buildResult.output['dashboard']).toBeDefined();
expect(buildResult.output['dashboard/another']).toBeDefined();
expect(buildResult.output['dashboard/changelog']).toBeDefined();
expect(buildResult.output['dashboard/deployments/[id]']).toBeDefined();
expect(lambdas.size).toBe(2);
expect(buildResult.output['dashboard']).toBeDefined();
expect(buildResult.output['dashboard/another']).toBeDefined();
expect(buildResult.output['dashboard/changelog']).toBeDefined();
expect(buildResult.output['dashboard/deployments/[id]']).toBeDefined();

// prefixed static generation output with `/app` under dist server files
expect(buildResult.output['dashboard'].type).toBe('Prerender');
expect(buildResult.output['dashboard'].fallback.fsPath).toMatch(
/server\/app\/dashboard\.html$/
);
expect(buildResult.output['dashboard.rsc'].type).toBe('Prerender');
expect(buildResult.output['dashboard.rsc'].fallback.fsPath).toMatch(
/server\/app\/dashboard\.rsc$/
);
expect(buildResult.output['dashboard/index/index'].type).toBe('Prerender');
expect(buildResult.output['dashboard/index/index'].fallback.fsPath).toMatch(
/server\/app\/dashboard\/index\.html$/
);
expect(buildResult.output['dashboard/index.rsc'].type).toBe('Prerender');
expect(buildResult.output['dashboard/index.rsc'].fallback.fsPath).toMatch(
/server\/app\/dashboard\/index\.rsc$/
);
});
// prefixed static generation output with `/app` under dist server files
expect(buildResult.output['dashboard'].type).toBe('Prerender');
expect(buildResult.output['dashboard'].fallback.fsPath).toMatch(
/server\/app\/dashboard\.html$/
);
expect(buildResult.output['dashboard.rsc'].type).toBe('Prerender');
expect(buildResult.output['dashboard.rsc'].fallback.fsPath).toMatch(
/server\/app\/dashboard\.rsc$/
);
expect(buildResult.output['dashboard/index/index'].type).toBe('Prerender');
expect(buildResult.output['dashboard/index/index'].fallback.fsPath).toMatch(
/server\/app\/dashboard\/index\.html$/
);
expect(buildResult.output['dashboard/index.rsc'].type).toBe('Prerender');
expect(buildResult.output['dashboard/index.rsc'].fallback.fsPath).toMatch(
/server\/app\/dashboard\/index\.rsc$/
);
});

it('should build with app-dir in edge runtime correctly', async () => {
const { buildResult } = await runBuildLambda(
path.join(__dirname, '../fixtures/00-app-dir-edge')
);
it('should build with app-dir in edge runtime correctly', async () => {
const { buildResult } = await runBuildLambda(
path.join(__dirname, '../fixtures/00-app-dir-edge')
);

const edgeFunctions = new Set();
const edgeFunctions = new Set();

for (const key of Object.keys(buildResult.output)) {
if (buildResult.output[key].type === 'EdgeFunction') {
edgeFunctions.add(buildResult.output[key]);
for (const key of Object.keys(buildResult.output)) {
if (buildResult.output[key].type === 'EdgeFunction') {
edgeFunctions.add(buildResult.output[key]);
}
}
}

expect(edgeFunctions.size).toBe(3);
expect(buildResult.output['edge']).toBeDefined();
expect(buildResult.output['index']).toBeDefined();
expect(buildResult.output['index/index']).toBeDefined();
});
expect(edgeFunctions.size).toBe(3);
expect(buildResult.output['edge']).toBeDefined();
expect(buildResult.output['index']).toBeDefined();
expect(buildResult.output['index/index']).toBeDefined();
});

it('should show error from basePath with legacy monorepo build', async () => {
let error;
it('should show error from basePath with legacy monorepo build', async () => {
let error;

try {
await runBuildLambda(path.join(__dirname, 'legacy-monorepo-basepath'));
} catch (err) {
error = err;
}
console.error(error);
try {
await runBuildLambda(path.join(__dirname, 'legacy-monorepo-basepath'));
} catch (err) {
error = err;
}
console.error(error);

expect(error.message).toBe(
'basePath can not be used with `builds` in vercel.json, use Project Settings to configure your monorepo instead'
);
});
expect(error.message).toBe(
'basePath can not be used with `builds` in vercel.json, use Project Settings to configure your monorepo instead'
);
});
}

it('should build using server build', async () => {
const origLog = console.log;
@@ -0,0 +1,39 @@ (new file)
const fs = require('fs-extra');
const ms = require('ms');
const path = require('path');
const { build } = require('../../../../dist');
const { FileFsRef } = require('@vercel/build-utils');

jest.setTimeout(ms('6m'));

describe(`${__dirname.split(path.sep).pop()}`, () => {
it('should normalize routes in build results output', async () => {
const files = [
'index.test.js',
'next.config.js',
'package.json',
'tsconfig.json',
'pages/api/hello.ts',
'pages/index.tsx',
].reduce((filesMap, file) => {
const fsPath = path.join(__dirname, file);
const { mode } = fs.statSync(fsPath);
filesMap[path] = new FileFsRef({ mode, fsPath });
return filesMap;
}, {});

const { output } = await build({
config: {},
entrypoint: 'package.json',
files,
meta: {
skipDownload: true,
},
repoRootPath: __dirname,
workPath: __dirname,
});

expect(output).toHaveProperty('test/api/hello');
expect(output['test/api/hello'].type).toEqual('EdgeFunction');
});
});
packages/next/test/unit/fixtures/02-edge-function-basepath/next-env.d.ts (5 lines, new file, vendored)

@@ -0,0 +1,5 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />

// NOTE: This file should not be edited
// see https://nextjs.org/docs/basic-features/typescript for more information.
@@ -0,0 +1,7 @@ (new file)
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
basePath: '/test',
};

module.exports = nextConfig;
@@ -0,0 +1,15 @@ (new file)
{
"name": "02-edge-function-basepath",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start"
},
"dependencies": {
"next": "latest",
"react": "latest",
"react-dom": "latest"
}
}
@@ -0,0 +1,7 @@ (new file)
export const config = {
runtime: 'experimental-edge',
};

export default async function handler() {
return new Response('Hello World!');
}
@@ -0,0 +1,11 @@ (new file)
import type { NextPage } from 'next'

const Home: NextPage = () => {
return (
<div>
Home
</div>
)
}

export default Home
@@ -0,0 +1,20 @@ (new file)
{
"compilerOptions": {
"target": "es5",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noEmit": true,
"esModuleInterop": true,
"module": "esnext",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"incremental": true
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx"],
"exclude": ["node_modules"]
}
@@ -0,0 +1,191 @@ (new file)
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1

"@next/env@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/env/-/env-13.0.1.tgz#0361e203c7bfbc7b69679ec48f7b45a8f4cb1c2c"
integrity sha512-gK60YoFae3s8qi5UgIzbvxOhsh5gKyEaiKH5+kLBUYXLlrPyWJR2xKBj2WqvHkO7wDX7/Hed3DAqjSpU4ijIvQ==

"@next/swc-android-arm-eabi@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-13.0.1.tgz#7ce2a7b6576845bc6d7f55504bf9b82a0d9a2792"
integrity sha512-M28QSbohZlNXNn//HY6lV2T3YaMzG58Jwr0YwOdVmOQv6i+7lu6xe3GqQu4kdqInqhLrBXnL+nabFuGTVSHtTg==

"@next/swc-android-arm64@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-android-arm64/-/swc-android-arm64-13.0.1.tgz#85a13d7667042394939741be218076e4e83a45a2"
integrity sha512-szmO/i6GoHcPXcbhUKhwBMETWHNXH3ITz9wfxwOOFBNKdDU8pjKsHL88lg28aOiQYZSU1sxu1v1p9KY5kJIZCg==

"@next/swc-darwin-arm64@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-13.0.1.tgz#d615d286127bb096a8950a9d7180fcc5d307614d"
integrity sha512-O1RxCaiDNOjGZmdAp6SQoHUITt9aVDQXoR3lZ/TloI/NKRAyAV4u0KUUofK+KaZeHOmVTnPUaQuCyZSc3i1x5Q==

"@next/swc-darwin-x64@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-13.0.1.tgz#f410beb8cbe0e82562226309f8ec8924cc6cb410"
integrity sha512-8E6BY/VO+QqQkthhoWgB8mJMw1NcN9Vhl2OwEwxv8jy2r3zjeU+WNRxz4y8RLbcY0R1h+vHlXuP0mLnuac84tQ==

"@next/swc-freebsd-x64@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-freebsd-x64/-/swc-freebsd-x64-13.0.1.tgz#16eb9652d3f638305ca16b558408f5bc5eb6edde"
integrity sha512-ocwoOxm2KVwF50RyoAT+2RQPLlkyoF7sAqzMUVgj+S6+DTkY3iwH+Zpo0XAk2pnqT9qguOrKnEpq9EIx//+K7Q==

"@next/swc-linux-arm-gnueabihf@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-13.0.1.tgz#0da0700ccf654f813b4c86d057a998598a2fd427"
integrity sha512-yO7e3zITfGol/N6lPQnmIRi0WyuILBMXrvH6EdmWzzqMDJFfTCII6l+B6gMO5WVDCTQUGQlQRNZ7sFqWR4I71g==

"@next/swc-linux-arm64-gnu@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-13.0.1.tgz#f34759cd41086f5b8b582081b2af54f67dc544ae"
integrity sha512-OEs6WDPDI8RyM8SjOqTDMqMBfOlU97VnW6ZMXUvzUTyH0K9c7NF+cn7UMu+I4tKFN0uJ9WQs/6TYaFBGkgoVVA==

"@next/swc-linux-arm64-musl@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-13.0.1.tgz#bcfbf1cdfb9f4d632e7ebd67fd62b768cdd08cb7"
integrity sha512-y5ypFK0Y3urZSFoQxbtDqvKsBx026sz+Fm+xHlPWlGHNZrbs3Q812iONjcZTo09QwRMk5X86iMWBRxV18xMhaw==

"@next/swc-linux-x64-gnu@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-13.0.1.tgz#ed77528d4a3195d5e57d5d94d12cb2206c2b19ac"
integrity sha512-XDIHEE6SU8VCF+dUVntD6PDv6RK31N0forx9kucZBYirbe8vCZ+Yx8hYgvtIaGrTcWtGxibxmND0pIuHDq8H5g==

"@next/swc-linux-x64-musl@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-13.0.1.tgz#74cda49229d2a7fa421fee6b7dcd621a57934a5e"
integrity sha512-yxIOuuz5EOx0F1FDtsyzaLgnDym0Ysxv8CWeJyDTKKmt9BVyITg6q/cD+RP9bEkT1TQi+PYXIMATSz675Q82xw==

"@next/swc-win32-arm64-msvc@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-13.0.1.tgz#15d6add92aa897148d6c45749bf9d2eacee87197"
integrity sha512-+ucLe2qgQzP+FM94jD4ns6LDGyMFaX9k3lVHqu/tsQCy2giMymbport4y4p77mYcXEMlDaHMzlHgOQyHRniWFA==

"@next/swc-win32-ia32-msvc@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-13.0.1.tgz#e0c57902fe75327d092abb1ef19657775fe26f85"
integrity sha512-Krr/qGN7OB35oZuvMAZKoXDt2IapynIWLh5A5rz6AODb7f/ZJqyAuZSK12vOa2zKdobS36Qm4IlxxBqn9c00MA==

"@next/swc-win32-x64-msvc@13.0.1":
version "13.0.1"
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-13.0.1.tgz#469dde61519f6a310874af93ee5969f1d5ff6d03"
integrity sha512-t/0G33t/6VGWZUGCOT7rG42qqvf/x+MrFp1CU+8CN6PrjSSL57R5bqkXfubV9t4eCEnUxVP+5Hn3MoEXEebtEw==

"@swc/helpers@0.4.11":
version "0.4.11"
resolved "https://registry.yarnpkg.com/@swc/helpers/-/helpers-0.4.11.tgz#db23a376761b3d31c26502122f349a21b592c8de"
integrity sha512-rEUrBSGIoSFuYxwBYtlUFMlE2CwGhmW+w9355/5oduSw8e5h2+Tj4UrAGNNgP9915++wj5vkQo0UuOBqOAq4nw==
dependencies:
tslib "^2.4.0"

caniuse-lite@^1.0.30001406:
version "1.0.30001429"
resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001429.tgz#70cdae959096756a85713b36dd9cb82e62325639"
integrity sha512-511ThLu1hF+5RRRt0zYCf2U2yRr9GPF6m5y90SBCWsvSoYoW7yAGlv/elyPaNfvGCkp6kj/KFZWU0BMA69Prsg==

client-only@0.0.1:
version "0.0.1"
resolved "https://registry.yarnpkg.com/client-only/-/client-only-0.0.1.tgz#38bba5d403c41ab150bff64a95c85013cf73bca1"
integrity sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==

"js-tokens@^3.0.0 || ^4.0.0":
version "4.0.0"
resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==

loose-envify@^1.1.0:
version "1.4.0"
resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf"
integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==
dependencies:
js-tokens "^3.0.0 || ^4.0.0"

nanoid@^3.3.4:
version "3.3.4"
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.4.tgz#730b67e3cd09e2deacf03c027c81c9d9dbc5e8ab"
integrity sha512-MqBkQh/OHTS2egovRtLk45wEyNXwF+cokD+1YPf9u5VfJiRdAiRwB2froX5Co9Rh20xs4siNPm8naNotSD6RBw==

next@latest:
version "13.0.1"
resolved "https://registry.yarnpkg.com/next/-/next-13.0.1.tgz#8b4fc9998e58f503bdecb92f06fe6f850ac260d0"
integrity sha512-ErCNBPIeZMKFn6hX+ZBSlqZVgJIeitEqhGTuQUNmYXJ07/A71DZ7AJI8eyHYUdBb686LUpV1/oBdTq9RpzRVPg==
dependencies:
"@next/env" "13.0.1"
"@swc/helpers" "0.4.11"
caniuse-lite "^1.0.30001406"
postcss "8.4.14"
styled-jsx "5.1.0"
use-sync-external-store "1.2.0"
optionalDependencies:
"@next/swc-android-arm-eabi" "13.0.1"
"@next/swc-android-arm64" "13.0.1"
"@next/swc-darwin-arm64" "13.0.1"
"@next/swc-darwin-x64" "13.0.1"
"@next/swc-freebsd-x64" "13.0.1"
"@next/swc-linux-arm-gnueabihf" "13.0.1"
"@next/swc-linux-arm64-gnu" "13.0.1"
"@next/swc-linux-arm64-musl" "13.0.1"
"@next/swc-linux-x64-gnu" "13.0.1"
"@next/swc-linux-x64-musl" "13.0.1"
"@next/swc-win32-arm64-msvc" "13.0.1"
"@next/swc-win32-ia32-msvc" "13.0.1"
"@next/swc-win32-x64-msvc" "13.0.1"

picocolors@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.0.0.tgz#cb5bdc74ff3f51892236eaf79d68bc44564ab81c"
integrity sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==

postcss@8.4.14:
version "8.4.14"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.14.tgz#ee9274d5622b4858c1007a74d76e42e56fd21caf"
integrity sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==
dependencies:
nanoid "^3.3.4"
picocolors "^1.0.0"
source-map-js "^1.0.2"

react-dom@latest:
version "18.2.0"
resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.2.0.tgz#22aaf38708db2674ed9ada224ca4aa708d821e3d"
integrity sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==
dependencies:
loose-envify "^1.1.0"
scheduler "^0.23.0"

react@latest:
version "18.2.0"
resolved "https://registry.yarnpkg.com/react/-/react-18.2.0.tgz#555bd98592883255fa00de14f1151a917b5d77d5"
integrity sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==
dependencies:
loose-envify "^1.1.0"

scheduler@^0.23.0:
version "0.23.0"
resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.23.0.tgz#ba8041afc3d30eb206a487b6b384002e4e61fdfe"
integrity sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==
dependencies:
loose-envify "^1.1.0"

source-map-js@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.0.2.tgz#adbc361d9c62df380125e7f161f71c826f1e490c"
integrity sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==

styled-jsx@5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.1.0.tgz#4a5622ab9714bd3fcfaeec292aa555871f057563"
|
||||
integrity sha512-/iHaRJt9U7T+5tp6TRelLnqBqiaIT0HsO0+vgyj8hK2KUk7aejFqRrumqPUlAqDwAj8IbS/1hk3IhBAAK/FCUQ==
|
||||
dependencies:
|
||||
client-only "0.0.1"
|
||||
|
||||
tslib@^2.4.0:
|
||||
version "2.4.1"
|
||||
resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.1.tgz#0d0bfbaac2880b91e22df0768e55be9753a5b17e"
|
||||
integrity sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==
|
||||
|
||||
use-sync-external-store@1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz#7dbefd6ef3fe4e767a0cf5d7287aacfb5846928a"
|
||||
integrity sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==
|
||||
@@ -1,4 +1,10 @@
const { URL } = require('url');
const { request } = require('http');
const { Socket } = require('net');
const { createCipheriv } = require('crypto');
const { pipeline, Transform } = require('stream');

const CRLF = `\r\n`;

/**
 * If the `http.Server` handler function throws an error asynchronously,
@@ -17,9 +23,23 @@ process.on('unhandledRejection', err => {
 */
function normalizeProxyEvent(event) {
  let bodyBuffer;
  const { method, path, headers, encoding, body, payloads } = JSON.parse(
    event.body
  );
  /**
   * @type {import('./types').VercelProxyRequest}
   */
  const payload = JSON.parse(event.body);
  const {
    method,
    path,
    headers,
    encoding,
    body,
    payloads,
    responseCallbackCipher,
    responseCallbackCipherIV,
    responseCallbackCipherKey,
    responseCallbackStream,
    responseCallbackUrl,
  } = payload;

  /**
   *
@@ -28,7 +48,7 @@ function normalizeProxyEvent(event) {
   */
  const normalizeBody = b => {
    if (b) {
      if (encoding === 'base64') {
      if (typeof b === 'string' && encoding === 'base64') {
        bodyBuffer = Buffer.from(b, encoding);
      } else if (encoding === undefined) {
        bodyBuffer = Buffer.from(b);
@@ -42,13 +62,9 @@ function normalizeProxyEvent(event) {
  };

  if (payloads) {
    /**
     * @param {{ body: string | Buffer }} payload
     */
    const normalizePayload = payload => {
      payload.body = normalizeBody(payload.body);
    };
    payloads.forEach(normalizePayload);
    for (const p of payloads) {
      p.body = normalizeBody(payload.body);
    }
  }
  bodyBuffer = normalizeBody(body);

@@ -59,6 +75,11 @@ function normalizeProxyEvent(event) {
    headers,
    body: bodyBuffer,
    payloads,
    responseCallbackCipher,
    responseCallbackCipherIV,
    responseCallbackCipherKey,
    responseCallbackStream,
    responseCallbackUrl,
  };
}

@@ -79,11 +100,23 @@ function normalizeAPIGatewayProxyEvent(event) {
    bodyBuffer = Buffer.alloc(0);
  }

  return { isApiGateway: true, method, path, headers, body: bodyBuffer };
  return {
    body: bodyBuffer,
    headers,
    isApiGateway: true,
    method,
    path,
    responseCallbackCipher: undefined,
    responseCallbackCipherIV: undefined,
    responseCallbackCipherKey: undefined,
    responseCallbackStream: undefined,
    responseCallbackUrl: undefined,
  };
}

/**
 * @param {import('./types').VercelProxyEvent | import('aws-lambda').APIGatewayProxyEvent} event
 * @return {import('./types').VercelProxyRequest}
 */
function normalizeEvent(event) {
  if ('Action' in event) {
@@ -176,7 +209,7 @@ class Bridge {
   *
   * @param {import('./types').VercelProxyEvent | import('aws-lambda').APIGatewayProxyEvent} event
   * @param {import('aws-lambda').Context} context
   * @return {Promise<{statusCode: number, headers: import('http').IncomingHttpHeaders, body: string, encoding: 'base64'}>}
   * @return {Promise<import('./types').VercelProxyResponse>}
   */
  async launcher(event, context) {
    context.callbackWaitsForEmptyEventLoop = false;
@@ -268,6 +301,10 @@ class Bridge {
        encoding: 'base64',
      };
    } else {
      // TODO We expect this to error as it is possible to resolve to empty.
      // For now it is not very important as we will only pass
      // `responseCallbackUrl` in production.
      // @ts-ignore
      return this.handleEvent(normalizedEvent);
    }
  }
@@ -275,11 +312,21 @@ class Bridge {
  /**
   *
   * @param {ReturnType<typeof normalizeEvent>} normalizedEvent
   * @return {Promise<{statusCode: number, headers: import('http').IncomingHttpHeaders, body: string, encoding: 'base64'}>}
   * @return {Promise<import('./types').VercelProxyResponse | import('./types').VercelStreamProxyResponse>}
   */
  async handleEvent(normalizedEvent) {
    const { port } = await this.listening;
    const { isApiGateway, method, headers, body } = normalizedEvent;
    const {
      body,
      headers,
      isApiGateway,
      method,
      responseCallbackCipher,
      responseCallbackCipherIV,
      responseCallbackCipherKey,
      responseCallbackStream,
      responseCallbackUrl,
    } = normalizedEvent;
    let { path } = normalizedEvent;

    if (this.shouldStoreEvents) {
@@ -288,41 +335,42 @@ class Bridge {
      headers['x-now-bridge-request-id'] = reqId;
    }

    // eslint-disable-next-line consistent-return
    return new Promise((resolve, reject) => {
      let socket;
      let cipher;
      let url;

      if (responseCallbackUrl) {
        socket = new Socket();
        url = new URL(responseCallbackUrl);
        socket.connect(parseInt(url.port, 10), url.hostname);
        socket.write(`${responseCallbackStream}${CRLF}`);
      }

      if (
        responseCallbackCipher &&
        responseCallbackCipherKey &&
        responseCallbackCipherIV
      ) {
        cipher = createCipheriv(
          responseCallbackCipher,
          Buffer.from(responseCallbackCipherKey, 'base64'),
          Buffer.from(responseCallbackCipherIV, 'base64')
        );
      }

      // if the path is improperly encoded we need to encode it or
      // http.request will throw an error (related check: https://github.com/nodejs/node/blob/4ece669c6205ec78abfdadfe78869bbb8411463e/lib/_http_client.js#L84)
      if (path && /[^\u0021-\u00ff]/.test(path)) {
        path = encodeURI(path);
      }

      const opts = { hostname: '127.0.0.1', port, path, method };
      const req = request(opts, res => {
        const response = res;
        /**
         * @type {Buffer[]}
         */
        const respBodyChunks = [];
        response.on('data', chunk => respBodyChunks.push(Buffer.from(chunk)));
        response.on('error', reject);
        response.on('end', () => {
          const bodyBuffer = Buffer.concat(respBodyChunks);
          delete response.headers.connection;

          if (isApiGateway) {
            delete response.headers['content-length'];
          } else if (response.headers['content-length']) {
            response.headers['content-length'] = String(bodyBuffer.length);
          }

          resolve({
            statusCode: response.statusCode || 200,
            headers: response.headers,
            body: bodyBuffer.toString('base64'),
            encoding: 'base64',
          });
        });
      });
      const req = request(
        { hostname: '127.0.0.1', port, path, method },
        socket && url && cipher
          ? getStreamResponseCallback({ url, socket, cipher, resolve, reject })
          : getResponseCallback({ isApiGateway, resolve, reject })
      );

      req.on('error', error => {
        setTimeout(() => {
@@ -332,16 +380,10 @@ class Bridge {
        }, 2);
      });

      for (const [name, value] of Object.entries(headers)) {
        if (value === undefined) {
          console.error(
            `Skipping HTTP request header "${name}" because value is undefined`
          );
          continue;
        }
      for (const [name, value] of getHeadersIterator(headers)) {
        try {
          req.setHeader(name, value);
        } catch (err) {
        } catch (/** @type any */ err) {
          console.error(`Skipping HTTP request header: "${name}: ${value}"`);
          console.error(err.message);
        }
@@ -363,4 +405,106 @@ class Bridge {
  }
}

/**
 * Generates the streaming response callback which writes in the given socket client a raw
 * HTTP Request message to later pipe the response body into the socket. It will pass request
 * headers namespace and an additional header with the status code. Once everything is
 * written it will destroy the socket and resolve to an empty object. If a cipher is given
 * it will be used to pipe bytes.
 *
 * @type {(params: {
 *   url: import('url').URL,
 *   socket: import('net').Socket,
 *   cipher: import('crypto').Cipher
 *   resolve: (result: (Record<string, never>)) => void,
 *   reject: (err: Error) => void
 * }) => (response: import("http").IncomingMessage) => void}
 */
function getStreamResponseCallback({ url, socket, cipher, resolve, reject }) {
  return response => {
    const chunked = new Transform();
    chunked._transform = function (chunk, _, callback) {
      this.push(Buffer.byteLength(chunk).toString(16) + CRLF);
      this.push(chunk);
      this.push(CRLF);
      callback();
    };

    let headers = `Host: ${url.host}${CRLF}`;
    headers += `transfer-encoding: chunked${CRLF}`;
    headers += `x-vercel-status-code: ${response.statusCode || 200}${CRLF}`;
    for (const [name, value] of getHeadersIterator(response.headers)) {
      if (!['connection', 'transfer-encoding'].includes(name)) {
        headers += `x-vercel-header-${name}: ${value}${CRLF}`;
      }
    }

    cipher.write(`POST ${url.pathname} HTTP/1.1${CRLF}${headers}${CRLF}`);

    pipeline(response, chunked, cipher, socket, err => {
      if (err) return reject(err);
      resolve({});
    });
  };
}

/**
 * Generates the normal response callback which waits until the body is fully
 * received before resolving the promise. It caches the entire body and resolve
 * with an object that describes the response.
 *
 * @type {(params: {
 *   isApiGateway: boolean,
 *   resolve: (result: (import('./types').VercelProxyResponse)) => void,
 *   reject: (err: Error) => void
 * }) => (response: import("http").IncomingMessage) => void}
 */
function getResponseCallback({ isApiGateway, resolve, reject }) {
  return response => {
    /**
     * @type {Buffer[]}
     */
    const respBodyChunks = [];
    response.on('data', chunk => respBodyChunks.push(Buffer.from(chunk)));
    response.on('error', reject);
    response.on('end', () => {
      const bodyBuffer = Buffer.concat(respBodyChunks);
      delete response.headers.connection;

      if (isApiGateway) {
        delete response.headers['content-length'];
      } else if (response.headers['content-length']) {
        response.headers['content-length'] = String(bodyBuffer.length);
      }

      resolve({
        statusCode: response.statusCode || 200,
        headers: response.headers,
        body: bodyBuffer.toString('base64'),
        encoding: 'base64',
      });
    });
  };
}

/**
 * Get an iterator for the headers object and yield the name and value when
 * the value is not undefined only.
 *
 * @type {(headers: import('http').IncomingHttpHeaders) =>
 *   Generator<[string, string | string[]], void, unknown>}
 */
function* getHeadersIterator(headers) {
  for (const [name, value] of Object.entries(headers)) {
    if (value === undefined) {
      console.error(
        `Skipping HTTP request header "${name}" because value is undefined`
      );
      continue;
    }

    yield [name, value];
  }
}

module.exports = { Bridge };

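The bridge.js change above introduces a streaming path: when the invoke payload carries a responseCallbackUrl, handleEvent opens a TCP socket, sends responseCallbackStream in clear text, and then writes a raw HTTP POST through the cipher built from responseCallbackCipher/CipherKey/CipherIV (the tests below use aes-256-ctr). The function's response body is re-framed with chunked transfer encoding, and its headers are forwarded under an x-vercel-header- prefix plus x-vercel-status-code. A minimal sketch of that framing, outside the diff (frameChunk is an illustrative name, not part of the bridge):

const CRLF = '\r\n';

// Frame one body chunk the way `chunked._transform` above does:
// <byte length in hex>CRLF<payload>CRLF
function frameChunk(chunk) {
  const buf = Buffer.from(chunk);
  return Buffer.concat([
    Buffer.from(buf.length.toString(16) + CRLF),
    buf,
    Buffer.from(CRLF),
  ]);
}

console.log(JSON.stringify(frameChunk('hello').toString())); // "5\r\nhello\r\n"
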
@@ -1,6 +1,6 @@
{
  "name": "@vercel/node-bridge",
  "version": "3.0.0",
  "version": "3.1.0",
  "license": "MIT",
  "main": "./index.js",
  "repository": {
@@ -23,6 +23,8 @@
  "devDependencies": {
    "@types/aws-lambda": "8.10.19",
    "@types/node": "*",
    "jsonlines": "0.1.1",
    "test-listen": "1.1.0",
    "typescript": "4.3.4"
  }
}

185
packages/node-bridge/test/bridge.test.js
vendored
@@ -1,6 +1,10 @@
const assert = require('assert');
const crypto = require('crypto');
const jsonlines = require('jsonlines');
const { Server } = require('http');
const { Bridge } = require('../bridge');
const { runServer } = require('./run-test-server');
const { runTcpServer } = require('./run-test-server');

test('port binding', async () => {
  const server = new Server();
@@ -184,7 +188,7 @@ test('multi-payload handling', async () => {
});

test('consumeEvent', async () => {
  const mockListener = jest.fn((req, res) => {
  const mockListener = jest.fn((_, res) => {
    res.end('hello');
  });

@@ -222,7 +226,7 @@ test('consumeEvent', async () => {
});

test('consumeEvent and handle decoded path', async () => {
  const mockListener = jest.fn((req, res) => {
  const mockListener = jest.fn((_, res) => {
    res.end('hello');
  });

@@ -295,3 +299,180 @@ test('invalid request headers', async () => {

  server.close();
});

test('`NowProxyEvent` proxy streaming with a sync handler', async () => {
  const cipherParams = {
    cipher: 'aes-256-ctr',
    cipherIV: crypto.randomBytes(16),
    cipherKey: crypto.randomBytes(32),
  };

  const effects = {
    callbackPayload: undefined,
    callbackStream: undefined,
  };

  const { deferred, resolve } = createDeferred();

  const httpServer = await runServer({
    handler: (req, res) => {
      const chunks = [];
      req.on('data', chunk => {
        chunks.push(chunk.toString());
      });
      req.on('close', () => {
        effects.callbackPayload = chunks;
        res.writeHead(200, 'OK', { 'content-type': 'application/json' });
        res.end();
        resolve();
      });
    },
  });

  const tcpServerCallback = await runTcpServer({
    cipherParams,
    effects,
    httpServer,
  });

  const server = new Server((req, res) => {
    res.setHeader('content-type', 'text/html');
    res.end('hello');
  });

  const bridge = new Bridge(server);
  bridge.listen();
  const context = { callbackWaitsForEmptyEventLoop: true };
  const result = await bridge.launcher(
    {
      Action: 'Invoke',
      body: JSON.stringify({
        method: 'POST',
        responseCallbackCipher: cipherParams.cipher,
        responseCallbackCipherIV: cipherParams.cipherIV.toString('base64'),
        responseCallbackCipherKey: cipherParams.cipherKey.toString('base64'),
        responseCallbackStream: 'abc',
        responseCallbackUrl: String(tcpServerCallback.url),
        headers: { foo: 'bar' },
        path: '/nowproxy',
        body: 'body=1',
      }),
    },
    context
  );

  await deferred;

  expect(result).toEqual({});
  expect(context.callbackWaitsForEmptyEventLoop).toEqual(false);
  expect(effects.callbackStream).toEqual('abc');
  expect(effects.callbackPayload).toEqual(['hello']);

  server.close();
  await httpServer.close();
  await tcpServerCallback.close();
});

test('`NowProxyEvent` proxy streaming with an async handler', async () => {
  const effects = {
    callbackHeaders: undefined,
    callbackMethod: undefined,
    callbackPayload: undefined,
    callbackStream: undefined,
  };

  const cipherParams = {
    cipher: 'aes-256-ctr',
    cipherIV: crypto.randomBytes(16),
    cipherKey: crypto.randomBytes(32),
  };

  const { deferred, resolve } = createDeferred();
  const jsonParser = jsonlines.parse();
  const httpServer = await runServer({
    handler: (req, res) => {
      const chunks = [];
      req.pipe(jsonParser);
      jsonParser.on('data', chunk => {
        chunks.push(chunk);
      });
      req.on('close', () => {
        effects.callbackMethod = req.method;
        effects.callbackHeaders = req.headers;
        effects.callbackPayload = chunks;
        res.writeHead(200, 'OK', { 'content-type': 'application/json' });
        res.end();
        resolve();
      });
    },
  });

  const tcpServerCallback = await runTcpServer({
    cipherParams,
    httpServer,
    effects,
  });

  const jsonStringifier = jsonlines.stringify();
  const server = new Server((req, res) => {
    res.setHeader('x-test', 'hello');
    res.setHeader('content-type', 'text/html');
    jsonStringifier.pipe(res);
    jsonStringifier.write({ method: req.method });
    jsonStringifier.write({ path: req.url });
    setTimeout(() => {
      jsonStringifier.write({ headers: req.headers });
      res.end();
    }, 100);
  });

  const bridge = new Bridge(server);
  bridge.listen();
  const context = { callbackWaitsForEmptyEventLoop: true };
  const result = await bridge.launcher(
    {
      Action: 'Invoke',
      body: JSON.stringify({
        method: 'POST',
        responseCallbackCipher: cipherParams.cipher,
        responseCallbackCipherIV: cipherParams.cipherIV.toString('base64'),
        responseCallbackCipherKey: cipherParams.cipherKey.toString('base64'),
        responseCallbackStream: 'abc',
        responseCallbackUrl: String(tcpServerCallback.url),
        headers: { foo: 'bar' },
        path: '/nowproxy',
        body: 'body=1',
      }),
    },
    context
  );

  await deferred;

  expect(result).toEqual({});
  expect(context.callbackWaitsForEmptyEventLoop).toEqual(false);
  expect(effects.callbackStream).toEqual('abc');
  expect(effects.callbackMethod).toEqual('POST');
  expect(effects.callbackHeaders).toMatchObject({
    'x-vercel-status-code': '200',
    'x-vercel-header-x-test': 'hello',
    'x-vercel-header-content-type': 'text/html',
  });
  expect(effects.callbackPayload).toMatchObject([
    { method: 'POST' },
    { path: '/nowproxy' },
    { headers: { foo: 'bar' } },
  ]);

  server.close();
  httpServer.close();
  tcpServerCallback.close();
});

function createDeferred() {
  let resolve;
  const deferred = new Promise(_resolve => {
    resolve = _resolve;
  });
  return { deferred, resolve };
}

78
packages/node-bridge/test/run-test-server.js
vendored
Normal file
@@ -0,0 +1,78 @@
const { createServer } = require('net');
const { Server } = require('http');
const { Socket } = require('net');
const { URL } = require('url');
const crypto = require('crypto');
const listen = require('test-listen');

exports.runServer = async function runServer({ handler }) {
  const server = new Server(handler);
  const url = await listen(server);
  return { url: new URL(url), close: getKillServer(server) };
};

function getKillServer(server) {
  let sockets = [];

  server.on('connection', socket => {
    sockets.push(socket);
    socket.once('close', () => {
      sockets.splice(sockets.indexOf(socket), 1);
    });
  });

  return () => {
    return new Promise((resolve, reject) => {
      server.close(err => {
        if (err) {
          return reject(err);
        }
        resolve();
      });

      sockets.forEach(function (socket) {
        socket.destroy();
      });

      sockets = [];
    });
  };
}

exports.runTcpServer = async function runTcpServer({
  effects,
  httpServer,
  cipherParams,
}) {
  const server = createServer();
  server.on('connection', connection => {
    const socket = new Socket();
    socket.connect(parseInt(httpServer.url.port, 10), httpServer.hostname);
    const decipher = crypto.createDecipheriv(
      cipherParams.cipher,
      cipherParams.cipherKey,
      cipherParams.cipherIV
    );

    decipher.pipe(socket);

    const CRLF = Buffer.from('\r\n');
    let accBuffer = Buffer.from([]);
    connection.on('data', function onConnectionData(chunk) {
      accBuffer = Buffer.concat([accBuffer, chunk]);
      const idx = accBuffer.indexOf(CRLF);
      if (idx !== -1) {
        effects.callbackStream = accBuffer.slice(0, idx).toString();
        connection.off('data', onConnectionData);
        decipher.write(accBuffer.slice(idx + 2));
        connection.pipe(decipher);
        decipher.on('close', () => {
          socket.end();
        });
      }
    });
  });

  const url = await listen(server);
  return { url: new URL(url), close: getKillServer(server) };
};

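Read together with getStreamResponseCallback above, the callback protocol this test server expects on each connection is: the stream name terminated by CRLF in clear text, followed by the enciphered raw HTTP request with a chunked body. A rough, illustrative reconstruction of the bytes the bridge writes (key, IV, host, and path below are arbitrary, not taken from the diff):

const crypto = require('crypto');

const key = crypto.randomBytes(32);
const iv = crypto.randomBytes(16);
const cipher = crypto.createCipheriv('aes-256-ctr', key, iv);

const rawRequest =
  'POST /callback HTTP/1.1\r\n' +
  'Host: 127.0.0.1:3000\r\n' +
  'transfer-encoding: chunked\r\n' +
  'x-vercel-status-code: 200\r\n' +
  'x-vercel-header-content-type: text/html\r\n' +
  '\r\n' +
  '5\r\nhello\r\n';

const wire = Buffer.concat([
  Buffer.from('abc\r\n'),     // responseCallbackStream, sent in clear text
  cipher.update(rawRequest),  // everything after the first CRLF is enciphered
]);
console.log(wire.length);
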
@@ -1,5 +1,6 @@
/// <reference types="node" />
import {
import type { CipherCCMTypes } from 'crypto';
import type {
  Server,
  IncomingHttpHeaders,
  OutgoingHttpHeaders,
@@ -11,12 +12,18 @@ export interface VercelProxyEvent {
  body: string;
}
export interface VercelProxyRequest {
  isApiGateway?: boolean;
  isApiGateway: boolean;
  method: string;
  path: string;
  headers: IncomingHttpHeaders;
  body: Buffer;
  encoding?: string;
  payloads?: Array<VercelProxyRequest>;
  responseCallbackCipher?: CipherCCMTypes;
  responseCallbackCipherIV?: string;
  responseCallbackCipherKey?: string;
  responseCallbackStream?: string;
  responseCallbackUrl?: string;
}
export interface VercelProxyResponse {
  statusCode: number;
@@ -24,6 +31,7 @@ export interface VercelProxyResponse {
  body: string;
  encoding: BufferEncoding;
}
export type VercelStreamProxyResponse = Record<string, never>;
export interface ServerLike {
  timeout?: number;
  listen: (

@@ -1,6 +1,6 @@
{
  "name": "@vercel/node",
  "version": "2.5.26",
  "version": "2.6.0",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -31,9 +31,9 @@
  "dependencies": {
    "@edge-runtime/vm": "2.0.0",
    "@types/node": "*",
    "@vercel/build-utils": "5.5.5",
    "@vercel/node-bridge": "3.0.0",
    "@vercel/static-config": "2.0.3",
    "@vercel/build-utils": "5.5.6",
    "@vercel/node-bridge": "3.1.0",
    "@vercel/static-config": "2.0.4",
    "edge-runtime": "2.0.0",
    "esbuild": "0.14.47",
    "exit-hook": "2.2.1",

@@ -465,6 +465,9 @@ export const build: BuildV3 = async ({
    config.helpers === false || process.env.NODEJS_HELPERS === '0'
  );

  const experimentalResponseStreaming =
    staticConfig?.experimentalResponseStreaming === true ? true : undefined;

  output = new NodejsLambda({
    files: preparedFiles,
    handler,
@@ -472,6 +475,7 @@
    shouldAddHelpers,
    shouldAddSourcemapSupport,
    awsLambdaHandler,
    experimentalResponseStreaming,
  });
}

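The builder change above threads an experimentalResponseStreaming flag from the function's static config into NodejsLambda. A hedged sketch of how a function might opt in, assuming the usual exported config object that @vercel/static-config reads (only the field name is taken from the diff):

// api/stream.js (hypothetical example)
export const config = { experimentalResponseStreaming: true };

export default function handler(req, res) {
  res.setHeader('content-type', 'text/plain');
  res.write('hello ');
  setTimeout(() => res.end('world'), 100);
}
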
@@ -1,6 +1,6 @@
{
  "name": "@vercel/python",
  "version": "3.1.22",
  "version": "3.1.23",
  "main": "./dist/index.js",
  "license": "MIT",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",
@@ -22,7 +22,7 @@
  "devDependencies": {
    "@types/execa": "^0.9.0",
    "@types/jest": "27.4.1",
    "@vercel/build-utils": "5.5.5",
    "@vercel/build-utils": "5.5.6",
    "@vercel/ncc": "0.24.0",
    "execa": "^1.0.0",
    "typescript": "4.3.4"

@@ -1,6 +1,6 @@
{
  "name": "@vercel/redwood",
  "version": "1.0.31",
  "version": "1.0.32",
  "main": "./dist/index.js",
  "license": "MIT",
  "homepage": "https://vercel.com/docs",
@@ -20,13 +20,13 @@
  },
  "dependencies": {
    "@vercel/nft": "0.22.1",
    "@vercel/routing-utils": "2.0.2",
    "@vercel/routing-utils": "2.1.0",
    "semver": "6.1.1"
  },
  "devDependencies": {
    "@types/aws-lambda": "8.10.19",
    "@types/node": "*",
    "@types/semver": "6.0.0",
    "@vercel/build-utils": "5.5.5"
    "@vercel/build-utils": "5.5.6"
  }
}

@@ -1,6 +1,6 @@
{
  "name": "@vercel/remix",
  "version": "1.0.32",
  "version": "1.0.33",
  "license": "MIT",
  "main": "./dist/index.js",
  "homepage": "https://vercel.com/docs",
@@ -25,7 +25,7 @@
  "devDependencies": {
    "@types/jest": "27.5.1",
    "@types/node": "*",
    "@vercel/build-utils": "5.5.5",
    "@vercel/build-utils": "5.5.6",
    "typescript": "4.6.4"
  }
}

@@ -1,6 +1,6 @@
{
  "name": "@vercel/routing-utils",
  "version": "2.0.2",
  "version": "2.1.0",
  "description": "Vercel routing utilities",
  "main": "./dist/index.js",
  "types": "./dist/index.d.ts",

@@ -26,6 +26,14 @@ export function appendRoutesToPhase({

  if (isInPhase) {
    routes.push(...newRoutes);
  } else if (phase === null) {
    // If the phase is null, we want to insert the routes at the beginning
    const lastPhase = routes.findIndex(r => isHandler(r) && r.handle);
    if (lastPhase === -1) {
      routes.push(...newRoutes);
    } else {
      routes.splice(lastPhase, 0, ...newRoutes);
    }
  } else if (insertIndex > -1) {
    routes.splice(insertIndex, 0, ...newRoutes);
  } else {

@@ -122,6 +122,7 @@ export interface AppendRoutesToPhaseProps {
  newRoutes: Route[] | null;
  /**
   * The phase to append the routes such as `filesystem`.
   * If the phase is `null`, the routes will be appended prior to the first handle being found.
   */
  phase: HandleValue;
  phase: HandleValue | null;
}

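In practice the new `null` phase inserts the routes immediately before the first handle route, or appends them when no handle exists. A minimal sketch, assuming appendRoutesToPhase is imported from @vercel/routing-utils; the spec additions below exercise the same cases:

const { appendRoutesToPhase } = require('@vercel/routing-utils');

const routes = [
  { src: '/first', dest: '/one' },
  { handle: 'filesystem' },
];
const result = appendRoutesToPhase({
  routes,
  newRoutes: [{ src: '/new', dest: '/to' }],
  phase: null,
});
// result: the '/first' route, then the '/new' route, then { handle: 'filesystem' }
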
73
packages/routing-utils/test/append.spec.ts
vendored
@@ -128,3 +128,76 @@ test('appendRoutesToPhase one routes before, two routes in phase, two routes in
  ];
  deepEqual(actual, expected);
});

test('appendRoutesToPhase to null phase', () => {
  const routes: Route[] = [
    { src: '/first', dest: '/one' },
    { src: '/second', dest: '/two' },
    { handle: 'filesystem' },
    { src: '/third', dest: '/three' },
  ];
  const newRoutes = [{ src: '/new', dest: '/to' }];
  const phase = null;
  const actual = appendRoutesToPhase({ routes, newRoutes, phase });
  const expected = [
    { src: '/first', dest: '/one' },
    { src: '/second', dest: '/two' },
    { src: '/new', dest: '/to' },
    { handle: 'filesystem' },
    { src: '/third', dest: '/three' },
  ];

  deepEqual(actual, expected);
});

test('appendRoutesToPhase to null phase with no handle', () => {
  const routes: Route[] = [
    { src: '/first', dest: '/one' },
    { src: '/second', dest: '/two' },
  ];
  const newRoutes = [{ src: '/new', dest: '/to' }];
  const phase = null;
  const actual = appendRoutesToPhase({ routes, newRoutes, phase });
  const expected = [
    { src: '/first', dest: '/one' },
    { src: '/second', dest: '/two' },
    { src: '/new', dest: '/to' },
  ];

  deepEqual(actual, expected);
});

test('appendRoutesToPhase to null phase with two new routes ', () => {
  const routes: Route[] = [
    { src: '/first', dest: '/one' },
    { src: '/second', dest: '/two' },
    { handle: 'filesystem' },
    { src: '/third', dest: '/three' },
  ];
  const newRoutes = [
    { src: '/new1', dest: '/to1' },
    { src: '/new2', dest: '/to2' },
  ];
  const phase = null;
  const actual = appendRoutesToPhase({ routes, newRoutes, phase });
  const expected = [
    { src: '/first', dest: '/one' },
    { src: '/second', dest: '/two' },
    { src: '/new1', dest: '/to1' },
    { src: '/new2', dest: '/to2' },
    { handle: 'filesystem' },
    { src: '/third', dest: '/three' },
  ];

  deepEqual(actual, expected);
});

test('appendRoutesToPhase to null phase `routes=[]`', () => {
  const routes: Route[] = [];
  const newRoutes = [{ src: '/new', dest: '/to' }];
  const phase = null;
  const actual = appendRoutesToPhase({ routes, newRoutes, phase });
  const expected = [{ src: '/new', dest: '/to' }];

  deepEqual(actual, expected);
});

@@ -1,7 +1,7 @@
{
  "name": "@vercel/ruby",
  "author": "Nathan Cahill <nathan@nathancahill.com>",
  "version": "1.3.39",
  "version": "1.3.40",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/ruby",
@@ -22,7 +22,7 @@
  "devDependencies": {
    "@types/fs-extra": "8.0.0",
    "@types/semver": "6.0.0",
    "@vercel/build-utils": "5.5.5",
    "@vercel/build-utils": "5.5.6",
    "@vercel/ncc": "0.24.0",
    "execa": "2.0.4",
    "fs-extra": "^7.0.1",

@@ -1,6 +1,6 @@
{
  "name": "@vercel/static-build",
  "version": "1.0.32",
  "version": "1.0.33",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/build-step",
@@ -36,11 +36,11 @@
    "@types/ms": "0.7.31",
    "@types/node-fetch": "2.5.4",
    "@types/promise-timeout": "1.3.0",
    "@vercel/build-utils": "5.5.5",
    "@vercel/frameworks": "1.1.8",
    "@vercel/build-utils": "5.5.6",
    "@vercel/frameworks": "1.1.9",
    "@vercel/ncc": "0.24.0",
    "@vercel/routing-utils": "2.0.2",
    "@vercel/static-config": "2.0.3",
    "@vercel/routing-utils": "2.1.0",
    "@vercel/static-config": "2.0.4",
    "fs-extra": "10.0.0",
    "get-port": "5.0.0",
    "is-port-reachable": "2.0.1",

@@ -1,6 +1,6 @@
{
  "name": "@vercel/static-config",
  "version": "2.0.3",
  "version": "2.0.4",
  "license": "MIT",
  "main": "./dist/index",
  "repository": {

@@ -1,3 +1,4 @@
// bust cache
const assert = require('assert');
const { createHash } = require('crypto');
const path = require('path');

@@ -1,7 +1,7 @@
{
  "$schema": "https://turborepo.org/schema.json",
  "baseBranch": "origin/main",
  "globalDependencies": ["$RUNNER_OS", "test/lib/**"],
  "globalDependencies": ["turbo-cache-key.json", "test/lib/**"],
  "pipeline": {
    "build": {
      "dependsOn": ["^build"],

15
utils/gen.js
vendored
Normal file
@@ -0,0 +1,15 @@
#!/usr/bin/env node

/**
 * This script generates a cache key before invoking turbo
 * so that we never accidentally use the wrong cache.
 */
const { writeFileSync } = require('fs');
const { join } = require('path');

const { versions, platform, arch } = process;
const file = join(__dirname, '..', 'turbo-cache-key.json');
const node = versions.node.split('.')[0];
const str = JSON.stringify({ node, platform, arch });
console.log(`Generating cache key: ${str}`);
writeFileSync(file, str);

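For illustration, on a 64-bit Linux runner with Node 16 the script writes something like the following to turbo-cache-key.json (values vary per machine). Because turbo.json now lists this file under globalDependencies, every turbo task cache is keyed to the Node major version, platform, and architecture:

{"node":"16","platform":"linux","arch":"x64"}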