Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 04:22:13 +00:00)

Compare commits — 49 commits (@vercel/cl... → vercel-plu...)
49 commits (SHA1):
32664cd13b, db468c489a, edd9bb506c, a72549a290, 4aa6a13912, 81ea0082f1, 6dff0875f5, 30aa392c0a, c4fc060030, 3fa08bf64f, 43056bde1f, a49966b9b4, 7f55de71bb, db8e36e04c, 82924bb5c4, 18b5fac93e, a6012e600b, c3abf73f58, 4873b8b379, 6248139281, 507a5de3cd, be1c78e72f, c277c649c6, ed1dacd276, 144e890bfa, af097c2c06, 873a582986, 986b4c0b1a, 14071819ac, 2a8588a0c5, 0f7e89f76c, e68ed33a88, d3e98cdb73, bf4e77110f, 5b5197d2c5, a6ccf6c180, 8d848ebe8b, 6ef2c16d63, 6c71ceaaeb, 1dcb6dfc6f, 4fd24575e5, 8714f1905e, 2e69f2513d, 979e4b674a, 07fa47bcfb, 307c4fc377, 44868d79b6, df9a4afa5c, 8a6869bae2
@@ -1,11 +1,10 @@
node_modules
dist
examples
packages/build-utils/test/fixtures
packages/*/test/fixtures
packages/cli/@types
packages/cli/download
packages/cli/dist
packages/cli/test/fixtures
packages/cli/test/dev/fixtures
packages/cli/bin
packages/cli/link
@@ -13,7 +12,6 @@ packages/cli/src/util/dev/templates/*.ts
packages/client/tests/fixtures
packages/client/lib
packages/node/src/bridge.ts
packages/node/test/fixtures
packages/node-bridge/bridge.js
packages/node-bridge/launcher.js
packages/static-config/test/fixtures
packages/middleware/src/entries.js
.github/workflows/test-integration-dev.yml (vendored, 2 lines changed)
@@ -11,7 +11,7 @@ on:
 jobs:
   test:
     name: Dev
-    timeout-minutes: 60
+    timeout-minutes: 75
     strategy:
       fail-fast: false
       matrix:
.gitignore (vendored, 1 line changed)
@@ -27,3 +27,4 @@ test/lib/deployment/failed-page.txt
 /public
 __pycache__
 .vercel
+.output
@@ -14,8 +14,6 @@ const frameworks = (_frameworks as Framework[])
sort: undefined,
dependency: undefined,
defaultRoutes: undefined,
devCommand: undefined,
buildCommand: undefined,
};

if (framework.logo) {
@@ -24,7 +24,7 @@
   "eslint-config-prettier": "8.3.0",
   "eslint-plugin-jest": "24.3.6",
   "husky": "6.0.0",
-  "jest": "27.0.6",
+  "jest": "27.3.1",
   "json5": "2.1.1",
   "lint-staged": "9.2.5",
   "node-fetch": "2.6.1",
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.12.3-canary.14",
+  "version": "2.12.3-canary.19",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -21,7 +21,7 @@
   "@types/async-retry": "^1.2.1",
   "@types/cross-spawn": "6.0.0",
   "@types/end-of-stream": "^1.4.0",
-  "@types/fs-extra": "^5.0.5",
+  "@types/fs-extra": "9.0.13",
   "@types/glob": "^7.1.1",
   "@types/jest": "27.0.1",
   "@types/js-yaml": "3.12.1",
@@ -30,7 +30,7 @@
   "@types/node-fetch": "^2.1.6",
   "@types/semver": "6.0.0",
   "@types/yazl": "^2.4.1",
-  "@vercel/frameworks": "0.5.1-canary.10",
+  "@vercel/frameworks": "0.5.1-canary.12",
   "@vercel/ncc": "0.24.0",
   "aggregate-error": "3.0.1",
   "async-retry": "1.2.3",
@@ -38,7 +38,7 @@
   "boxen": "4.2.0",
   "cross-spawn": "6.0.5",
   "end-of-stream": "1.4.1",
-  "fs-extra": "7.0.0",
+  "fs-extra": "10.0.0",
   "glob": "7.1.3",
   "into-stream": "5.0.0",
   "js-yaml": "3.13.1",
packages/build-utils/src/convert-runtime-to-plugin.ts (new file, 171 lines)
@@ -0,0 +1,171 @@
import fs from 'fs-extra';
import { join, dirname, relative } from 'path';
import glob from './fs/glob';
import { normalizePath } from './fs/normalize-path';
import { FILES_SYMBOL, getLambdaOptionsFromFunction, Lambda } from './lambda';
import type FileBlob from './file-blob';
import type { BuilderFunctions, BuildOptions, Files } from './types';
import minimatch from 'minimatch';

/**
 * Convert legacy Runtime to a Plugin.
 * @param buildRuntime - a legacy build() function from a Runtime
 * @param ext - the file extension, for example `.py`
 */
export function convertRuntimeToPlugin(
  buildRuntime: (options: BuildOptions) => Promise<{ output: Lambda }>,
  ext: string
) {
  return async function build({ workPath }: { workPath: string }) {
    const opts = { cwd: workPath };
    const files = await glob('**', opts);
    delete files['vercel.json']; // Builders/Runtimes didn't have vercel.json
    const entrypoints = await glob(`api/**/*${ext}`, opts);
    const pages: { [key: string]: any } = {};
    const { functions = {} } = await readVercelConfig(workPath);
    const traceDir = join(workPath, '.output', 'runtime-traced-files');
    await fs.ensureDir(traceDir);

    for (const entrypoint of Object.keys(entrypoints)) {
      const key =
        Object.keys(functions).find(
          src => src === entrypoint || minimatch(entrypoint, src)
        ) || '';
      const config = functions[key] || {};

      const { output } = await buildRuntime({
        files,
        entrypoint,
        workPath,
        config: {
          zeroConfig: true,
          includeFiles: config.includeFiles,
          excludeFiles: config.excludeFiles,
        },
      });

      pages[entrypoint] = {
        handler: output.handler,
        runtime: output.runtime,
        memory: output.memory,
        maxDuration: output.maxDuration,
        environment: output.environment,
        allowQuery: output.allowQuery,
        regions: output.regions,
      };

      // @ts-ignore This symbol is a private API
      const lambdaFiles: Files = output[FILES_SYMBOL];

      const entry = join(workPath, '.output', 'server', 'pages', entrypoint);
      await fs.ensureDir(dirname(entry));
      await linkOrCopy(files[entrypoint].fsPath, entry);

      const tracedFiles: {
        absolutePath: string;
        relativePath: string;
      }[] = [];

      Object.entries(lambdaFiles).forEach(async ([relPath, file]) => {
        const newPath = join(traceDir, relPath);
        tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
        if (file.fsPath) {
          await linkOrCopy(file.fsPath, newPath);
        } else if (file.type === 'FileBlob') {
          const { data, mode } = file as FileBlob;
          await fs.writeFile(newPath, data, { mode });
        } else {
          throw new Error(`Unknown file type: ${file.type}`);
        }
      });

      const nft = join(
        workPath,
        '.output',
        'server',
        'pages',
        `${entrypoint}.nft.json`
      );
      const json = JSON.stringify({
        version: 1,
        files: tracedFiles.map(f => ({
          input: normalizePath(relative(nft, f.absolutePath)),
          output: normalizePath(f.relativePath),
        })),
      });

      await fs.ensureDir(dirname(nft));
      await fs.writeFile(nft, json);
    }

    await updateFunctionsManifest({ workPath, pages });
  };
}

async function linkOrCopy(existingPath: string, newPath: string) {
  try {
    await fs.createLink(existingPath, newPath);
  } catch (err: any) {
    if (err.code !== 'EEXIST') {
      await fs.copyFile(existingPath, newPath);
    }
  }
}

async function readJson(filePath: string): Promise<{ [key: string]: any }> {
  try {
    const str = await fs.readFile(filePath, 'utf8');
    return JSON.parse(str);
  } catch (err) {
    if (err.code === 'ENOENT') {
      return {};
    }
    throw err;
  }
}

async function readVercelConfig(
  workPath: string
): Promise<{ functions?: BuilderFunctions; regions?: string[] }> {
  const vercelJsonPath = join(workPath, 'vercel.json');
  return readJson(vercelJsonPath);
}

/**
 * If `.output/functions-manifest.json` exists, append to the pages
 * property. Otherwise write a new file. This will also read `vercel.json`
 * and apply relevant `functions` property config.
 */
export async function updateFunctionsManifest({
  workPath,
  pages,
}: {
  workPath: string;
  pages: { [key: string]: any };
}) {
  const functionsManifestPath = join(
    workPath,
    '.output',
    'functions-manifest.json'
  );
  const vercelConfig = await readVercelConfig(workPath);
  const functionsManifest = await readJson(functionsManifestPath);

  if (!functionsManifest.version) functionsManifest.version = 1;
  if (!functionsManifest.pages) functionsManifest.pages = {};

  for (const [pageKey, pageConfig] of Object.entries(pages)) {
    const fnConfig = await getLambdaOptionsFromFunction({
      sourceFile: pageKey,
      config: vercelConfig,
    });
    functionsManifest.pages[pageKey] = {
      ...pageConfig,
      memory: fnConfig.memory || pageConfig.memory,
      maxDuration: fnConfig.maxDuration || pageConfig.maxDuration,
      regions: vercelConfig.regions || pageConfig.regions,
    };
  }

  await fs.writeFile(functionsManifestPath, JSON.stringify(functionsManifest));
}
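As a usage sketch (hypothetical, not part of this compare view): a legacy Runtime such as `@vercel/python` could be republished as a CLI plugin by wrapping its `build()` with the helper above and exporting the result as the plugin's `build` hook. The package name and cast below are illustrative.

```ts
// Hypothetical plugin entrypoint (e.g. vercel-plugin-python/index.ts).
// Assumes the legacy Runtime's build() resolves to { output: Lambda }
// when invoked with zeroConfig, which is what convertRuntimeToPlugin expects.
import { convertRuntimeToPlugin } from '@vercel/build-utils';
import { build as buildRuntime } from '@vercel/python';

// `vercel build` discovers this exported `build` hook and calls it with
// { workPath }; the wrapper then builds every matching `api/**/*.py` entrypoint.
export const build = convertRuntimeToPlugin(buildRuntime as any, '.py');
```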
@@ -3,6 +3,7 @@ import assert from 'assert';
 import vanillaGlob_ from 'glob';
 import { promisify } from 'util';
 import { lstat, Stats } from 'fs-extra';
+import { normalizePath } from './normalize-path';
 import FileFsRef from '../file-fs-ref';

 export type GlobOptions = vanillaGlob_.IOptions;
@@ -45,7 +46,7 @@ export default async function glob(
   const files = await vanillaGlob(pattern, options);

   for (const relativePath of files) {
-    const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
+    const fsPath = normalizePath(path.join(options.cwd!, relativePath));
     let stat: Stats = options.statCache![fsPath] as Stats;
     assert(
       stat,
packages/build-utils/src/fs/normalize-path.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
const isWin = process.platform === 'win32';

/**
 * Convert Windows separators to Unix separators.
 */
export function normalizePath(p: string): string {
  return isWin ? p.replace(/\\/g, '/') : p;
}
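A quick sketch of what the helper does (illustrative values):

```ts
import { join } from 'path';
import { normalizePath } from '@vercel/build-utils';

// On win32, join('api', 'index.py') yields 'api\\index.py';
// normalizePath rewrites it to 'api/index.py'. On other platforms
// the input is returned unchanged.
const key = normalizePath(join('api', 'index.py'));
```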
@@ -81,6 +81,11 @@ export {
 export { detectFramework } from './detect-framework';
 export { DetectorFilesystem } from './detectors/filesystem';
 export { readConfigFile } from './fs/read-config-file';
+export { normalizePath } from './fs/normalize-path';
+export {
+  convertRuntimeToPlugin,
+  updateFunctionsManifest,
+} from './convert-runtime-to-plugin';

 export * from './schemas';
 export * from './types';
@@ -36,9 +36,11 @@ interface CreateLambdaOptions {

 interface GetLambdaOptionsFromFunctionOptions {
   sourceFile: string;
-  config?: Config;
+  config?: Pick<Config, 'functions'>;
 }

+export const FILES_SYMBOL = Symbol('files');
+
 export class Lambda {
   public type: 'Lambda';
   public zipBuffer: Buffer;
@@ -118,7 +120,7 @@ export async function createLambda({

   try {
     const zipBuffer = await createZip(files);
-    return new Lambda({
+    const lambda = new Lambda({
       zipBuffer,
       handler,
       runtime,
@@ -127,6 +129,9 @@ export async function createLambda({
       environment,
       regions,
     });
+    // @ts-ignore This symbol is a private API
+    lambda[FILES_SYMBOL] = files;
+    return lambda;
   } finally {
     sema.release();
   }
@@ -10,6 +10,7 @@ export interface File {
   mode: number;
   contentType?: string;
   toStream: () => NodeJS.ReadableStream;
+  toStreamAsync?: () => Promise<NodeJS.ReadableStream>;
   /**
    * The absolute path to the file in the filesystem
    */
@@ -1,5 +1,5 @@
 {
   "version": 2,
   "builds": [{ "src": "package.json", "use": "@vercel/static-build" }],
-  "probes": [{ "path": "/", "mustContain": "npm version: 7" }]
+  "probes": [{ "path": "/", "mustContain": "npm version: 8" }]
 }
packages/build-utils/test/unit.convert-runtime-to-plugin.test.ts (vendored, new file, 182 lines)
@@ -0,0 +1,182 @@
import { join } from 'path';
import fs from 'fs-extra';
import { BuildOptions, createLambda } from '../src';
import { convertRuntimeToPlugin } from '../src/convert-runtime-to-plugin';

async function fsToJson(dir: string, output: Record<string, any> = {}) {
  const files = await fs.readdir(dir);
  for (const file of files) {
    const fsPath = join(dir, file);
    const stat = await fs.stat(fsPath);
    if (stat.isDirectory()) {
      output[file] = {};
      await fsToJson(fsPath, output[file]);
    } else {
      output[file] = await fs.readFile(fsPath, 'utf8');
    }
  }
  return output;
}

const workPath = join(__dirname, 'walk', 'python-api');

describe('convert-runtime-to-plugin', () => {
  afterEach(async () => {
    await fs.remove(join(workPath, '.output'));
  });

  it('should create correct fileystem for python', async () => {
    const lambdaOptions = {
      handler: 'index.handler',
      runtime: 'python3.9',
      memory: 512,
      maxDuration: 5,
      environment: {},
      regions: ['sfo1'],
    };

    const buildRuntime = async (opts: BuildOptions) => {
      const lambda = await createLambda({
        files: opts.files,
        ...lambdaOptions,
      });
      return { output: lambda };
    };

    const lambdaFiles = await fsToJson(workPath);
    delete lambdaFiles['vercel.json'];
    const build = await convertRuntimeToPlugin(buildRuntime, '.py');

    await build({ workPath });

    const output = await fsToJson(join(workPath, '.output'));
    expect(output).toMatchObject({
      'functions-manifest.json': expect.stringContaining('{'),
      'runtime-traced-files': lambdaFiles,
      server: {
        pages: {
          api: {
            'index.py': expect.stringContaining('index'),
            'index.py.nft.json': expect.stringContaining('{'),
            users: {
              'get.py': expect.stringContaining('get'),
              'get.py.nft.json': expect.stringContaining('{'),
              'post.py': expect.stringContaining('post'),
              'post.py.nft.json': expect.stringContaining('{'),
            },
          },
        },
      },
    });

    const funcManifest = JSON.parse(output['functions-manifest.json']);
    expect(funcManifest).toMatchObject({
      version: 1,
      pages: {
        'api/index.py': lambdaOptions,
        'api/users/get.py': lambdaOptions,
        'api/users/post.py': { ...lambdaOptions, memory: 3008 },
      },
    });

    const indexJson = JSON.parse(output.server.pages.api['index.py.nft.json']);
    expect(indexJson).toMatchObject({
      version: 1,
      files: [
        {
          input: '../../../../runtime-traced-files/api/index.py',
          output: 'api/index.py',
        },
        {
          input: '../../../../runtime-traced-files/api/users/get.py',
          output: 'api/users/get.py',
        },
        {
          input: '../../../../runtime-traced-files/api/users/post.py',
          output: 'api/users/post.py',
        },
        {
          input: '../../../../runtime-traced-files/file.txt',
          output: 'file.txt',
        },
        {
          input: '../../../../runtime-traced-files/util/date.py',
          output: 'util/date.py',
        },
        {
          input: '../../../../runtime-traced-files/util/math.py',
          output: 'util/math.py',
        },
      ],
    });

    const getJson = JSON.parse(
      output.server.pages.api.users['get.py.nft.json']
    );
    expect(getJson).toMatchObject({
      version: 1,
      files: [
        {
          input: '../../../../../runtime-traced-files/api/index.py',
          output: 'api/index.py',
        },
        {
          input: '../../../../../runtime-traced-files/api/users/get.py',
          output: 'api/users/get.py',
        },
        {
          input: '../../../../../runtime-traced-files/api/users/post.py',
          output: 'api/users/post.py',
        },
        {
          input: '../../../../../runtime-traced-files/file.txt',
          output: 'file.txt',
        },
        {
          input: '../../../../../runtime-traced-files/util/date.py',
          output: 'util/date.py',
        },
        {
          input: '../../../../../runtime-traced-files/util/math.py',
          output: 'util/math.py',
        },
      ],
    });

    const postJson = JSON.parse(
      output.server.pages.api.users['post.py.nft.json']
    );
    expect(postJson).toMatchObject({
      version: 1,
      files: [
        {
          input: '../../../../../runtime-traced-files/api/index.py',
          output: 'api/index.py',
        },
        {
          input: '../../../../../runtime-traced-files/api/users/get.py',
          output: 'api/users/get.py',
        },
        {
          input: '../../../../../runtime-traced-files/api/users/post.py',
          output: 'api/users/post.py',
        },
        {
          input: '../../../../../runtime-traced-files/file.txt',
          output: 'file.txt',
        },
        {
          input: '../../../../../runtime-traced-files/util/date.py',
          output: 'util/date.py',
        },
        {
          input: '../../../../../runtime-traced-files/util/math.py',
          output: 'util/math.py',
        },
      ],
    });

    expect(output.server.pages['file.txt']).toBeUndefined();
    expect(output.server.pages.api['file.txt']).toBeUndefined();
  });
});
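The `../../../../runtime-traced-files/...` inputs asserted above come straight from the `relative(nft, f.absolutePath)` call in `convertRuntimeToPlugin`. A minimal sketch of that path math, using a hypothetical `workPath`:

```ts
import { join, relative } from 'path';

const workPath = '/repo'; // hypothetical project root
const nft = join(workPath, '.output', 'server', 'pages', 'api/index.py.nft.json');
const traced = join(workPath, '.output', 'runtime-traced-files', 'api/index.py');

// relative() uses the .nft.json path itself as the base, which is why files
// under api/users/ climb one extra directory level compared to api/.
console.log(relative(nft, traced));
// -> '../../../../runtime-traced-files/api/index.py' (after separator normalization)
```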
packages/build-utils/test/walk/python-api/api/index.py (new file, 1 line)
@@ -0,0 +1 @@
# index
@@ -0,0 +1 @@
# get
@@ -0,0 +1 @@
# post
packages/build-utils/test/walk/python-api/file.txt (new file, 1 line)
@@ -0,0 +1 @@
This file should also be included
packages/build-utils/test/walk/python-api/util/date.py (new file, 1 line)
@@ -0,0 +1 @@
# date
packages/build-utils/test/walk/python-api/util/math.py (new file, 1 line)
@@ -0,0 +1 @@
# math
packages/build-utils/test/walk/python-api/vercel.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "functions": {
    "api/users/post.py": {
      "memory": 3008
    },
    "api/not-matching-anything.py": {
      "memory": 768
    }
  }
}
@@ -1,6 +1,6 @@
 {
   "name": "vercel",
-  "version": "23.1.3-canary.17",
+  "version": "23.1.3-canary.35",
   "preferGlobal": true,
   "license": "Apache-2.0",
   "description": "The command-line interface for Vercel",
@@ -43,14 +43,14 @@
     "node": ">= 12"
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.14",
-    "@vercel/go": "1.2.4-canary.3",
-    "@vercel/node": "1.12.2-canary.4",
-    "@vercel/python": "2.0.6-canary.4",
-    "@vercel/ruby": "1.2.8-canary.3",
+    "@vercel/build-utils": "2.12.3-canary.19",
+    "@vercel/go": "1.2.4-canary.4",
+    "@vercel/node": "1.12.2-canary.6",
+    "@vercel/python": "2.0.6-canary.6",
+    "@vercel/ruby": "1.2.8-canary.4",
     "update-notifier": "4.1.0",
-    "vercel-plugin-middleware": "0.0.0-canary.3",
-    "vercel-plugin-node": "1.12.2-plugin.0"
+    "vercel-plugin-middleware": "0.0.0-canary.7",
+    "vercel-plugin-node": "1.12.2-canary.10"
   },
   "devDependencies": {
     "@next/env": "11.1.2",
@@ -90,7 +90,7 @@
     "@types/update-notifier": "5.1.0",
     "@types/which": "1.3.2",
     "@types/write-json-file": "2.2.1",
-    "@vercel/frameworks": "0.5.1-canary.10",
+    "@vercel/frameworks": "0.5.1-canary.12",
     "@vercel/ncc": "0.24.0",
     "@vercel/nft": "0.17.0",
     "@zeit/fun": "0.11.2",
@@ -23,19 +23,14 @@ import handleError from '../util/handle-error';
import confirm from '../util/input/confirm';
import { isSettingValue } from '../util/is-setting-value';
import cmd from '../util/output/cmd';
import code from '../util/output/code';
import { getColorForPkgName } from '../util/output/color-name-cache';
import logo from '../util/output/logo';
import param from '../util/output/param';
import stamp from '../util/output/stamp';
import cliPkgJson from '../util/pkg';
import { getCommandName, getPkgName } from '../util/pkg-name';
import { loadCliPlugins } from '../util/plugins';
import { findFramework } from '../util/projects/find-framework';
import { VERCEL_DIR } from '../util/projects/link';
import {
ProjectLinkAndSettings,
readProjectSettings,
} from '../util/projects/project-settings';
import { readProjectSettings } from '../util/projects/project-settings';
import pull from './pull';

const sema = new Sema(16, {
@@ -69,18 +64,24 @@ const help = () => {
};

const OUTPUT_DIR = '.output';
const VERCEL_PLUGIN_PREFIX = 'vercel-plugin-';

const fields: {
name: string;
value: keyof ProjectLinkAndSettings['settings'];
}[] = [
{ name: 'Build Command', value: 'buildCommand' },
{ name: 'Output Directory', value: 'outputDirectory' },
{ name: 'Root Directory', value: 'rootDirectory' },
];

export default async function main(client: Client) {
if (process.env.__VERCEL_BUILD_RUNNING) {
client.output.error(
`${cmd(
`${getPkgName()} build`
)} must not recursively invoke itself. Check the Build Command in the Project Settings or the ${cmd(
'build'
)} script in ${cmd('package.json')}`
);
client.output.error(
`Learn More: https://vercel.link/recursive-invocation-of-commands`
);
return 1;
} else {
process.env.__VERCEL_BUILD_RUNNING = '1';
}

let argv;
const buildStamp = stamp();
try {
@@ -120,6 +121,9 @@ export default async function main(client: Client) {
project = await readProjectSettings(join(cwd, VERCEL_DIR));
}

// If `rootDirectory` exists, then `baseDir` will be the repo's root directory.
const baseDir = cwd;

cwd = project.settings.rootDirectory
? join(cwd, project.settings.rootDirectory)
: cwd;
@@ -152,47 +156,57 @@ export default async function main(client: Client) {
}

const buildState = { ...project.settings };

client.output.log(`Retrieved Project Settings:`);
client.output.print(
chalk.dim(` - ${chalk.bold(`Framework Preset:`)} ${framework.name}\n`)
const formatSetting = (
name: string,
override: string | null | undefined,
defaults: typeof framework.settings.outputDirectory
) =>
` - ${chalk.bold(`${name}:`)} ${`${
override
? override + ` (override)`
: 'placeholder' in defaults
? chalk.italic(`${defaults.placeholder}`)
: defaults.value
}`}`;
console.log(`Retrieved Project Settings:`);
console.log(
chalk.dim(` - ${chalk.bold(`Framework Preset:`)} ${framework.name}`)
);
console.log(
chalk.dim(
formatSetting(
'Build Command',
project.settings.buildCommand,
framework.settings.buildCommand
)
)
);
console.log(
chalk.dim(
formatSetting(
'Output Directory',
project.settings.outputDirectory,
framework.settings.outputDirectory
)
)
);

for (let field of fields) {
const defaults = (framework.settings as any)[field.value];
if (defaults) {
client.output.print(
chalk.dim(
` - ${chalk.bold(`${field.name}:`)} ${`${
project.settings[field.value]
? project.settings[field.value] + ` (override)`
: isSettingValue(defaults)
? defaults.value
: chalk.italic(`${defaults.placeholder}`)
}`}\n`
)
);
}
if (field.value != 'buildCommand') {
(buildState as any)[field.value] = project.settings[field.value]
? project.settings[field.value]
: defaults
? isSettingValue(defaults)
? defaults.value
: null
: null;
}
}
buildState.outputDirectory =
project.settings.outputDirectory ||
(isSettingValue(framework.settings.outputDirectory)
? framework.settings.outputDirectory.value
: null);
buildState.rootDirectory = project.settings.rootDirectory;

if (loadedEnvFiles.length > 0) {
client.output.log(
console.log(
`Loaded Environment Variables from ${loadedEnvFiles.length} ${pluralize(
'file',
loadedEnvFiles.length
)}:`
);
for (let envFile of loadedEnvFiles) {
client.output.print(chalk.dim(` - ${envFile.path}\n`));
console.log(chalk.dim(` - ${envFile.path}`));
}
}
@@ -200,7 +214,7 @@ export default async function main(client: Client) {
const debug = argv['--debug'];
let plugins;
try {
plugins = await loadCliPlugins(client, cwd);
plugins = await loadCliPlugins(cwd, client.output);
} catch (error) {
client.output.error('Failed to load CLI Plugins');
handleError(error, { debug });
@@ -223,7 +237,7 @@ export default async function main(client: Client) {
};

if (plugins?.pluginCount && plugins?.pluginCount > 0) {
client.output.log(
console.log(
`Loaded ${plugins.pluginCount} CLI ${pluralize(
'Plugin',
plugins.pluginCount
@@ -231,7 +245,7 @@ export default async function main(client: Client) {
);
// preBuild Plugins
if (plugins.preBuildPlugins.length > 0) {
client.output.log(
console.log(
`Running ${plugins.pluginCount} CLI ${pluralize(
'Plugin',
plugins.pluginCount
@@ -268,38 +282,37 @@ export default async function main(client: Client) {

// Clean the output directory
fs.removeSync(join(cwd, OUTPUT_DIR));
let result: boolean;

// Yarn v2 PnP mode may be activated, so force
// "node-modules" linker style
const env = {
YARN_NODE_LINKER: 'node-modules',
...spawnOpts.env,
};

if (typeof buildState.buildCommand === 'string') {
client.output.log(`Running Build Command: ${cmd(buildState.buildCommand)}`);
result = await execCommand(buildState.buildCommand, {
console.log(`Running Build Command: ${cmd(buildState.buildCommand)}`);
await execCommand(buildState.buildCommand, {
...spawnOpts,
// Yarn v2 PnP mode may be activated, so force
// "node-modules" linker style
env: {
YARN_NODE_LINKER: 'node-modules',
...spawnOpts.env,
},
cwd: cwd,
env,
cwd,
});
} else if (fs.existsSync(join(cwd, 'package.json'))) {
result = await runPackageJsonScript(
await runPackageJsonScript(
client,
cwd,
['vercel-build', 'now-build', 'build'],
spawnOpts
);
} else {
// no package.json exists and no build command present
result = true;
}

if (!result) {
client.output.error(
`Missing required "${cmd(
buildState.buildCommand || 'vercel-build' || 'build'
)}" script in ${param(cwd)}"\n`
} else if (typeof framework.settings.buildCommand.value === 'string') {
console.log(
`Running Build Command: ${cmd(framework.settings.buildCommand.value)}`
);
return 1;
await execCommand(framework.settings.buildCommand.value, {
...spawnOpts,
env,
cwd,
});
}

if (!fs.existsSync(join(cwd, OUTPUT_DIR))) {
@@ -318,6 +331,9 @@ export default async function main(client: Client) {
ignore: [
'node_modules/**',
'.vercel/**',
'.env',
'.env.*',
'.*ignore',
'_middleware.ts',
'_middleware.mts',
'_middleware.cts',
@@ -326,6 +342,7 @@ export default async function main(client: Client) {
'_middleware.js',
'api/**',
'.git/**',
'.next/cache/**',
],
nodir: true,
dot: true,
@@ -344,7 +361,7 @@ export default async function main(client: Client) {
)
);
client.output.stopSpinner();
client.output.log(
console.log(
`Copied ${files.length.toLocaleString()} files from ${param(
distDir
)} to ${param(outputDir)} ${copyStamp()}`
@@ -398,6 +415,36 @@ export default async function main(client: Client) {
join(cwd, OUTPUT_DIR, 'static', '_next', 'static')
);

// Next.js might reference files from the `static` directory in `middleware-manifest.json`.
// Since we move all files from `static` to `static/_next/static`, we'll need to change
// those references as well and update the manifest file.
const middlewareManifest = join(
cwd,
OUTPUT_DIR,
'server',
'middleware-manifest.json'
);
if (fs.existsSync(middlewareManifest)) {
const manifest = await fs.readJSON(middlewareManifest);
Object.keys(manifest.middleware).forEach(key => {
const files = manifest.middleware[key].files.map((f: string) => {
if (f.startsWith('static/')) {
const next = f.replace(/^static\//gm, 'static/_next/static/');
client.output.debug(
`Replacing file in \`middleware-manifest.json\`: ${f} => ${next}`
);
return next;
}

return f;
});

manifest.middleware[key].files = files;
});

await fs.writeJSON(middlewareManifest, manifest);
}

// We want to pick up directories for user-provided static files into `.output/static`.
// More specifically, the static directory contents would then be mounted to `output/static/static`,
// and the public directory contents would be mounted to `output/static`. Old Next.js versions
@@ -486,7 +533,7 @@ export default async function main(client: Client) {
fileList.delete(relative(cwd, f));
await resolveNftToOutput({
client,
cwd,
baseDir,
outputDir: OUTPUT_DIR,
nftFileName: f.replace(ext, '.js.nft.json'),
nft: {
@@ -502,7 +549,7 @@ export default async function main(client: Client) {
const json = await fs.readJson(f);
await resolveNftToOutput({
client,
cwd,
baseDir,
outputDir: OUTPUT_DIR,
nftFileName: f,
nft: json,
@@ -520,17 +567,22 @@ export default async function main(client: Client) {
await fs.writeJSON(requiredServerFilesPath, {
...requiredServerFilesJson,
appDir: '.',
files: requiredServerFilesJson.files.map((i: string) => ({
input: i.replace('.next', '.output'),
output: i,
})),
files: requiredServerFilesJson.files.map((i: string) => {
const absolutePath = join(cwd, i.replace('.next', '.output'));
const output = relative(baseDir, absolutePath);

return {
input: i.replace('.next', '.output'),
output,
};
}),
});
}
}

// Build Plugins
if (plugins?.buildPlugins && plugins.buildPlugins.length > 0) {
client.output.log(
console.log(
`Running ${plugins.pluginCount} CLI ${pluralize(
'Plugin',
plugins.pluginCount
@@ -547,7 +599,9 @@ export default async function main(client: Client) {
console.log = (...args: any[]) => prefixedLog(prefix, args, origLog);
console.error = (...args: any[]) =>
prefixedLog(prefix, args, origErr);
await plugin.build();
await plugin.build({
workPath: cwd,
});
client.output.debug(
`Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
);
@@ -563,13 +617,13 @@ export default async function main(client: Client) {
}
}

client.output.print(
console.log(
`${prependEmoji(
`Build Completed in ${chalk.bold(OUTPUT_DIR)} ${chalk.gray(
buildStamp()
)}`,
emoji('success')
)}\n`
)}`
);

return 0;
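To illustrate the rewrite performed on `middleware-manifest.json` (the file names below are made up; the replacement expression is the one from the hunk above):

```ts
const files = ['static/chunks/middleware.js', 'server/pages/_middleware.js'];
const rewritten = files.map(f =>
  f.startsWith('static/') ? f.replace(/^static\//gm, 'static/_next/static/') : f
);
// -> ['static/_next/static/chunks/middleware.js', 'server/pages/_middleware.js']
```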
@@ -615,71 +669,37 @@ export async function runPackageJsonScript(
}
}

client.output.log(`Running Build Command: ${cmd(opts.prettyCommand)}\n`);
console.log(`Running Build Command: ${cmd(opts.prettyCommand)}\n`);
await spawnAsync(cliType, ['run', scriptName], opts);
client.output.print('\n'); // give it some room
console.log(); // give it some room
client.output.debug(`Script complete [${Date.now() - runScriptTime}ms]`);
return true;
}

async function loadCliPlugins(client: Client, cwd: string) {
const { packageJson } = await scanParentDirs(cwd, true);

let pluginCount = 0;
const preBuildPlugins = [];
const buildPlugins = [];
const deps = new Set(
[
...Object.keys(packageJson?.dependencies || {}),
...Object.keys(packageJson?.devDependencies || {}),
...Object.keys(cliPkgJson.dependencies),
].filter(dep => dep.startsWith(VERCEL_PLUGIN_PREFIX))
);

for (let dep of deps) {
pluginCount++;
const resolved = require.resolve(dep, {
paths: [cwd, process.cwd(), __dirname],
});
let plugin;
try {
plugin = require(resolved);
const color = getColorForPkgName(dep);
if (typeof plugin.preBuild === 'function') {
preBuildPlugins.push({
plugin,
name: dep,
color,
});
}
if (typeof plugin.build === 'function') {
buildPlugins.push({
plugin,
name: dep,
color,
});
}
} catch (error) {
client.output.error(`Failed to import ${code(dep)}`);
throw error;
}
}

return { pluginCount, preBuildPlugins, buildPlugins };
}

async function linkOrCopy(existingPath: string, newPath: string) {
try {
await fs.createLink(existingPath, newPath);
if (
newPath.endsWith('.nft.json') ||
newPath.endsWith('middleware-manifest.json') ||
newPath.endsWith('required-server-files.json')
) {
await fs.copy(existingPath, newPath, {
overwrite: true,
});
} else {
await fs.createSymlink(existingPath, newPath, 'file');
}
} catch (err: any) {
// eslint-disable-line
// If a hard link to the same file already exists
// If a symlink to the same file already exists
// then trying to copy it will make an empty file from it.
if (err['code'] === 'EEXIST') return;
// In some VERY rare cases (1 in a thousand), hard-link creation fails on Windows.
// In some VERY rare cases (1 in a thousand), symlink creation fails on Windows.
// In that case, we just fall back to copying.
// This issue is reproducible with "pnpm add @material-ui/icons@4.9.1"
await fs.copyFile(existingPath, newPath);
await fs.copy(existingPath, newPath, {
overwrite: true,
});
}
}

@@ -723,13 +743,13 @@ interface NftFile {
// properly with `vc --prebuilt`.
async function resolveNftToOutput({
client,
cwd,
baseDir,
outputDir,
nftFileName,
nft,
}: {
client: Client;
cwd: string;
baseDir: string;
outputDir: string;
nftFileName: string;
nft: NftFile;
@@ -749,9 +769,15 @@ async function resolveNftToOutput({
const newFilePath = join(outputDir, 'inputs', hash(raw) + ext);
smartCopy(client, fullInput, newFilePath);

// We have to use `baseDir` instead of `cwd`, because we want to
// mount everything from there (especially `node_modules`).
// This is important for NPM Workspaces where `node_modules` is not
// in the directory of the workspace.
const output = relative(baseDir, fullInput).replace('.output', '.next');

newFilesList.push({
input: relative(parse(nftFileName).dir, newFilePath),
output: relative(cwd, fullInput).replace('.output', '.next'),
output,
});
} else {
newFilesList.push(relativeInput);
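The symlink-or-copy fallback in the `linkOrCopy` hunk above follows a common pattern; a stripped-down sketch (not the exact CLI helper, and without the manifest-file special cases):

```ts
import fs from 'fs-extra';

async function linkOrCopyFile(src: string, dest: string) {
  try {
    // Prefer a symlink so populating .output stays cheap.
    await fs.createSymlink(src, dest, 'file');
  } catch (err: any) {
    if (err.code === 'EEXIST') return; // already links to the same file
    // Rare failures (notably on Windows): fall back to a real copy.
    await fs.copy(src, dest, { overwrite: true });
  }
}
```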
@@ -6,7 +6,6 @@ import { ProjectEnvVariable } from '../../types';
import Client from '../../util/client';
import { getLinkedProject } from '../../util/projects/link';
import { getFrameworks } from '../../util/get-frameworks';
import { isSettingValue } from '../../util/is-setting-value';
import { ProjectSettings } from '../../types';
import getDecryptedEnvRecords from '../../util/get-decrypted-env-records';
import setupAndLink from '../../util/link/setup-and-link';
@@ -71,9 +70,9 @@ export default async function dev(
frameworkSlug = framework.slug;
}

const defaults = framework.settings.devCommand;
if (isSettingValue(defaults)) {
devCommand = defaults.value;
const defaults = framework.settings.devCommand.value;
if (defaults) {
devCommand = defaults;
}
}
}
@@ -48,6 +48,22 @@ const help = () => {
};

export default async function main(client: Client) {
if (process.env.__VERCEL_DEV_RUNNING) {
client.output.error(
`${cmd(
`${getPkgName()} dev`
)} must not recursively invoke itself. Check the Development Command in the Project Settings or the ${cmd(
'dev'
)} script in ${cmd('package.json')}`
);
client.output.error(
`Learn More: https://vercel.link/recursive-invocation-of-commands`
);
return 1;
} else {
process.env.__VERCEL_DEV_RUNNING = '1';
}

let argv;
let args;
const { output } = client;
@@ -90,22 +106,21 @@ export default async function main(client: Client) {
if (pkg) {
const { scripts } = pkg as PackageJson;

if (scripts && scripts.dev && /\bnow\b\W+\bdev\b/.test(scripts.dev)) {
output.error(
`The ${cmd('dev')} script in ${cmd(
'package.json'
)} must not contain ${cmd('now dev')}`
if (
scripts &&
scripts.dev &&
/\b(now|vercel)\b\W+\bdev\b/.test(scripts.dev)
) {
client.output.error(
`${cmd(
`${getPkgName()} dev`
)} must not recursively invoke itself. Check the Development Command in the Project Settings or the ${cmd(
'dev'
)} script in ${cmd('package.json')}`
);
output.error(`Learn More: http://err.sh/vercel/now-dev-as-dev-script`);
return 1;
}
if (scripts && scripts.dev && /\bvercel\b\W+\bdev\b/.test(scripts.dev)) {
output.error(
`The ${cmd('dev')} script in ${cmd(
'package.json'
)} must not contain ${cmd('vercel dev')}`
client.output.error(
`Learn More: https://vercel.link/recursive-invocation-of-commands`
);
output.error(`Learn More: http://err.sh/vercel/now-dev-as-dev-script`);
return 1;
}
}
@@ -87,6 +87,8 @@ export default async function main(client: Client) {

const { project, org } = link;

client.config.currentTeam = org.type === 'team' ? org.id : undefined;

const result = await pull(
client,
project,
@@ -160,24 +160,26 @@ const main = async () => {
// * a path to deploy (as in: `vercel path/`)
// * a subcommand (as in: `vercel ls`)
const targetOrSubcommand = argv._[2];
const isBuildOrDev =
targetOrSubcommand === 'build' || targetOrSubcommand === 'dev';

output.print(
`${chalk.grey(
`${getTitleName()} CLI ${pkg.version}${
targetOrSubcommand === 'dev'
? ' dev (beta)'
: targetOrSubcommand === 'build'
? ' build (beta)'
: ''
}${
isCanary ||
targetOrSubcommand === 'dev' ||
targetOrSubcommand === 'build'
? ' — https://vercel.com/feedback'
: ''
}`
)}\n`
);
if (isBuildOrDev) {
console.log(
`${chalk.grey(
`${getTitleName()} CLI ${
pkg.version
} ${targetOrSubcommand} (beta) — https://vercel.com/feedback`
)}`
);
} else {
output.print(
`${chalk.grey(
`${getTitleName()} CLI ${pkg.version}${
isCanary ? ' — https://vercel.com/feedback' : ''
}`
)}\n`
);
}

// Handle `--version` directly
if (!targetOrSubcommand && argv['--version']) {
@@ -18,12 +18,8 @@ export const isDirectory = (path: string): boolean => {
const getGlobalPathConfig = (): string => {
let customPath: string | undefined;

try {
const argv = getArgs(process.argv.slice(2), {});
customPath = argv['--global-config'];
} catch (_error) {
// args are optional so consume error
}
const argv = getArgs(process.argv.slice(2), {}, { permissive: true });
customPath = argv['--global-config'];

const vercelDirectories = XDGAppPaths('com.vercel.cli').dataDirs();

@@ -7,12 +7,8 @@ import getArgs from '../../util/get-args';
export default function getLocalPathConfig(prefix: string) {
let customPath: string | undefined;

try {
const argv = getArgs(process.argv.slice(2), {});
customPath = argv['--local-config'];
} catch (_error) {
// args are optional so consume error
}
const argv = getArgs(process.argv.slice(2), {}, { permissive: true });
customPath = argv['--local-config'];

// If `--local-config` flag was specified, then that takes priority
if (customPath) {
@@ -89,6 +89,7 @@ import {
} from './types';
import { ProjectEnvVariable, ProjectSettings } from '../../types';
import exposeSystemEnvs from './expose-system-envs';
import { loadCliPlugins } from '../plugins';

const frontendRuntimeSet = new Set(
frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
@@ -1349,6 +1350,30 @@ export default class DevServer {
return false;
};

runDevMiddleware = async (
req: http.IncomingMessage,
res: http.ServerResponse
) => {
const { devMiddlewarePlugins } = await loadCliPlugins(
this.cwd,
this.output
);
try {
for (let plugin of devMiddlewarePlugins) {
const result = await plugin.plugin.runDevMiddleware(req, res, this.cwd);
if (result.finished) {
return result;
}
}
return { finished: false };
} catch (e) {
return {
finished: true,
error: e,
};
}
};

/**
 * Serve project directory as a v2 deployment.
 */
@@ -1416,6 +1441,36 @@ export default class DevServer {
let prevUrl = req.url;
let prevHeaders: HttpHeadersConfig = {};

const middlewareResult = await this.runDevMiddleware(req, res);

if (middlewareResult) {
if (middlewareResult.error) {
this.sendError(
req,
res,
requestId,
'EDGE_FUNCTION_INVOCATION_FAILED',
500
);
return;
}
if (middlewareResult.finished) {
return;
}

if (middlewareResult.pathname) {
const origUrl = url.parse(req.url || '/', true);
origUrl.pathname = middlewareResult.pathname;
prevUrl = url.format(origUrl);
}
if (middlewareResult.query && prevUrl) {
const origUrl = url.parse(req.url || '/', true);
delete origUrl.search;
Object.assign(origUrl.query, middlewareResult.query);
prevUrl = url.format(origUrl);
}
}

for (const phase of phases) {
statusCode = undefined;

@@ -2106,7 +2161,10 @@ export default class DevServer {
process.stdout.write(data.replace(proxyPort, devPort));
});

p.on('exit', () => {
p.on('exit', (code: number) => {
if (code > 0) {
process.exit(code);
}
this.devProcessPort = undefined;
});

@@ -1,11 +1,9 @@
import { Stats } from 'fs';
import { sep, dirname, join, resolve } from 'path';
import { readJSON, lstat, readlink, readFile, realpath } from 'fs-extra';
import { lstat, readlink, readFile, realpath } from 'fs-extra';
import { isCanary } from './is-canary';
import { getPkgName } from './pkg-name';

// `npm` tacks a bunch of extra properties on the `package.json` file,
// so check for one of them to determine yarn vs. npm.
async function isYarn(): Promise<boolean> {
let s: Stats;
let binPath = process.argv[1];
@@ -20,8 +18,12 @@ async function isYarn(): Promise<boolean> {
}
}
const pkgPath = join(dirname(binPath), '..', 'package.json');
const pkg = await readJSON(pkgPath).catch(() => ({}));
return !('_id' in pkg);
/*
 * Generally, pkgPath looks like:
 * "/Users/username/.config/yarn/global/node_modules/vercel/package.json"
 * "/usr/local/share/.config/yarn/global/node_modules/vercel/package.json"
 */
return pkgPath.includes(join('yarn', 'global'));
}

async function getConfigPrefix() {
@@ -125,10 +125,14 @@ export class Output {
this.debug(`Spinner invoked (${message}) with a ${delay}ms delay`);
return;
}
if (this._spinner) {
this._spinner.text = message;
if (this.isTTY) {
if (this._spinner) {
this._spinner.text = message;
} else {
this._spinner = wait(message, delay);
}
} else {
this._spinner = wait(message, delay);
this.print(`${message}\n`);
}
};

@@ -1,11 +1,6 @@
import { relative as nativeRelative } from 'path';

const isWin = process.platform === 'win32';
import { normalizePath } from '@vercel/build-utils';

export function relative(a: string, b: string): string {
let p = nativeRelative(a, b);
if (isWin) {
p = p.replace(/\\/g, '/');
}
return p;
return normalizePath(nativeRelative(a, b));
}

packages/cli/src/util/plugins.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import code from '../util/output/code';
import { getColorForPkgName } from '../util/output/color-name-cache';
import cliPkgJson from '../util/pkg';
import { scanParentDirs } from '@vercel/build-utils';
import { Output } from './output';

const VERCEL_PLUGIN_PREFIX = 'vercel-plugin-';

export async function loadCliPlugins(cwd: string, output: Output) {
  const { packageJson } = await scanParentDirs(cwd, true);

  let pluginCount = 0;
  const preBuildPlugins = [];
  const buildPlugins = [];
  const devServerPlugins = [];
  const devMiddlewarePlugins = [];
  const deps = new Set(
    [
      ...Object.keys(packageJson?.dependencies || {}),
      ...Object.keys(packageJson?.devDependencies || {}),
      ...Object.keys(cliPkgJson.dependencies),
    ].filter(dep => dep.startsWith(VERCEL_PLUGIN_PREFIX))
  );

  for (let dep of deps) {
    pluginCount++;
    const resolved = require.resolve(dep, {
      paths: [cwd, process.cwd(), __dirname],
    });
    let plugin;
    try {
      plugin = require(resolved);

      const color = getColorForPkgName(dep);
      if (typeof plugin.preBuild === 'function') {
        preBuildPlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
      if (typeof plugin.build === 'function') {
        buildPlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
      if (typeof plugin.startDevServer === 'function') {
        devServerPlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
      if (typeof plugin.runDevMiddleware === 'function') {
        devMiddlewarePlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
    } catch (error) {
      output.error(`Failed to import ${code(dep)}`);
      throw error;
    }
  }

  return {
    pluginCount,
    preBuildPlugins,
    buildPlugins,
    devServerPlugins,
    devMiddlewarePlugins,
  };
}
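What `loadCliPlugins` looks for, sketched as a hypothetical minimal plugin package (the hook names match the detection above; package name, bodies, and the exact hook signatures are illustrative assumptions, except for `build({ workPath })` and `runDevMiddleware(req, res, cwd)`, which the surrounding diffs call with those shapes):

```ts
// Hypothetical vercel-plugin-example/index.ts — loadCliPlugins only keeps
// the hooks that are exported as functions.
import type http from 'http';

export async function preBuild(): Promise<void> {
  // runs before the project's Build Command
}

export async function build({ workPath }: { workPath: string }): Promise<void> {
  // write this plugin's output into `${workPath}/.output`
}

export async function runDevMiddleware(
  req: http.IncomingMessage,
  res: http.ServerResponse,
  cwd: string
) {
  // optionally intercept requests during `vercel dev`
  return { finished: false };
}
```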
@@ -244,20 +244,27 @@ export async function linkFolderToProject(
try {
const gitIgnorePath = join(path, '.gitignore');

const gitIgnore = await readFile(gitIgnorePath, 'utf8').catch(() => null);
const EOL = gitIgnore && gitIgnore.includes('\r\n') ? '\r\n' : os.EOL;
let gitIgnore =
(await readFile(gitIgnorePath, 'utf8').catch(() => null)) ?? '';
const EOL = gitIgnore.includes('\r\n') ? '\r\n' : os.EOL;
let contentModified = false;

if (
!gitIgnore ||
!gitIgnore.split(EOL).includes(VERCEL_DIR) ||
!gitIgnore.split(EOL).includes(VERCEL_OUTPUT_DIR)
) {
await writeFile(
gitIgnorePath,
gitIgnore
? `${gitIgnore}${EOL}${VERCEL_DIR}${EOL}${VERCEL_OUTPUT_DIR}${EOL}`
: `${VERCEL_DIR}${EOL}${VERCEL_OUTPUT_DIR}${EOL}`
);
if (!gitIgnore.split(EOL).includes(VERCEL_DIR)) {
gitIgnore += `${
gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
}${VERCEL_DIR}${EOL}`;
contentModified = true;
}

if (!gitIgnore.split(EOL).includes(VERCEL_OUTPUT_DIR)) {
gitIgnore += `${
gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
}${VERCEL_OUTPUT_DIR}${EOL}`;
contentModified = true;
}

if (contentModified) {
await writeFile(gitIgnorePath, gitIgnore);
isGitIgnoreUpdated = true;
}
} catch (error) {
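The EOL-aware append in the hunk above boils down to this pattern (a simplified sketch):

```ts
import os from 'os';

function appendIgnoreEntry(gitIgnore: string, entry: string): string {
  const EOL = gitIgnore.includes('\r\n') ? '\r\n' : os.EOL;
  if (gitIgnore.split(EOL).includes(entry)) return gitIgnore; // already present
  const sep = gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL;
  return `${gitIgnore}${sep}${entry}${EOL}`;
}

// e.g. on a POSIX host: appendIgnoreEntry('node_modules\n', '.vercel')
// -> 'node_modules\n.vercel\n'
```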
packages/cli/test/fixtures/unit/edge-middleware-error/_middleware.js (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
export default () => {
  throw new Error('asdf');
};
packages/cli/test/fixtures/unit/edge-middleware-invalid-response/_middleware.js (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
export default function () {
  return 'freecandy';
}
packages/cli/test/fixtures/unit/edge-middleware-ts/_middleware.ts (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
import { response } from './response';

export default () => {
  return new Response(response);
};
packages/cli/test/fixtures/unit/edge-middleware-ts/response.ts (vendored, new file, 1 line)
@@ -0,0 +1 @@
export const response = 'response';
packages/cli/test/fixtures/unit/edge-middleware/_middleware.js (vendored, new file, 10 lines)
@@ -0,0 +1,10 @@
import response from './response.json';

export default function () {
  return new Response(JSON.stringify(response), {
    status: 200,
    headers: {
      'Content-Type': 'application/json',
    },
  });
}
packages/cli/test/fixtures/unit/edge-middleware/index.html (vendored, new file, 1 line)
@@ -0,0 +1 @@
not hello world
packages/cli/test/fixtures/unit/edge-middleware/response.json (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "text": "hello world"
}
@@ -19,7 +19,7 @@ const getRevertAliasConfigFile = () => {
],
});
};
module.exports = async function prepare(session) {
module.exports = async function prepare(session, binaryPath) {
const spec = {
'static-single-file': {
'first.png': getImageFile(session, { size: 30 }),
@@ -114,6 +114,23 @@ module.exports = async function prepare(session) {
2
),
},
'dev-fail-on-recursion-command': {
'package.json': '{}',
},
'build-fail-on-recursion-command': {
'package.json': '{}',
},
'build-fail-on-recursion-script': {
'package.json': JSON.stringify(
{
scripts: {
build: `${binaryPath} build`,
},
},
null,
2
),
},
'static-deployment': {
'index.txt': 'Hello World',
},
@@ -348,6 +365,10 @@ module.exports = async function prepare(session) {
'project-link-dev': {
'package.json': '{}',
},
'project-link-gitignore': {
'package.json': '{}',
'.gitignore': '.output',
},
'project-link-legacy': {
'index.html': 'Hello',
'vercel.json': '{"builds":[{"src":"*.html","use":"@vercel/static"}]}',
396  packages/cli/test/integration.js (vendored)
@@ -252,10 +252,69 @@ const createUser = async () => {

const getConfigAuthPath = () => path.join(globalDir, 'auth.json');

+async function setupProject(process, projectName, overrides) {
+  await waitForPrompt(process, chunk => /Set up [^?]+\?/.test(chunk));
+  process.stdin.write('yes\n');
+
+  await waitForPrompt(process, chunk => /Which scope [^?]+\?/.test(chunk));
+  process.stdin.write('\n');
+
+  await waitForPrompt(process, chunk =>
+    chunk.includes('Link to existing project?')
+  );
+  process.stdin.write('no\n');
+
+  await waitForPrompt(process, chunk =>
+    chunk.includes('What’s your project’s name?')
+  );
+  process.stdin.write(`${projectName}\n`);
+
+  await waitForPrompt(process, chunk =>
+    chunk.includes('In which directory is your code located?')
+  );
+  process.stdin.write('\n');
+
+  await waitForPrompt(process, chunk =>
+    chunk.includes('Want to override the settings?')
+  );
+
+  if (overrides) {
+    process.stdin.write('yes\n');
+
+    const { buildCommand, outputDirectory, devCommand } = overrides;
+
+    await waitForPrompt(process, chunk =>
+      chunk.includes(
+        'Which settings would you like to overwrite (select multiple)?'
+      )
+    );
+    process.stdin.write('a\n'); // 'a' means select all
+
+    await waitForPrompt(process, chunk =>
+      chunk.includes(`What's your Build Command?`)
+    );
+    process.stdin.write(`${buildCommand ?? ''}\n`);
+
+    await waitForPrompt(process, chunk =>
+      chunk.includes(`What's your Output Directory?`)
+    );
+    process.stdin.write(`${outputDirectory ?? ''}\n`);
+
+    await waitForPrompt(process, chunk =>
+      chunk.includes(`What's your Development Command?`)
+    );
+    process.stdin.write(`${devCommand ?? ''}\n`);
+  } else {
+    process.stdin.write('no\n');
+  }
+
+  await waitForPrompt(process, chunk => chunk.includes('Linked to'));
+}
+
test.before(async () => {
  try {
    await createUser();
-    await prepareFixtures(contextName);
+    await prepareFixtures(contextName, binaryPath);
  } catch (err) {
    console.log('Failed `test.before`');
    console.log(err);
@@ -2227,13 +2286,93 @@ test('whoami', async t => {
  t.is(stdout, contextName, formatOutput({ stdout, stderr }));
});

-test('fail `now dev` dev script without now.json', async t => {
+test('[vercel dev] fails when dev script calls vercel dev recursively', async t => {
  const deploymentPath = fixture('now-dev-fail-dev-script');
  const { exitCode, stderr } = await execute(['dev', deploymentPath]);

  t.is(exitCode, 1);
  t.true(
-    stderr.includes('must not contain `now dev`'),
+    stderr.includes('must not recursively invoke itself'),
    `Received instead: "${stderr}"`
  );
});

+test('[vercel dev] fails when development command calls vercel dev recursively', async t => {
+  const dir = fixture('dev-fail-on-recursion-command');
+  const projectName = `dev-fail-on-recursion-command-${
+    Math.random().toString(36).split('.')[1]
+  }`;
+
+  const dev = execa(binaryPath, ['dev', ...defaultArgs], {
+    cwd: dir,
+    reject: false,
+  });
+
+  await setupProject(dev, projectName, {
+    devCommand: `${binaryPath} dev`,
+  });
+
+  const { exitCode, stderr } = await dev;
+
+  t.is(exitCode, 1);
+  t.true(
+    stderr.includes('must not recursively invoke itself'),
+    `Received instead: "${stderr}"`
+  );
+});
+
+test('[vercel build] fails when build command calls vercel build recursively', async t => {
+  const dir = fixture('build-fail-on-recursion-command');
+  const projectName = `build-fail-on-recursion-command-${
+    Math.random().toString(36).split('.')[1]
+  }`;
+
+  const build = execa(binaryPath, ['build', ...defaultArgs], {
+    cwd: dir,
+    reject: false,
+  });
+
+  await waitForPrompt(build, chunk =>
+    chunk.includes('No Project Settings found locally')
+  );
+  build.stdin.write('yes\n');
+
+  await setupProject(build, projectName, {
+    buildCommand: `${binaryPath} build`,
+  });
+
+  const { exitCode, stderr } = await build;
+
+  t.is(exitCode, 1);
+  t.true(
+    stderr.includes('must not recursively invoke itself'),
+    `Received instead: "${stderr}"`
+  );
+});
+
+test('[vercel build] fails when build script calls vercel build recursively', async t => {
+  const dir = fixture('build-fail-on-recursion-script');
+  const projectName = `build-fail-on-recursion-script-${
+    Math.random().toString(36).split('.')[1]
+  }`;
+
+  const build = execa(binaryPath, ['build', ...defaultArgs], {
+    cwd: dir,
+    reject: false,
+  });
+
+  await waitForPrompt(build, chunk =>
+    chunk.includes('No Project Settings found locally')
+  );
+  build.stdin.write('yes\n');
+
+  await setupProject(build, projectName);
+
+  const { exitCode, stderr } = await build;
+
+  t.is(exitCode, 1);
+  t.true(
+    stderr.includes('must not recursively invoke itself'),
+    `Received instead: "${stderr}"`
+  );
+});
@@ -2544,7 +2683,7 @@ test('deploy a Lambda with 3 seconds of maxDuration', async t => {
  const url = new URL(output.stdout);

  // Should time out
-  url.pathname = '/api/wait-for/4';
+  url.pathname = '/api/wait-for/5';
  const response1 = await fetch(url.href);
  t.is(
    response1.status,
@@ -2553,7 +2692,7 @@ test('deploy a Lambda with 3 seconds of maxDuration', async t => {
  );

  // Should not time out
-  url.pathname = '/api/wait-for/2';
+  url.pathname = '/api/wait-for/1';
  const response2 = await fetch(url.href);
  t.is(
    response2.status,
@@ -2683,59 +2822,10 @@ test('should show prompts to set up project during first deploy', async t => {
|
||||
|
||||
const now = execa(binaryPath, [dir, ...defaultArgs]);
|
||||
|
||||
await waitForPrompt(now, chunk => /Set up and deploy [^?]+\?/.test(chunk));
|
||||
now.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('Which scope do you want to deploy to?')
|
||||
);
|
||||
now.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('Link to existing project?')
|
||||
);
|
||||
now.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
now.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
now.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
now.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
now.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
now.stdin.write(
|
||||
`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`
|
||||
);
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
now.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
now.stdin.write(`\n`);
|
||||
|
||||
await waitForPrompt(now, chunk => chunk.includes('Linked to'));
|
||||
await setupProject(now, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
const output = await now;
|
||||
|
||||
@@ -3301,55 +3391,10 @@ test('[vc link] should show prompts to set up project', async t => {
|
||||
|
||||
const vc = execa(binaryPath, ['link', ...defaultArgs], { cwd: dir });
|
||||
|
||||
await waitForPrompt(vc, chunk => /Set up [^?]+\?/.test(chunk));
|
||||
vc.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('Which scope should contain your project?')
|
||||
);
|
||||
vc.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(vc, chunk => chunk.includes('Link to existing project?'));
|
||||
vc.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
vc.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
vc.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
vc.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
vc.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
vc.stdin.write(`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
vc.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
vc.stdin.write(`\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk => chunk.includes('Linked to'));
|
||||
await setupProject(vc, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
const output = await vc;
|
||||
|
||||
@@ -3408,6 +3453,29 @@ test('[vc link --confirm] should not show prompts and autolink', async t => {
  );
});

+test('[vc link] should not duplicate paths in .gitignore', async t => {
+  const dir = fixture('project-link-gitignore');
+
+  // remove previously linked project if it exists
+  await remove(path.join(dir, '.vercel'));
+
+  const { exitCode, stderr, stdout } = await execa(
+    binaryPath,
+    ['link', '--confirm', ...defaultArgs],
+    { cwd: dir, reject: false }
+  );
+
+  // Ensure the exit code is right
+  t.is(exitCode, 0, formatOutput({ stderr, stdout }));
+
+  // Ensure the message is correct pattern
+  t.regex(stderr, /Linked to /m);
+
+  // Ensure .gitignore is created
+  const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
+  t.is(gitignore, '.output\n.vercel\n');
+});
+
test('[vc dev] should show prompts to set up project', async t => {
  const dir = fixture('project-link-dev');
  const port = 58352;
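For a concrete picture of what the `.gitignore` assertion above expects (an illustration, not part of the diff): the `project-link-gitignore` fixture ships a `.gitignore` that already contains `.output`, and after `vc link --confirm` the file should gain `.vercel` exactly once instead of a duplicated entry.

# .gitignore before linking
.output

# .gitignore after `vc link --confirm`
.output
.vercel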
@@ -3422,59 +3490,10 @@ test('[vc dev] should show prompts to set up project', async t => {
|
||||
cwd: dir,
|
||||
});
|
||||
|
||||
await waitForPrompt(dev, chunk => /Set up and develop [^?]+\?/.test(chunk));
|
||||
dev.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Which scope should contain your project?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Link to existing project?')
|
||||
);
|
||||
dev.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
dev.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
dev.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
dev.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
dev.stdin.write(
|
||||
`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`
|
||||
);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
dev.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
dev.stdin.write(`\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk => chunk.includes('Linked to'));
|
||||
await setupProject(dev, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
// Ensure .gitignore is created
|
||||
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
|
||||
@@ -3574,59 +3593,12 @@ test('[vc dev] should send the platform proxy request headers to frontend dev se
|
||||
cwd: dir,
|
||||
});
|
||||
|
||||
await waitForPrompt(dev, chunk => /Set up and develop [^?]+\?/.test(chunk));
|
||||
dev.stdin.write('yes\n');
|
||||
await setupProject(dev, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
devCommand: 'node server.js',
|
||||
});
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Which scope should contain your project?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Link to existing project?')
|
||||
);
|
||||
dev.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
dev.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
dev.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
dev.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
dev.stdin.write(
|
||||
`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`
|
||||
);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
dev.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
dev.stdin.write(`node server.js\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk => chunk.includes('Linked to'));
|
||||
await waitForPrompt(dev, chunk => chunk.includes('Ready! Available at'));
|
||||
|
||||
// Ensure that `vc dev` also works
|
||||
|
||||
@@ -335,4 +335,54 @@ describe('DevServer', () => {
      expect(body).toEqual('The page could not be found.\n\nNOT_FOUND\n');
    })
  );
+
+  it(
+    'should support edge middleware',
+    testFixture('edge-middleware', async server => {
+      const response = await fetch(`${server.address}/index.html`);
+      const body = await response.json();
+      expect(body).toEqual(
+        JSON.parse(
+          fs.readFileSync(
+            path.join(
+              __dirname,
+              '../../fixtures/unit/edge-middleware/response.json'
+            ),
+            'utf8'
+          )
+        )
+      );
+    })
+  );
+
+  it(
+    'should work with middleware written in typescript',
+    testFixture('edge-middleware-ts', async server => {
+      const response = await fetch(`${server.address}/index.html`);
+      const body = await response.text();
+      expect(body).toStrictEqual('response');
+    })
+  );
+
+  it(
+    'should render an error page when the middleware throws',
+    testFixture('edge-middleware-error', async server => {
+      const response = await fetch(`${server.address}/index.html`);
+      const body = await response.text();
+      expect(body).toStrictEqual(
+        'A server error has occurred\n\nEDGE_FUNCTION_INVOCATION_FAILED\n'
+      );
+    })
+  );
+
+  it(
+    'should render an error page when the middleware does not return a Response',
+    testFixture('edge-middleware-invalid-response', async server => {
+      const response = await fetch(`${server.address}/index.html`);
+      const body = await response.text();
+      expect(body).toStrictEqual(
+        'A server error has occurred\n\nEDGE_FUNCTION_INVOCATION_FAILED\n'
+      );
+    })
+  );
});
@@ -5,7 +5,7 @@ describe('getUpdateCommand', () => {
  it('should detect update command', async () => {
    const updateCommand = await getUpdateCommand();
    expect(updateCommand).toEqual(
-      `yarn add vercel@${isCanary() ? 'canary' : 'latest'}`
+      `npm i vercel@${isCanary() ? 'canary' : 'latest'}`
    );
  });
});
@@ -1,6 +1,6 @@
{
  "name": "@vercel/client",
-  "version": "10.2.3-canary.15",
+  "version": "10.2.3-canary.20",
  "main": "dist/index.js",
  "typings": "dist/index.d.ts",
  "homepage": "https://vercel.com",
@@ -40,7 +40,7 @@
    ]
  },
  "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.14",
+    "@vercel/build-utils": "2.12.3-canary.19",
    "@zeit/fetch": "5.2.0",
    "async-retry": "1.2.3",
    "async-sema": "3.0.0",
@@ -1,6 +1,6 @@
{
  "name": "@vercel/frameworks",
-  "version": "0.5.1-canary.10",
+  "version": "0.5.1-canary.12",
  "main": "./dist/frameworks.js",
  "types": "./dist/frameworks.d.ts",
  "files": [
@@ -20,7 +20,7 @@
    "@types/js-yaml": "3.12.1",
    "@types/node": "12.0.4",
    "@types/node-fetch": "2.5.8",
-    "@vercel/routing-utils": "1.11.4-canary.5",
+    "@vercel/routing-utils": "1.11.4-canary.6",
    "ajv": "6.12.2",
    "typescript": "4.3.4"
  }
@@ -43,6 +43,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `blitz build`',
|
||||
value: 'blitz build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'blitz start',
|
||||
@@ -51,8 +52,6 @@ export const frameworks = [
|
||||
placeholder: 'Next.js default',
|
||||
},
|
||||
},
|
||||
devCommand: 'blitz start',
|
||||
buildCommand: 'blitz build',
|
||||
getFsOutputDir: async () => '.next',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -83,6 +82,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `next build`',
|
||||
value: 'next build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'next dev --port $PORT',
|
||||
@@ -98,8 +98,6 @@ export const frameworks = [
|
||||
dependencies: ['next-plugin-sentry', 'next-sentry-source-maps'],
|
||||
},
|
||||
],
|
||||
devCommand: 'next dev --port $PORT',
|
||||
buildCommand: 'next build',
|
||||
getFsOutputDir: async () => '.next',
|
||||
getOutputDirName: async () => 'public',
|
||||
cachePattern: '.next/cache/**',
|
||||
@@ -131,6 +129,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `gatsby build`',
|
||||
value: 'gatsby build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'gatsby develop --port $PORT',
|
||||
@@ -141,8 +140,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'gatsby',
|
||||
devCommand: 'gatsby develop --port $PORT',
|
||||
buildCommand: 'gatsby build',
|
||||
getOutputDirName: async () => 'public',
|
||||
getFsOutputDir: async () => 'public',
|
||||
defaultRoutes: async (dirPrefix: string) => {
|
||||
@@ -219,6 +216,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `hexo generate`',
|
||||
value: 'hexo generate',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'hexo server --port $PORT',
|
||||
@@ -229,8 +227,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'hexo',
|
||||
devCommand: 'hexo server --port $PORT',
|
||||
buildCommand: 'hexo generate',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -259,6 +255,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `npx @11ty/eleventy`',
|
||||
value: 'npx @11ty/eleventy',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'npx @11ty/eleventy --serve --watch --port $PORT',
|
||||
@@ -269,8 +266,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@11ty/eleventy',
|
||||
devCommand: 'npx @11ty/eleventy --serve --watch --port $PORT',
|
||||
buildCommand: 'npx @11ty/eleventy',
|
||||
getFsOutputDir: async () => '_site',
|
||||
getOutputDirName: async () => '_site',
|
||||
cachePattern: '.cache/**',
|
||||
@@ -300,6 +295,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `docusaurus build`',
|
||||
value: 'docusaurus build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'docusaurus start --port $PORT',
|
||||
@@ -310,8 +306,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@docusaurus/core',
|
||||
devCommand: 'docusaurus start --port $PORT',
|
||||
buildCommand: 'docusaurus build',
|
||||
getFsOutputDir: async (dirPrefix: string) => {
|
||||
const base = 'build';
|
||||
try {
|
||||
@@ -456,6 +450,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `docusaurus-build`',
|
||||
value: 'docusaurus-build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'docusaurus-start --port $PORT',
|
||||
@@ -466,8 +461,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'docusaurus',
|
||||
devCommand: 'docusaurus-start --port $PORT',
|
||||
buildCommand: 'docusaurus-build',
|
||||
getFsOutputDir: async (dirPrefix: string) => {
|
||||
const base = 'build';
|
||||
try {
|
||||
@@ -523,6 +516,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `preact build`',
|
||||
value: 'preact build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'preact watch --port $PORT',
|
||||
@@ -533,8 +527,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'preact-cli',
|
||||
devCommand: 'preact watch --port $PORT',
|
||||
buildCommand: 'preact build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
@@ -581,6 +573,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `dojo build`',
|
||||
value: 'dojo build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'dojo build -m dev -w -s -p $PORT',
|
||||
@@ -591,8 +584,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@dojo/cli',
|
||||
devCommand: 'dojo build -m dev -w -s -p $PORT',
|
||||
buildCommand: 'dojo build',
|
||||
getFsOutputDir: async () => 'output/dist',
|
||||
getOutputDirName: async () => join('output', 'dist'),
|
||||
defaultRoutes: [
|
||||
@@ -649,6 +640,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ember build`',
|
||||
value: 'ember build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ember serve --port $PORT',
|
||||
@@ -659,8 +651,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'ember-cli',
|
||||
devCommand: 'ember serve --port $PORT',
|
||||
buildCommand: 'ember build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -705,6 +695,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `vue-cli-service build`',
|
||||
value: 'vue-cli-service build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'vue-cli-service serve --port $PORT',
|
||||
@@ -715,8 +706,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@vue/cli-service',
|
||||
devCommand: 'vue-cli-service serve --port $PORT',
|
||||
buildCommand: 'vue-cli-service build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -783,6 +772,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ng build && scully`',
|
||||
value: 'ng build && scully',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ng serve --port $PORT',
|
||||
@@ -793,8 +783,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@scullyio/init',
|
||||
devCommand: 'ng serve --port $PORT',
|
||||
buildCommand: 'ng build && scully',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist/static',
|
||||
},
|
||||
@@ -822,6 +810,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ng build`',
|
||||
value: 'ng build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ng serve --port $PORT',
|
||||
@@ -831,8 +820,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@ionic/angular',
|
||||
devCommand: 'ng serve --port $PORT',
|
||||
buildCommand: 'ng build',
|
||||
getFsOutputDir: async () => 'www',
|
||||
getOutputDirName: async () => 'www',
|
||||
defaultRoutes: [
|
||||
@@ -876,6 +863,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ng build`',
|
||||
value: 'ng build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ng serve --port $PORT',
|
||||
@@ -886,8 +874,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@angular/cli',
|
||||
devCommand: 'ng serve --port $PORT',
|
||||
buildCommand: 'ng build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async (dirPrefix: string) => {
|
||||
const base = 'dist';
|
||||
@@ -945,6 +931,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `polymer build`',
|
||||
value: 'polymer build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'polymer serve --port $PORT',
|
||||
@@ -955,8 +942,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'polymer-cli',
|
||||
devCommand: 'polymer serve --port $PORT',
|
||||
buildCommand: 'polymer build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async (dirPrefix: string) => {
|
||||
const base = 'build';
|
||||
@@ -1016,6 +1001,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `rollup -c`',
|
||||
value: 'rollup -c',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'rollup -c -w',
|
||||
@@ -1025,8 +1011,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'sirv-cli',
|
||||
devCommand: 'rollup -c -w',
|
||||
buildCommand: 'rollup -c',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultRoutes: [
|
||||
@@ -1070,6 +1054,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `svelte-kit build`',
|
||||
value: 'svelte-kit build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'svelte-kit dev --port $PORT',
|
||||
@@ -1079,8 +1064,6 @@ export const frameworks = [
|
||||
placeholder: 'public',
|
||||
},
|
||||
},
|
||||
devCommand: 'svelte-kit dev --port $PORT',
|
||||
buildCommand: 'svelte-kit build',
|
||||
getFsOutputDir: async () => '.output',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -1108,6 +1091,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `react-scripts build`',
|
||||
value: 'react-scripts build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'react-scripts start',
|
||||
@@ -1117,8 +1101,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@ionic/react',
|
||||
devCommand: 'react-scripts start',
|
||||
buildCommand: 'react-scripts build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
@@ -1216,6 +1198,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `react-scripts build`',
|
||||
value: 'react-scripts build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'react-scripts start',
|
||||
@@ -1225,8 +1208,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'react-scripts',
|
||||
devCommand: 'react-scripts start',
|
||||
buildCommand: 'react-scripts build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
@@ -1318,6 +1299,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `gridsome build`',
|
||||
value: 'gridsome build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'gridsome develop -p $PORT',
|
||||
@@ -1328,8 +1310,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'gridsome',
|
||||
devCommand: 'gridsome develop -p $PORT',
|
||||
buildCommand: 'gridsome build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
},
|
||||
@@ -1357,6 +1337,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `umi build`',
|
||||
value: 'umi build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'umi dev --port $PORT',
|
||||
@@ -1367,8 +1348,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'umi',
|
||||
devCommand: 'umi dev --port $PORT',
|
||||
buildCommand: 'umi build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -1412,6 +1391,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `sapper export`',
|
||||
value: 'sapper export',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'sapper dev --port $PORT',
|
||||
@@ -1422,8 +1402,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'sapper',
|
||||
devCommand: 'sapper dev --port $PORT',
|
||||
buildCommand: 'sapper export',
|
||||
getFsOutputDir: async () => '__sapper__/export',
|
||||
getOutputDirName: async () => '__sapper__/export',
|
||||
},
|
||||
@@ -1451,6 +1429,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `saber build`',
|
||||
value: 'saber build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'saber --port $PORT',
|
||||
@@ -1461,8 +1440,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'saber',
|
||||
devCommand: 'saber --port $PORT',
|
||||
buildCommand: 'saber build',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultRoutes: [
|
||||
@@ -1521,6 +1498,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `stencil build`',
|
||||
value: 'stencil build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'stencil build --dev --watch --serve --port $PORT',
|
||||
@@ -1531,8 +1509,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@stencil/core',
|
||||
devCommand: 'stencil build --dev --watch --serve --port $PORT',
|
||||
buildCommand: 'stencil build',
|
||||
getFsOutputDir: async () => 'www',
|
||||
getOutputDirName: async () => 'www',
|
||||
defaultRoutes: [
|
||||
@@ -1611,6 +1587,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `nuxt generate`',
|
||||
value: 'nuxt generate',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'nuxt',
|
||||
@@ -1620,8 +1597,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'nuxt',
|
||||
devCommand: 'nuxt',
|
||||
buildCommand: 'nuxt generate',
|
||||
getFsOutputDir: async () => '.output',
|
||||
getOutputDirName: async () => 'dist',
|
||||
cachePattern: '.nuxt/**',
|
||||
@@ -1680,8 +1655,6 @@ export const frameworks = [
|
||||
placeholder: 'RedwoodJS default',
|
||||
},
|
||||
},
|
||||
devCommand: 'yarn rw dev --fwd="--port=$PORT --open=false',
|
||||
buildCommand: 'yarn rw deploy vercel',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -1717,6 +1690,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `hugo -D --gc`',
|
||||
value: 'hugo -D --gc',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'hugo server -D -w -p $PORT',
|
||||
@@ -1726,8 +1700,6 @@ export const frameworks = [
|
||||
placeholder: '`public` or `publishDir` from the `config` file',
|
||||
},
|
||||
},
|
||||
devCommand: 'hugo server -D -w -p $PORT',
|
||||
buildCommand: 'hugo -D --gc',
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
type HugoConfig = { publishDir?: string };
|
||||
const config = await readConfigFile<HugoConfig>(
|
||||
@@ -1772,6 +1744,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `jekyll build`',
|
||||
value: 'jekyll build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'bundle exec jekyll serve --watch --port $PORT',
|
||||
@@ -1781,8 +1754,6 @@ export const frameworks = [
|
||||
placeholder: '`_site` or `destination` from `_config.yml`',
|
||||
},
|
||||
},
|
||||
devCommand: 'bundle exec jekyll serve --watch --port $PORT',
|
||||
buildCommand: 'jekyll build',
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
type JekyllConfig = { destination?: string };
|
||||
const config = await readConfigFile<JekyllConfig>(
|
||||
@@ -1821,6 +1792,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `brunch build --production`',
|
||||
value: 'brunch build --production',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'brunch watch --server --port $PORT',
|
||||
@@ -1830,8 +1802,6 @@ export const frameworks = [
|
||||
value: 'public',
|
||||
},
|
||||
},
|
||||
devCommand: 'brunch watch --server --port $PORT',
|
||||
buildCommand: 'brunch build --production',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -1856,18 +1826,17 @@ export const frameworks = [
|
||||
value: 'bundle install',
|
||||
},
|
||||
buildCommand: {
|
||||
value: '`npm run build` or `bundle exec middleman build`',
|
||||
placeholder: '`npm run build` or `bundle exec middleman build`',
|
||||
value: 'bundle exec middleman build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'bundle exec middleman server -p $PORT',
|
||||
placeholder: 'bundle exec middleman server',
|
||||
value: 'bundle exec middleman server -p $PORT',
|
||||
},
|
||||
outputDirectory: {
|
||||
value: 'build',
|
||||
},
|
||||
},
|
||||
devCommand: 'bundle exec middleman server -p $PORT',
|
||||
buildCommand: 'bundle exec middleman build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
cachePattern: '{vendor/bin,vendor/cache,vendor/bundle}/**',
|
||||
@@ -1896,15 +1865,13 @@ export const frameworks = [
|
||||
value: 'zola build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'zola serve --port $PORT',
|
||||
placeholder: 'zola serve',
|
||||
value: 'zola serve --port $PORT',
|
||||
},
|
||||
outputDirectory: {
|
||||
value: 'public',
|
||||
},
|
||||
},
|
||||
devCommand: 'zola serve --port $PORT',
|
||||
buildCommand: 'zola build',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultVersion: '0.13.0',
|
||||
@@ -1934,17 +1901,17 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `vite build`',
|
||||
value: 'vite build',
|
||||
},
|
||||
devCommand: {
|
||||
placeholder: 'vite',
|
||||
value: 'vite',
|
||||
},
|
||||
outputDirectory: {
|
||||
value: 'dist',
|
||||
},
|
||||
},
|
||||
dependency: 'vite',
|
||||
devCommand: 'vite',
|
||||
buildCommand: 'vite build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
},
|
||||
@@ -1972,17 +1939,17 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `parcel build`',
|
||||
value: 'parcel build',
|
||||
},
|
||||
devCommand: {
|
||||
placeholder: 'parcel',
|
||||
value: 'parcel',
|
||||
},
|
||||
outputDirectory: {
|
||||
placeholder: 'dist',
|
||||
value: 'dist',
|
||||
},
|
||||
},
|
||||
dependency: 'parcel',
|
||||
devCommand: 'parcel',
|
||||
buildCommand: 'parcel build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -2016,16 +1983,16 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run vercel-build` or `npm run build`',
|
||||
value: null,
|
||||
},
|
||||
devCommand: {
|
||||
placeholder: 'None',
|
||||
value: null,
|
||||
},
|
||||
outputDirectory: {
|
||||
placeholder: '`public` if it exists, or `.`',
|
||||
},
|
||||
},
|
||||
devCommand: null,
|
||||
buildCommand: null,
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
// Public if it exists or `.`
|
||||
let base = 'public';
|
||||
|
||||
@@ -26,7 +26,7 @@ export interface SettingValue {
   * A predefined setting for the detected framework
   * @example "next dev --port $PORT"
   */
-  value: string;
+  value: string | null;
  placeholder?: string;
}

@@ -129,11 +129,11 @@ export interface Framework {
  /**
   * Default Build Command or a placeholder
   */
-  buildCommand: Setting;
+  buildCommand: SettingValue;
  /**
   * Default Development Command or a placeholder
   */
-  devCommand: Setting;
+  devCommand: SettingValue;
  /**
   * Default Output Directory
   */
@@ -157,6 +157,7 @@ export interface Framework {
  /**
   * Name of a dependency in `package.json` to detect this framework.
   * @example "hexo"
+   * @deprecated use `detectors` instead (new frameworks should not use this prop)
   */
  dependency?: string;
  /**
@@ -201,16 +202,6 @@ export interface Framework {
   * @example ".cache/**"
   */
  cachePattern?: string;
-  /**
-   * The default build command for the framework.
-   * @example "next build"
-   */
-  buildCommand: string | null;
-  /**
-   * The default development command for the framework.
-   * @example "next dev"
-   */
-  devCommand: string | null;
  /**
   * The default version of the framework command that is available within the
   * build image. Usually an environment variable can be set to override this.
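To summarize the shape change (a minimal sketch for orientation, not an entry copied from frameworks.ts; the framework name, URL, and commands below are made up): after this refactor a framework entry keeps its build and development commands only under `settings`, where `value` may now be `null` when there is no sensible default, and the duplicated top-level `buildCommand`/`devCommand` fields no longer exist.

// Hypothetical TypeScript entry illustrating the consolidated shape.
const exampleFramework = {
  name: 'Example',
  slug: 'example',
  logo: 'https://example-cdn.invalid/example.svg',
  description: 'Illustrative framework entry.',
  settings: {
    installCommand: { placeholder: '`yarn install` or `npm install`' },
    buildCommand: {
      placeholder: '`npm run build` or `example build`',
      value: 'example build',
    },
    devCommand: { value: 'example dev --port $PORT', placeholder: 'example dev' },
    outputDirectory: { value: 'dist' },
  },
  getFsOutputDir: async () => 'dist',
  getOutputDirName: async () => 'dist',
};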
14  packages/frameworks/test/frameworks.unit.test.ts (vendored)
@@ -34,7 +34,7 @@ const SchemaSettings = {
  additionalProperties: false,
  properties: {
    value: {
-      type: 'string',
+      type: ['string', 'null'],
    },
    placeholder: {
      type: 'string',
@@ -58,15 +58,7 @@ const Schema = {
  type: 'array',
  items: {
    type: 'object',
-    required: [
-      'name',
-      'slug',
-      'logo',
-      'description',
-      'settings',
-      'buildCommand',
-      'devCommand',
-    ],
+    required: ['name', 'slug', 'logo', 'description', 'settings'],
    properties: {
      name: { type: 'string' },
      slug: { type: ['string', 'null'] },
@@ -138,8 +130,6 @@ const Schema = {

      dependency: { type: 'string' },
      cachePattern: { type: 'string' },
-      buildCommand: { type: ['string', 'null'] },
-      devCommand: { type: ['string', 'null'] },
      defaultVersion: { type: 'string' },
    },
  },
@@ -6,7 +6,8 @@ import { join } from 'path';
import stringArgv from 'string-argv';
import { debug } from '@vercel/build-utils';
const versionMap = new Map([
-  ['1.16', '1.16'],
+  ['1.17', '1.17.3'],
+  ['1.16', '1.16.10'],
  ['1.15', '1.15.8'],
  ['1.14', '1.14.15'],
  ['1.13', '1.13.15'],

@@ -1,6 +1,6 @@
{
  "name": "@vercel/go",
-  "version": "1.2.4-canary.3",
+  "version": "1.2.4-canary.4",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",

@@ -5,10 +5,10 @@
    { "src": "subdirectory/index.go", "use": "@vercel/go" }
  ],
  "probes": [
-    { "path": "/", "mustContain": "cow:go1.16:RANDOMNESS_PLACEHOLDER" },
+    { "path": "/", "mustContain": "cow:go1.17.3:RANDOMNESS_PLACEHOLDER" },
    {
      "path": "/subdirectory",
-      "mustContain": "subcow:go1.16:RANDOMNESS_PLACEHOLDER"
+      "mustContain": "subcow:go1.17.3:RANDOMNESS_PLACEHOLDER"
    }
  ]
}

@@ -1,3 +1,3 @@
module with-nested

-go 1.12
+go 1.16
2  packages/middleware/.eslintignore (new file)
@@ -0,0 +1,2 @@
entries.js
dist

2  packages/middleware/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
/dist
/test/fixtures/*/.output

30  packages/middleware/build.js (new file)
@@ -0,0 +1,30 @@
#!/usr/bin/env node
const fs = require('fs-extra');
const execa = require('execa');
const { join } = require('path');

async function main() {
  const srcDir = join(__dirname, 'src');
  const outDir = join(__dirname, 'dist');

  // Start fresh
  await fs.remove(outDir);

  await execa(
    'ncc',
    ['build', join(srcDir, 'index.ts'), '-o', outDir, '--external', 'esbuild'],
    {
      stdio: 'inherit',
    }
  );

  await fs.copyFile(
    join(__dirname, 'src/entries.js'),
    join(outDir, 'entries.js')
  );
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
58
packages/middleware/package.json
Normal file
58
packages/middleware/package.json
Normal file
@@ -0,0 +1,58 @@
|
||||
{
|
||||
"name": "vercel-plugin-middleware",
|
||||
"version": "0.0.0-canary.7",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vercel/vercel.git",
|
||||
"directory": "packages/middleware"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "node build",
|
||||
"test-unit": "jest",
|
||||
"prepublishOnly": "node build"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"esbuild": "0.13.12"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@peculiar/webcrypto": "1.2.0",
|
||||
"@types/cookie": "0.4.1",
|
||||
"@types/glob": "7.2.0",
|
||||
"@types/http-proxy": "1.17.7",
|
||||
"@types/jest": "27.0.2",
|
||||
"@types/node": "16.11.6",
|
||||
"@types/node-fetch": "^2",
|
||||
"@types/ua-parser-js": "0.7.36",
|
||||
"@types/uuid": "8.3.1",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"cookie": "0.4.1",
|
||||
"formdata-node": "4.3.1",
|
||||
"glob": "7.2.0",
|
||||
"http-proxy": "1.18.1",
|
||||
"node-fetch": "^2",
|
||||
"ua-parser-js": "1.0.2",
|
||||
"url": "0.11.0",
|
||||
"uuid": "8.3.2",
|
||||
"web-streams-polyfill": "3.1.1"
|
||||
},
|
||||
"jest": {
|
||||
"preset": "ts-jest",
|
||||
"globals": {
|
||||
"ts-jest": {
|
||||
"diagnostics": false,
|
||||
"isolatedModules": true
|
||||
}
|
||||
},
|
||||
"verbose": false,
|
||||
"testEnvironment": "node",
|
||||
"testMatch": [
|
||||
"<rootDir>/test/**/*.test.ts"
|
||||
]
|
||||
}
|
||||
}
|
||||
18  packages/middleware/src/entries.js (new file)
@@ -0,0 +1,18 @@
import * as middleware from './_middleware';
_ENTRIES = typeof _ENTRIES === 'undefined' ? {} : _ENTRIES;
_ENTRIES['middleware_pages/_middleware'] = {
  default: async function (ev) {
    const result = await middleware.default(ev.request, ev);
    return {
      promise: Promise.resolve(),
      waitUntil: Promise.resolve(),
      response:
        result ||
        new Response(null, {
          headers: {
            'x-middleware-next': 1,
          },
        }),
    };
  },
};
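For reference, a minimal sketch (not taken from this commit) of the project-level middleware file that this wrapper imports as `./_middleware`, assuming the new `(request, event)` signature described elsewhere in the plugin:

// Hypothetical root-level _middleware.ts consumed by the wrapper above.
// Returning a Response answers the request; returning nothing lets the
// wrapper emit the pass-through `x-middleware-next` response instead.
export default function middleware(request: Request, event: unknown) {
  if (new URL(request.url).pathname === '/blocked') {
    return new Response('Forbidden', { status: 403 });
  }
}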
323
packages/middleware/src/index.ts
Normal file
323
packages/middleware/src/index.ts
Normal file
@@ -0,0 +1,323 @@
|
||||
import util from 'util';
|
||||
import { extname, join, basename } from 'path';
|
||||
import * as esbuild from 'esbuild';
|
||||
import { promises as fsp } from 'fs';
|
||||
import { IncomingMessage, ServerResponse } from 'http';
|
||||
import libGlob from 'glob';
|
||||
import Proxy from 'http-proxy';
|
||||
|
||||
import { run } from './websandbox';
|
||||
import type { FetchEventResult } from './websandbox/types';
|
||||
|
||||
import { ParsedUrlQuery, stringify as stringifyQs } from 'querystring';
|
||||
import {
|
||||
format as formatUrl,
|
||||
parse as parseUrl,
|
||||
UrlWithParsedQuery,
|
||||
} from 'url';
|
||||
import { toNodeHeaders } from './websandbox/utils';
|
||||
|
||||
const glob = util.promisify(libGlob);
|
||||
const SUPPORTED_EXTENSIONS = ['.js', '.ts'];
|
||||
|
||||
// File name of the `entries.js` file that gets copied into the
|
||||
// project directory. Use a name that is unlikely to conflict.
|
||||
const ENTRIES_NAME = '___vc_entries.js';
|
||||
|
||||
async function getMiddlewareFile(workingDirectory: string) {
|
||||
// Only the root-level `_middleware.*` files are considered.
|
||||
// For more granular routing, the Project's Framework (i.e. Next.js)
|
||||
// middleware support should be used.
|
||||
const middlewareFiles = await glob(join(workingDirectory, '_middleware.*'));
|
||||
|
||||
if (middlewareFiles.length === 0) {
|
||||
// No middleware file at the root of the project, so bail...
|
||||
return;
|
||||
}
|
||||
|
||||
if (middlewareFiles.length > 1) {
|
||||
throw new Error(
|
||||
`Only one middleware file is allowed. Found: ${middlewareFiles.join(
|
||||
', '
|
||||
)}`
|
||||
);
|
||||
}
|
||||
|
||||
const ext = extname(middlewareFiles[0]);
|
||||
if (!SUPPORTED_EXTENSIONS.includes(ext)) {
|
||||
throw new Error(`Unsupported file type: ${ext}`);
|
||||
}
|
||||
|
||||
return middlewareFiles[0];
|
||||
}
|
||||
|
||||
export async function build({ workPath }: { workPath: string }) {
|
||||
const entriesPath = join(workPath, ENTRIES_NAME);
|
||||
const middlewareFile = await getMiddlewareFile(workPath);
|
||||
if (!middlewareFile) return;
|
||||
|
||||
console.log('Compiling middleware file: %j', middlewareFile);
|
||||
|
||||
// Create `_ENTRIES` wrapper
|
||||
await fsp.copyFile(join(__dirname, 'entries.js'), entriesPath);
|
||||
|
||||
// Build
|
||||
try {
|
||||
await esbuild.build({
|
||||
entryPoints: [entriesPath],
|
||||
bundle: true,
|
||||
absWorkingDir: workPath,
|
||||
outfile: join(workPath, '.output/server/pages/_middleware.js'),
|
||||
});
|
||||
} finally {
|
||||
await fsp.unlink(entriesPath);
|
||||
}
|
||||
|
||||
// Write middleware manifest
|
||||
const middlewareManifest = {
|
||||
version: 1,
|
||||
sortedMiddleware: ['/'],
|
||||
middleware: {
|
||||
'/': {
|
||||
env: [],
|
||||
files: ['server/pages/_middleware.js'],
|
||||
name: 'pages/_middleware',
|
||||
page: '/',
|
||||
regexp: '^/.*$',
|
||||
},
|
||||
},
|
||||
};
|
||||
const middlewareManifestData = JSON.stringify(middlewareManifest, null, 2);
|
||||
const middlewareManifestPath = join(
|
||||
workPath,
|
||||
'.output/server/middleware-manifest.json'
|
||||
);
|
||||
await fsp.writeFile(middlewareManifestPath, middlewareManifestData);
|
||||
}
|
||||
|
||||
const stringifyQuery = (req: IncomingMessage, query: ParsedUrlQuery) => {
|
||||
const initialQueryValues = Object.values((req as any).__NEXT_INIT_QUERY);
|
||||
|
||||
return stringifyQs(query, undefined, undefined, {
|
||||
encodeURIComponent(value: any) {
|
||||
if (initialQueryValues.some(val => val === value)) {
|
||||
return encodeURIComponent(value);
|
||||
}
|
||||
return value;
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
// eslint-disable-next-line
|
||||
async function runMiddlewareCatchAll(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
requestId: string,
|
||||
name: string,
|
||||
path: string
|
||||
) {
|
||||
let result: FetchEventResult | null = null;
|
||||
const parsedUrl = parseUrl(req.url!, true);
|
||||
try {
|
||||
result = await runMiddleware({
|
||||
request: req,
|
||||
response: res,
|
||||
name: name,
|
||||
path,
|
||||
requestId: requestId,
|
||||
parsedUrl,
|
||||
parsed: parseUrl(req.url!, true),
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
return { finished: true, error: err };
|
||||
}
|
||||
|
||||
if (result === null) {
|
||||
return { finished: true };
|
||||
}
|
||||
|
||||
if (
|
||||
!result.response.headers.has('x-middleware-rewrite') &&
|
||||
!result.response.headers.has('x-middleware-next') &&
|
||||
!result.response.headers.has('Location')
|
||||
) {
|
||||
result.response.headers.set('x-middleware-refresh', '1');
|
||||
}
|
||||
|
||||
result.response.headers.delete('x-middleware-next');
|
||||
|
||||
for (const [key, value] of Object.entries(
|
||||
toNodeHeaders(result.response.headers)
|
||||
)) {
|
||||
if (key !== 'content-encoding' && value !== undefined) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
const preflight =
|
||||
req.method === 'HEAD' && req.headers['x-middleware-preflight'];
|
||||
|
||||
if (preflight) {
|
||||
res.writeHead(200);
|
||||
res.end();
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
}
|
||||
|
||||
res.statusCode = result.response.status;
|
||||
res.statusMessage = result.response.statusText;
|
||||
|
||||
const location = result.response.headers.get('Location');
|
||||
if (location) {
|
||||
res.statusCode = result.response.status;
|
||||
if (res.statusCode === 308) {
|
||||
res.setHeader('Refresh', `0;url=${location}`);
|
||||
}
|
||||
|
||||
res.end();
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
}
|
||||
|
||||
if (result.response.headers.has('x-middleware-rewrite')) {
|
||||
const rewrite = result.response.headers.get('x-middleware-rewrite')!;
|
||||
const rewriteParsed = parseUrl(rewrite, true);
|
||||
if (rewriteParsed.protocol) {
|
||||
return proxyRequest(req, res, rewriteParsed);
|
||||
}
|
||||
|
||||
(req as any)._nextRewroteUrl = rewrite;
|
||||
(req as any)._nextDidRewrite = (req as any)._nextRewroteUrl !== req.url;
|
||||
|
||||
return {
|
||||
finished: false,
|
||||
pathname: rewriteParsed.pathname,
|
||||
query: {
|
||||
...parsedUrl.query,
|
||||
...rewriteParsed.query,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (result.response.headers.has('x-middleware-refresh')) {
|
||||
res.writeHead(result.response.status);
|
||||
|
||||
if (result.response.body instanceof Buffer) {
|
||||
res.write(result.response.body);
|
||||
} else {
|
||||
//@ts-ignore
|
||||
for await (const chunk of result.response.body || []) {
|
||||
res.write(chunk);
|
||||
}
|
||||
}
|
||||
res.end();
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
finished: false,
|
||||
};
|
||||
}
|
||||
|
||||
const proxyRequest = async (
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
parsedUrl: UrlWithParsedQuery
|
||||
) => {
|
||||
const { query } = parsedUrl;
|
||||
delete (parsedUrl as any).query;
|
||||
parsedUrl.search = stringifyQuery(req, query);
|
||||
|
||||
const target = formatUrl(parsedUrl);
|
||||
const proxy = new Proxy({
|
||||
target,
|
||||
changeOrigin: true,
|
||||
ignorePath: true,
|
||||
xfwd: true,
|
||||
proxyTimeout: 30_000, // limit proxying to 30 seconds
|
||||
});
|
||||
|
||||
await new Promise((proxyResolve, proxyReject) => {
|
||||
let finished = false;
|
||||
|
||||
proxy.on('proxyReq', (proxyReq: any) => {
|
||||
proxyReq.on('close', () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
proxyResolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
proxy.on('error', (err: any) => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
proxyReject(err);
|
||||
}
|
||||
});
|
||||
proxy.web(req, res);
|
||||
});
|
||||
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
};
|
||||
|
||||
async function runMiddleware(params: {
|
||||
request: IncomingMessage;
|
||||
response: ServerResponse;
|
||||
parsedUrl: UrlWithParsedQuery;
|
||||
parsed: UrlWithParsedQuery;
|
||||
requestId: string;
|
||||
name: string;
|
||||
path: string;
|
||||
}): Promise<FetchEventResult | null> {
|
||||
const page: { name?: string; params?: { [key: string]: string } } = {};
|
||||
let result: FetchEventResult | null = null;
|
||||
|
||||
result = await run({
|
||||
name: params.name,
|
||||
path: params.path,
|
||||
request: {
|
||||
headers: params.request.headers,
|
||||
method: params.request.method || 'GET',
|
||||
url: params.request.url!,
|
||||
// url: (params.request as any).__NEXT_INIT_URL,
|
||||
page,
|
||||
},
|
||||
});
|
||||
|
||||
result.waitUntil.catch((error: any) => {
|
||||
console.error(`Uncaught: middleware waitUntil errored`, error);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Should run the middleware in the `vm` sandbox and return the result
|
||||
// back to `vercel dev`. If no middleware file exists then this function
|
||||
// should return `finished: false` (very quickly, since this is being
|
||||
// invoked for every HTTP request!).
|
||||
export async function runDevMiddleware(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
workingDirectory: string
|
||||
): ReturnType<typeof runMiddlewareCatchAll> {
|
||||
const middlewareFile = await getMiddlewareFile(workingDirectory);
|
||||
if (!middlewareFile) {
|
||||
return {
|
||||
finished: false,
|
||||
};
|
||||
}
|
||||
return runMiddlewareCatchAll(
|
||||
req,
|
||||
res,
|
||||
'',
|
||||
basename(middlewareFile),
|
||||
middlewareFile
|
||||
);
|
||||
}
|
||||
65
packages/middleware/src/websandbox/adapter.ts
Normal file
65
packages/middleware/src/websandbox/adapter.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import type { RequestData, FetchEventResult } from './types';
|
||||
import { DeprecationError } from './error';
|
||||
import { fromNodeHeaders } from './utils';
|
||||
import { NextFetchEvent } from './spec-extension/fetch-event';
|
||||
import { NextRequest, RequestInit } from './spec-extension/request';
|
||||
import { SpecResponse } from './spec-extension/response';
|
||||
import { waitUntilSymbol } from './spec-compliant/fetch-event';
|
||||
import { Response } from 'node-fetch';
|
||||
|
||||
export async function adapter(params: {
|
||||
handler: (request: NextRequest, event: NextFetchEvent) => Promise<Response>;
|
||||
page: string;
|
||||
request: RequestData;
|
||||
}): Promise<FetchEventResult> {
|
||||
const url = params.request.url.startsWith('/')
|
||||
? `https://${params.request.headers.host}${params.request.url}`
|
||||
: params.request.url;
|
||||
|
||||
const request = new NextRequestHint({
|
||||
page: params.page,
|
||||
input: url,
|
||||
init: {
|
||||
geo: params.request.geo,
|
||||
//@ts-ignore
|
||||
headers: fromNodeHeaders(params.request.headers),
|
||||
ip: params.request.ip,
|
||||
method: params.request.method,
|
||||
page: params.request.page,
|
||||
},
|
||||
});
|
||||
|
||||
const event = new NextFetchEvent({ request, page: params.page });
|
||||
const original = await params.handler(request, event);
|
||||
|
||||
return {
|
||||
response: original || SpecResponse.next(),
|
||||
waitUntil: Promise.all(event[waitUntilSymbol]),
|
||||
};
|
||||
}
|
||||
|
||||
class NextRequestHint extends NextRequest {
|
||||
sourcePage: string;
|
||||
|
||||
constructor(params: {
|
||||
init: RequestInit;
|
||||
input: Request | string;
|
||||
page: string;
|
||||
}) {
|
||||
//@ts-ignore
|
||||
super(params.input, params.init);
|
||||
this.sourcePage = params.page;
|
||||
}
|
||||
|
||||
get request() {
|
||||
throw new DeprecationError({ page: this.sourcePage });
|
||||
}
|
||||
|
||||
respondWith() {
|
||||
throw new DeprecationError({ page: this.sourcePage });
|
||||
}
|
||||
|
||||
waitUntil() {
|
||||
throw new DeprecationError({ page: this.sourcePage });
|
||||
}
|
||||
}
|
||||
12  packages/middleware/src/websandbox/error.ts (new file)
@@ -0,0 +1,12 @@
export class DeprecationError extends Error {
  constructor({ page }: { page: string }) {
    super(`The middleware "${page}" accepts an async API directly with the form:

  export function middleware(request, event) {
    return new Response("Hello " + request.url)
  }

  Read more: https://nextjs.org/docs/messages/middleware-new-signature
  `);
  }
}
76
packages/middleware/src/websandbox/form-data.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { isBlob } from './is';
|
||||
import { streamToIterator } from './utils';
|
||||
|
||||
const carriage = '\r\n';
|
||||
const dashes = '--';
|
||||
const carriageLength = 2;
|
||||
|
||||
function escape(str: string) {
|
||||
return str.replace(/"/g, '\\"');
|
||||
}
|
||||
|
||||
function getFooter(boundary: string) {
|
||||
return `${dashes}${boundary}${dashes}${carriage.repeat(2)}`;
|
||||
}
|
||||
|
||||
function getHeader(boundary: string, name: string, field: FormDataEntryValue) {
|
||||
let header = '';
|
||||
header += `${dashes}${boundary}${carriage}`;
|
||||
header += `Content-Disposition: form-data; name="${escape(name)}"`;
|
||||
|
||||
if (isBlob(field)) {
|
||||
header += `; filename="${escape(field.name)}"${carriage}`;
|
||||
header += `Content-Type: ${field.type || 'application/octet-stream'}`;
|
||||
}
|
||||
|
||||
return `${header}${carriage.repeat(2)}`;
|
||||
}
|
||||
|
||||
export function getBoundary() {
|
||||
const array = new Uint8Array(32);
|
||||
crypto.getRandomValues(array);
|
||||
|
||||
let str = '';
|
||||
for (let i = 0; i < array.length; i++) {
|
||||
str += array[i].toString(16).padStart(2, '0');
|
||||
}
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
export async function* formDataIterator(
|
||||
form: FormData,
|
||||
boundary: string
|
||||
): AsyncIterableIterator<Uint8Array> {
|
||||
const encoder = new TextEncoder();
|
||||
//@ts-ignore
|
||||
for (const [name, value] of form) {
|
||||
yield encoder.encode(getHeader(boundary, name, value));
|
||||
|
||||
if (isBlob(value)) {
|
||||
// @ts-ignore /shrug
|
||||
const stream: ReadableStream<Uint8Array> = value.stream();
|
||||
yield* streamToIterator(stream);
|
||||
} else {
|
||||
yield encoder.encode(value);
|
||||
}
|
||||
|
||||
yield encoder.encode(carriage);
|
||||
}
|
||||
|
||||
yield encoder.encode(getFooter(boundary));
|
||||
}
|
||||
|
||||
export function getFormDataLength(form: FormData, boundary: string) {
|
||||
let length = 0;
|
||||
|
||||
//@ts-ignore
|
||||
for (const [name, value] of form) {
|
||||
length += Buffer.byteLength(getHeader(boundary, name, value));
|
||||
length += isBlob(value) ? value.size : Buffer.byteLength(String(value));
|
||||
length += carriageLength;
|
||||
}
|
||||
|
||||
length += Buffer.byteLength(getFooter(boundary));
|
||||
return length;
|
||||
}
|
||||
1
packages/middleware/src/websandbox/index.ts
Normal file
@@ -0,0 +1 @@
export * from './sandbox/sandbox';
80
packages/middleware/src/websandbox/is.ts
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
|
||||
* The ArrayBuffer object is used to represent a generic, fixed-length raw
|
||||
* binary data buffer. It is an array of bytes, often referred to in other
|
||||
* languages as a "byte array". You cannot directly manipulate the contents of
|
||||
* an ArrayBuffer; instead, you create one of the typed array objects or a
|
||||
* DataView object which represents the buffer in a specific format, and use
|
||||
* that to read and write the contents of the buffer.
|
||||
*/
|
||||
export function isArrayBuffer(value: any): value is ArrayBuffer {
|
||||
return Object.prototype.isPrototypeOf.call(ArrayBuffer, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* ArrayBufferView is a helper type representing any of the following JS
|
||||
* TypedArray types which correspond to the list below. It is checked by duck
|
||||
* typing the provided object.
|
||||
*/
|
||||
export function isArrayBufferView(value: any): value is ArrayBufferView {
|
||||
return ArrayBuffer.isView(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The DataView view provides a low-level interface for reading and writing
|
||||
* multiple number types in a binary ArrayBuffer, without having to care about
|
||||
* the platform's endianness.
|
||||
*/
|
||||
export function isDataView(value: any): value is DataView {
|
||||
return Object.prototype.isPrototypeOf.call(DataView, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The URLSearchParams interface defines utility methods to work with the
|
||||
* query string of a URL.
|
||||
*/
|
||||
export function isURLSearchParams(value: any): value is URLSearchParams {
|
||||
return Object.prototype.isPrototypeOf.call(URLSearchParams, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The Blob object represents a blob, which is a file-like object of immutable,
|
||||
* raw data; they can be read as text or binary data. Blobs can represent data
|
||||
* that isn't necessarily in a JavaScript-native format.
|
||||
*/
|
||||
export function isBlob(value: any): value is Blob {
|
||||
return Object.prototype.isPrototypeOf.call(Blob, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The FormData interface provides a way to easily construct a set of key/value
|
||||
* pairs representing form fields and their values, which can then be easily
|
||||
* sent using the XMLHttpRequest.send() method. It uses the same format a
|
||||
* form would use if the encoding type were set to "multipart/form-data".
|
||||
*/
|
||||
export function isFormData(value: any): value is FormData {
|
||||
return Object.prototype.isPrototypeOf.call(FormData, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The ReadableStream interface of the Streams API represents a readable stream
|
||||
* of byte data. Because we want to allow alternative implementations we also
|
||||
* duck type here.
|
||||
*/
|
||||
export function isReadableStream(value: any): value is ReadableStream {
|
||||
return (
|
||||
value &&
|
||||
(Object.prototype.isPrototypeOf.call(ReadableStream, value) ||
|
||||
(value.constructor.name === 'ReadableStream' && 'getReader' in value))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if an object implements an Iterable interface.
|
||||
*/
|
||||
export function isIterable(object: any): object is Iterable<unknown> {
|
||||
return (
|
||||
object &&
|
||||
Symbol.iterator in object &&
|
||||
typeof object[Symbol.iterator] === 'function'
|
||||
);
|
||||
}
|
||||
127
packages/middleware/src/websandbox/sandbox/polyfills.ts
Normal file
@@ -0,0 +1,127 @@
|
||||
import { Crypto as WebCrypto } from '@peculiar/webcrypto';
|
||||
import { TransformStream } from 'web-streams-polyfill';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import crypto from 'crypto';
|
||||
|
||||
export function atob(b64Encoded: string) {
|
||||
return Buffer.from(b64Encoded, 'base64').toString('binary');
|
||||
}
|
||||
|
||||
export function btoa(str: string) {
|
||||
return Buffer.from(str, 'binary').toString('base64');
|
||||
}
|
||||
|
||||
class TextEncoderRuntime {
|
||||
encoder: TextEncoder;
|
||||
|
||||
constructor() {
|
||||
this.encoder = new TextEncoder();
|
||||
}
|
||||
|
||||
get encoding() {
|
||||
return this.encoder.encoding;
|
||||
}
|
||||
|
||||
public encode(input: string) {
|
||||
return this.encoder.encode(input);
|
||||
}
|
||||
}
|
||||
|
||||
class TextDecoderRuntime {
|
||||
decoder: TextDecoder;
|
||||
|
||||
constructor() {
|
||||
this.decoder = new TextDecoder();
|
||||
}
|
||||
|
||||
get encoding() {
|
||||
return this.decoder.encoding;
|
||||
}
|
||||
|
||||
get fatal() {
|
||||
return this.decoder.fatal;
|
||||
}
|
||||
|
||||
get ignoreBOM() {
|
||||
return this.decoder.ignoreBOM;
|
||||
}
|
||||
|
||||
public decode(input: BufferSource, options?: TextDecodeOptions) {
|
||||
return this.decoder.decode(input, options);
|
||||
}
|
||||
}
|
||||
|
||||
export { TextDecoderRuntime as TextDecoder };
|
||||
export { TextEncoderRuntime as TextEncoder };
|
||||
|
||||
export class Crypto extends WebCrypto {
|
||||
// @ts-ignore Remove once types are updated and we deprecate node 12
|
||||
randomUUID = crypto.randomUUID || uuid;
|
||||
}
|
||||
|
||||
export class ReadableStream<T> {
|
||||
constructor(opts: UnderlyingSource = {}) {
|
||||
let closed = false;
|
||||
let pullPromise: any;
|
||||
|
||||
let transformController: TransformStreamDefaultController;
|
||||
const { readable, writable } = new TransformStream(
|
||||
{
|
||||
start: (controller: TransformStreamDefaultController) => {
|
||||
transformController = controller;
|
||||
},
|
||||
},
|
||||
undefined,
|
||||
{
|
||||
highWaterMark: 1,
|
||||
}
|
||||
);
|
||||
|
||||
const writer = writable.getWriter();
|
||||
const encoder = new TextEncoder();
|
||||
const controller: ReadableStreamController<T> = {
|
||||
get desiredSize() {
|
||||
return transformController.desiredSize;
|
||||
},
|
||||
close: () => {
|
||||
if (!closed) {
|
||||
closed = true;
|
||||
writer.close();
|
||||
}
|
||||
},
|
||||
enqueue: (chunk: T) => {
|
||||
writer.write(typeof chunk === 'string' ? encoder.encode(chunk) : chunk);
|
||||
pull();
|
||||
},
|
||||
error: (reason: any) => {
|
||||
transformController.error(reason);
|
||||
},
|
||||
};
|
||||
|
||||
const pull = () => {
|
||||
if (opts.pull) {
|
||||
if (!pullPromise) {
|
||||
pullPromise = Promise.resolve().then(() => {
|
||||
pullPromise = 0;
|
||||
opts.pull!(controller);
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (opts.start) {
|
||||
opts.start(controller);
|
||||
}
|
||||
|
||||
if (opts.cancel) {
|
||||
readable.cancel = (reason: any) => {
|
||||
opts.cancel!(reason);
|
||||
return readable.cancel(reason);
|
||||
};
|
||||
}
|
||||
|
||||
pull();
|
||||
|
||||
return readable;
|
||||
}
|
||||
}
|
||||
226
packages/middleware/src/websandbox/sandbox/sandbox.ts
Normal file
@@ -0,0 +1,226 @@
|
||||
import type { RequestData, FetchEventResult, NodeHeaders } from '../types';
|
||||
import { Blob, File, FormData } from 'formdata-node';
|
||||
import { dirname, extname, resolve } from 'path';
|
||||
import { readFileSync } from 'fs';
|
||||
import { TransformStream } from 'web-streams-polyfill';
|
||||
import * as polyfills from './polyfills';
|
||||
import cookie from 'cookie';
|
||||
import vm from 'vm';
|
||||
import fetch, {
|
||||
Headers,
|
||||
RequestInit,
|
||||
Response,
|
||||
Request,
|
||||
RequestInfo,
|
||||
} from 'node-fetch';
|
||||
import { adapter } from '../adapter';
|
||||
import * as esbuild from 'esbuild';
|
||||
import m from 'module';
|
||||
|
||||
interface URLLike {
|
||||
href: string;
|
||||
}
|
||||
|
||||
let cache:
|
||||
| {
|
||||
context: { [key: string]: any };
|
||||
paths: Map<string, string>;
|
||||
require: Map<string, any>;
|
||||
sandbox: vm.Context;
|
||||
}
|
||||
| undefined;
|
||||
|
||||
const WEBPACK_HASH_REGEX =
|
||||
/__webpack_require__\.h = function\(\) \{ return "[0-9a-f]+"; \}/g;
|
||||
|
||||
/**
|
||||
* The cache is cleared when a path is cached and the content has changed. The
|
||||
* hack ignores changes that only change the compilation hash. Instead it is
|
||||
* probably better to disable HMR for middleware entries.
|
||||
*/
|
||||
export function clearSandboxCache(path: string, content: Buffer | string) {
|
||||
const prev = cache?.paths.get(path)?.replace(WEBPACK_HASH_REGEX, '');
|
||||
if (prev === undefined) return;
|
||||
if (prev === content.toString().replace(WEBPACK_HASH_REGEX, '')) return;
|
||||
cache = undefined;
|
||||
}
|
||||
|
||||
export async function run(params: {
|
||||
name: string;
|
||||
path: string;
|
||||
request: RequestData;
|
||||
}): Promise<FetchEventResult> {
|
||||
if (cache === undefined) {
|
||||
const context: { [key: string]: any } = {
|
||||
atob: polyfills.atob,
|
||||
Blob,
|
||||
btoa: polyfills.btoa,
|
||||
clearInterval,
|
||||
clearTimeout,
|
||||
console: {
|
||||
assert: console.assert.bind(console),
|
||||
error: console.error.bind(console),
|
||||
info: console.info.bind(console),
|
||||
log: console.log.bind(console),
|
||||
time: console.time.bind(console),
|
||||
timeEnd: console.timeEnd.bind(console),
|
||||
timeLog: console.timeLog.bind(console),
|
||||
warn: console.warn.bind(console),
|
||||
},
|
||||
Crypto: polyfills.Crypto,
|
||||
crypto: new polyfills.Crypto(),
|
||||
Response,
|
||||
Headers,
|
||||
Request,
|
||||
fetch: (input: RequestInfo, init: RequestInit = {}) => {
|
||||
const url = getFetchURL(input, params.request.headers);
|
||||
init.headers = getFetchHeaders(params.name, init);
|
||||
if (isRequestLike(input)) {
|
||||
return fetch(url, {
|
||||
...init,
|
||||
headers: {
|
||||
...Object.fromEntries(input.headers),
|
||||
...Object.fromEntries(init.headers),
|
||||
},
|
||||
});
|
||||
}
|
||||
return fetch(url, init);
|
||||
},
|
||||
File,
|
||||
FormData,
|
||||
process: { env: { ...process.env } },
|
||||
ReadableStream: polyfills.ReadableStream,
|
||||
setInterval,
|
||||
setTimeout,
|
||||
TextDecoder: polyfills.TextDecoder,
|
||||
TextEncoder: polyfills.TextEncoder,
|
||||
TransformStream,
|
||||
URL,
|
||||
URLSearchParams,
|
||||
};
|
||||
|
||||
context.self = context;
|
||||
|
||||
cache = {
|
||||
context,
|
||||
require: new Map<string, any>([
|
||||
[require.resolve('cookie'), { exports: cookie }],
|
||||
]),
|
||||
paths: new Map<string, string>(),
|
||||
sandbox: vm.createContext(context),
|
||||
};
|
||||
}
|
||||
try {
|
||||
const content = readFileSync(params.path, 'utf-8');
|
||||
const esBuildResult = esbuild.transformSync(content, {
|
||||
format: 'cjs',
|
||||
});
|
||||
const x = vm.runInNewContext(m.wrap(esBuildResult.code), cache.sandbox, {
|
||||
filename: params.path,
|
||||
});
|
||||
const module = {
|
||||
exports: {},
|
||||
loaded: false,
|
||||
id: params.path,
|
||||
};
|
||||
x(
|
||||
module.exports,
|
||||
sandboxRequire.bind(null, params.path),
|
||||
module,
|
||||
dirname(params.path),
|
||||
params.path
|
||||
);
|
||||
const adapterResult = await adapter({
|
||||
request: params.request,
|
||||
// @ts-ignore
|
||||
handler: module.exports.default,
|
||||
page: params.path,
|
||||
});
|
||||
return adapterResult;
|
||||
} catch (error) {
|
||||
cache = undefined;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function sandboxRequire(referrer: string, specifier: string) {
|
||||
const resolved = require.resolve(specifier, {
|
||||
paths: [resolve(dirname(referrer))],
|
||||
});
|
||||
|
||||
const cached = cache?.require.get(resolved);
|
||||
if (cached !== undefined) {
|
||||
return cached.exports;
|
||||
}
|
||||
|
||||
const module = {
|
||||
exports: {},
|
||||
loaded: false,
|
||||
id: resolved,
|
||||
};
|
||||
|
||||
cache?.require.set(resolved, module);
|
||||
|
||||
const transformOptions: esbuild.TransformOptions = {
|
||||
format: 'cjs',
|
||||
};
|
||||
if (extname(resolved) === '.json') {
|
||||
transformOptions.loader = 'json';
|
||||
}
|
||||
const transformedContent = esbuild.transformSync(
|
||||
readFileSync(resolved, 'utf-8'),
|
||||
transformOptions
|
||||
).code;
|
||||
const fn = vm.runInContext(
|
||||
`(function(module,exports,require,__dirname,__filename) {${transformedContent}\n})`,
|
||||
cache!.sandbox
|
||||
);
|
||||
|
||||
try {
|
||||
fn(
|
||||
module,
|
||||
module.exports,
|
||||
sandboxRequire.bind(null, resolved),
|
||||
dirname(resolved),
|
||||
resolved
|
||||
);
|
||||
} finally {
|
||||
cache?.require.delete(resolved);
|
||||
}
|
||||
module.loaded = true;
|
||||
return module.exports;
|
||||
}
|
||||
|
||||
function getFetchHeaders(middleware: string, init: RequestInit) {
|
||||
const headers = new Headers(init.headers ?? {});
|
||||
const prevsub = headers.get(`x-middleware-subrequest`) || '';
|
||||
const value = prevsub.split(':').concat(middleware).join(':');
|
||||
headers.set(`x-middleware-subrequest`, value);
|
||||
headers.set(`user-agent`, `Next.js Middleware`);
|
||||
return headers;
|
||||
}
|
||||
|
||||
function getFetchURL(input: RequestInfo, headers: NodeHeaders = {}): string {
|
||||
const initurl = isRequestLike(input)
|
||||
? input.url
|
||||
: isURLLike(input)
|
||||
? input.href
|
||||
: input;
|
||||
if (initurl.startsWith('/')) {
|
||||
const host = headers.host?.toString();
|
||||
const localhost =
|
||||
host === '127.0.0.1' ||
|
||||
host === 'localhost' ||
|
||||
host?.startsWith('localhost:');
|
||||
return `${localhost ? 'http' : 'https'}://${host}${initurl}`;
|
||||
}
|
||||
return initurl;
|
||||
}
|
||||
|
||||
function isURLLike(obj: unknown): obj is URLLike {
|
||||
return Boolean(obj && typeof obj === 'object' && 'href' in obj);
|
||||
}
|
||||
|
||||
function isRequestLike(obj: unknown): obj is Request {
|
||||
return Boolean(obj && typeof obj === 'object' && 'url' in obj);
|
||||
}
|
||||
237
packages/middleware/src/websandbox/spec-compliant/body.ts
Normal file
@@ -0,0 +1,237 @@
|
||||
import { formDataIterator, getBoundary } from '../form-data';
|
||||
import { streamToIterator } from '../utils';
|
||||
import * as util from '../is';
|
||||
import { URLSearchParams } from 'url';
|
||||
|
||||
const INTERNALS = Symbol('internal body');
|
||||
|
||||
abstract class BaseBody implements Body {
|
||||
abstract headers: Headers;
|
||||
|
||||
[INTERNALS]: {
|
||||
bodyInit?: BodyInit;
|
||||
boundary?: string;
|
||||
disturbed: boolean;
|
||||
stream?: ReadableStream<Uint8Array> | null;
|
||||
};
|
||||
|
||||
constructor(bodyInit?: BodyInit) {
|
||||
this[INTERNALS] = {
|
||||
bodyInit: bodyInit,
|
||||
disturbed: false,
|
||||
};
|
||||
|
||||
if (util.isFormData(bodyInit)) {
|
||||
this[INTERNALS].boundary = getBoundary();
|
||||
}
|
||||
}
|
||||
|
||||
get body(): ReadableStream<Uint8Array> | null {
|
||||
const body = this[INTERNALS].bodyInit;
|
||||
if (!body) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
const that = this;
|
||||
if (!this[INTERNALS].stream) {
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
if (typeof body === 'string') {
|
||||
const encoder = new TextEncoder();
|
||||
controller.enqueue(encoder.encode(body));
|
||||
} else if (util.isBlob(body)) {
|
||||
const buffer = await body.arrayBuffer();
|
||||
controller.enqueue(new Uint8Array(buffer));
|
||||
} else if (util.isDataView(body)) {
|
||||
controller.enqueue(body);
|
||||
} else if (util.isArrayBuffer(body)) {
|
||||
controller.enqueue(body);
|
||||
} else if (util.isArrayBufferView(body)) {
|
||||
controller.enqueue(body);
|
||||
} else if (util.isURLSearchParams(body)) {
|
||||
const encoder = new TextEncoder();
|
||||
controller.enqueue(encoder.encode(body.toString()));
|
||||
} else if (util.isFormData(body)) {
|
||||
for await (const chunk of formDataIterator(
|
||||
body,
|
||||
that[INTERNALS].boundary!
|
||||
)) {
|
||||
controller.enqueue(chunk);
|
||||
}
|
||||
} else if (util.isReadableStream(body)) {
|
||||
for await (const chunk of streamToIterator(body)) {
|
||||
if (chunk.length) {
|
||||
controller.enqueue(chunk);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const text = Object.prototype.toString.call(body);
|
||||
const encoder = new TextEncoder();
|
||||
controller.enqueue(encoder.encode(text));
|
||||
}
|
||||
|
||||
controller.close();
|
||||
},
|
||||
});
|
||||
|
||||
// Spy on reading chunks to set the stream as disturbed
|
||||
const getReader = readable.getReader.bind(readable);
|
||||
readable.getReader = () => {
|
||||
const reader = getReader();
|
||||
const read = reader.read.bind(reader);
|
||||
reader.read = () => {
|
||||
this[INTERNALS].disturbed = true;
|
||||
return read();
|
||||
};
|
||||
return reader;
|
||||
};
|
||||
|
||||
this[INTERNALS].stream = readable;
|
||||
}
|
||||
|
||||
return this[INTERNALS].stream!;
|
||||
}
|
||||
|
||||
get bodyUsed(): boolean {
|
||||
return this[INTERNALS].disturbed;
|
||||
}
|
||||
|
||||
_consume() {
|
||||
if (this[INTERNALS].disturbed) {
|
||||
return Promise.reject(
|
||||
new TypeError(
|
||||
`Body has already been used. It can only be used once. Use tee() first if you need to read it twice.`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
this[INTERNALS].disturbed = true;
|
||||
const body = this.body;
|
||||
return new Promise<Uint8Array>((resolve, reject) => {
|
||||
let buffer = new Uint8Array(0);
|
||||
if (!body) {
|
||||
return resolve(buffer);
|
||||
}
|
||||
|
||||
const reader = body.getReader();
|
||||
(function pump() {
|
||||
reader.read().then(({ value, done }) => {
|
||||
if (done) {
|
||||
return resolve(buffer);
|
||||
} else if (value) {
|
||||
const merge = new Uint8Array(buffer.length + value.length);
|
||||
merge.set(buffer);
|
||||
merge.set(value, buffer.length);
|
||||
buffer = merge;
|
||||
}
|
||||
|
||||
pump();
|
||||
}, reject);
|
||||
})();
|
||||
});
|
||||
}
|
||||
|
||||
async arrayBuffer() {
|
||||
const buffer = await this._consume();
|
||||
const arrayBuffer = new ArrayBuffer(buffer.length);
|
||||
const view = new Uint8Array(arrayBuffer);
|
||||
|
||||
for (let i = 0; i < buffer.length; ++i) {
|
||||
view[i] = buffer[i];
|
||||
}
|
||||
|
||||
return arrayBuffer;
|
||||
}
|
||||
|
||||
async blob() {
|
||||
const buffer = await this._consume();
|
||||
return new Blob([buffer]);
|
||||
}
|
||||
|
||||
async formData() {
|
||||
const bodyInit = this[INTERNALS].bodyInit;
|
||||
if (util.isURLSearchParams(bodyInit)) {
|
||||
const form = new FormData();
|
||||
for (const [key, value] of bodyInit) {
|
||||
form.append(key, value);
|
||||
}
|
||||
return form;
|
||||
} else if (util.isFormData(bodyInit)) {
|
||||
return bodyInit;
|
||||
} else {
|
||||
throw new TypeError(
|
||||
`Unrecognized Content-Type header value. FormData can only parse the following MIME types: multipart/form-data, application/x-www-form-urlencoded.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async text() {
|
||||
const decoder = new TextDecoder();
|
||||
const buffer = await this._consume();
|
||||
return decoder.decode(buffer);
|
||||
}
|
||||
|
||||
async json() {
|
||||
const text = await this.text();
|
||||
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
} catch (err: any) {
|
||||
throw new TypeError(`invalid json body reason: ${err.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export { BaseBody as Body };
|
||||
|
||||
export type BodyInit =
|
||||
| null
|
||||
| string
|
||||
| Blob
|
||||
| BufferSource
|
||||
| FormData
|
||||
| URLSearchParams
|
||||
| ReadableStream<Uint8Array>;
|
||||
|
||||
export function extractContentType(instance: BaseBody) {
|
||||
const body = instance[INTERNALS].bodyInit;
|
||||
if (typeof body === 'string') {
|
||||
return 'text/plain;charset=UTF-8';
|
||||
} else if (util.isBlob(body)) {
|
||||
return body.type;
|
||||
} else if (util.isDataView(body)) {
|
||||
return null;
|
||||
} else if (util.isArrayBuffer(body)) {
|
||||
return null;
|
||||
} else if (util.isArrayBufferView(body)) {
|
||||
return null;
|
||||
} else if (util.isURLSearchParams(body)) {
|
||||
return 'application/x-www-form-urlencoded;charset=UTF-8';
|
||||
} else if (util.isFormData(body)) {
|
||||
return `multipart/form-data;boundary=${instance[INTERNALS].boundary}`;
|
||||
} else if (util.isReadableStream(body)) {
|
||||
return null;
|
||||
} else {
|
||||
return 'text/plain;charset=UTF-8';
|
||||
}
|
||||
}
|
||||
|
||||
export function cloneBody(instance: BaseBody) {
|
||||
if (instance.bodyUsed) {
|
||||
throw new Error('cannot clone body after it is used');
|
||||
}
|
||||
|
||||
const body = instance[INTERNALS].bodyInit;
|
||||
if (util.isReadableStream(body)) {
|
||||
const [r1, r2] = body.tee();
|
||||
instance[INTERNALS].bodyInit = r1;
|
||||
return r2;
|
||||
}
|
||||
|
||||
return body || null;
|
||||
}
|
||||
|
||||
export function getInstanceBody(instance: BaseBody) {
|
||||
return instance[INTERNALS].bodyInit;
|
||||
}
|
||||
@@ -0,0 +1,26 @@
export const responseSymbol = Symbol('response');
export const passThroughSymbol = Symbol('passThrough');
export const waitUntilSymbol = Symbol('waitUntil');

export class FetchEvent {
  readonly [waitUntilSymbol]: Promise<any>[] = [];
  [responseSymbol]?: Promise<Response>;
  [passThroughSymbol] = false;

  // eslint-disable-next-line @typescript-eslint/no-useless-constructor
  constructor() {}

  respondWith(response: Response | Promise<Response>): void {
    if (!this[responseSymbol]) {
      this[responseSymbol] = Promise.resolve(response);
    }
  }

  passThroughOnException(): void {
    this[passThroughSymbol] = true;
  }

  waitUntil(promise: Promise<any>): void {
    this[waitUntilSymbol].push(promise);
  }
}
238
packages/middleware/src/websandbox/spec-compliant/headers.ts
Normal file
@@ -0,0 +1,238 @@
|
||||
import { isIterable } from '../is';
|
||||
|
||||
const MAP = Symbol('map');
|
||||
const INTERNAL = Symbol('internal');
|
||||
const INVALID_TOKEN_REGEX = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
|
||||
const INVALID_HEADER_CHAR_REGEX = /[^\t\x20-\x7e\x80-\xff]/;
|
||||
|
||||
class BaseHeaders implements Headers {
|
||||
[MAP]: { [k: string]: string[] } = {};
|
||||
|
||||
constructor(init?: HeadersInit) {
|
||||
if (init instanceof BaseHeaders) {
|
||||
const rawHeaders = init.raw();
|
||||
for (const headerName of Object.keys(rawHeaders)) {
|
||||
for (const value of rawHeaders[headerName]) {
|
||||
this.append(headerName, value);
|
||||
}
|
||||
}
|
||||
} else if (isIterable(init)) {
|
||||
const pairs = [];
|
||||
for (const pair of init) {
|
||||
if (!isIterable(pair)) {
|
||||
throw new TypeError('Each header pair must be iterable');
|
||||
}
|
||||
pairs.push(Array.from(pair));
|
||||
}
|
||||
|
||||
for (const pair of pairs) {
|
||||
if (pair.length !== 2) {
|
||||
throw new TypeError('Each header pair must be a name/value tuple');
|
||||
}
|
||||
this.append(pair[0], pair[1]);
|
||||
}
|
||||
} else if (typeof init === 'object') {
|
||||
for (const key of Object.keys(init)) {
|
||||
// @ts-ignore
|
||||
this.append(key, init[key]);
|
||||
}
|
||||
} else if (init) {
|
||||
throw new TypeError('Provided initializer must be an object');
|
||||
}
|
||||
}
|
||||
|
||||
get(name: string) {
|
||||
const _name = `${name}`;
|
||||
validateName(_name);
|
||||
const key = find(this[MAP], _name);
|
||||
if (key === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this[MAP][key].join(', ');
|
||||
}
|
||||
|
||||
forEach(
|
||||
callback: (value: string, name: string, parent: BaseHeaders) => void,
|
||||
thisArg: any = undefined
|
||||
): void {
|
||||
let pairs = getHeaders(this);
|
||||
let i = 0;
|
||||
while (i < pairs.length) {
|
||||
const [name, value] = pairs[i];
|
||||
callback.call(thisArg, value, name, this);
|
||||
pairs = getHeaders(this);
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
set(name: string, value: string) {
|
||||
name = `${name}`;
|
||||
value = `${value}`;
|
||||
validateName(name);
|
||||
validateValue(value);
|
||||
const key = find(this[MAP], name);
|
||||
this[MAP][key !== undefined ? key : name] = [value];
|
||||
}
|
||||
|
||||
append(name: string, value: string) {
|
||||
name = `${name}`;
|
||||
value = `${value}`;
|
||||
validateName(name);
|
||||
validateValue(value);
|
||||
const key = find(this[MAP], name);
|
||||
if (key !== undefined) {
|
||||
this[MAP][key].push(value);
|
||||
} else {
|
||||
this[MAP][name] = [value];
|
||||
}
|
||||
}
|
||||
|
||||
has(name: string) {
|
||||
name = `${name}`;
|
||||
validateName(name);
|
||||
return find(this[MAP], name) !== undefined;
|
||||
}
|
||||
|
||||
delete(name: string) {
|
||||
name = `${name}`;
|
||||
validateName(name);
|
||||
const key = find(this[MAP], name);
|
||||
if (key !== undefined) {
|
||||
delete this[MAP][key];
|
||||
}
|
||||
}
|
||||
|
||||
raw() {
|
||||
return this[MAP];
|
||||
}
|
||||
|
||||
keys() {
|
||||
return createHeadersIterator(this, 'key');
|
||||
}
|
||||
|
||||
values() {
|
||||
return createHeadersIterator(this, 'value');
|
||||
}
|
||||
|
||||
entries() {
|
||||
return createHeadersIterator(this, 'key+value');
|
||||
}
|
||||
|
||||
[Symbol.iterator]() {
|
||||
return createHeadersIterator(this, 'key+value');
|
||||
}
|
||||
}
|
||||
|
||||
function createHeadersIterator(
|
||||
target: BaseHeaders,
|
||||
kind: 'key' | 'value' | 'key+value'
|
||||
) {
|
||||
const iterator = Object.create(HeadersIteratorPrototype);
|
||||
iterator[INTERNAL] = {
|
||||
target,
|
||||
kind,
|
||||
index: 0,
|
||||
};
|
||||
return iterator;
|
||||
}
|
||||
|
||||
function validateName(name: string) {
|
||||
name = `${name}`;
|
||||
if (INVALID_TOKEN_REGEX.test(name)) {
|
||||
throw new TypeError(`${name} is not a legal HTTP header name`);
|
||||
}
|
||||
}
|
||||
|
||||
function validateValue(value: string) {
|
||||
value = `${value}`;
|
||||
if (INVALID_HEADER_CHAR_REGEX.test(value)) {
|
||||
throw new TypeError(`${value} is not a legal HTTP header value`);
|
||||
}
|
||||
}
|
||||
|
||||
function find(
|
||||
map: { [k: string]: string[] },
|
||||
name: string
|
||||
): string | undefined {
|
||||
name = name.toLowerCase();
|
||||
for (const key in map) {
|
||||
if (key.toLowerCase() === name) {
|
||||
return key;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
Object.defineProperty(BaseHeaders.prototype, Symbol.toStringTag, {
|
||||
value: 'Headers',
|
||||
writable: false,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
});
|
||||
|
||||
Object.defineProperties(BaseHeaders.prototype, {
|
||||
append: { enumerable: true },
|
||||
delete: { enumerable: true },
|
||||
entries: { enumerable: true },
|
||||
forEach: { enumerable: true },
|
||||
get: { enumerable: true },
|
||||
has: { enumerable: true },
|
||||
keys: { enumerable: true },
|
||||
raw: { enumerable: false },
|
||||
set: { enumerable: true },
|
||||
values: { enumerable: true },
|
||||
});
|
||||
|
||||
function getHeaders(
|
||||
headers: BaseHeaders,
|
||||
kind: 'key' | 'value' | 'key+value' = 'key+value'
|
||||
) {
|
||||
const fn =
|
||||
kind === 'key'
|
||||
? (key: string) => key.toLowerCase()
|
||||
: kind === 'value'
|
||||
? (key: string) => headers[MAP][key].join(', ')
|
||||
: (key: string) => [key.toLowerCase(), headers[MAP][key].join(', ')];
|
||||
|
||||
return Object.keys(headers[MAP])
|
||||
.sort()
|
||||
.map(key => fn(key));
|
||||
}
|
||||
|
||||
const HeadersIteratorPrototype = Object.setPrototypeOf(
|
||||
{
|
||||
next() {
|
||||
if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
|
||||
throw new TypeError('Value of `this` is not a HeadersIterator');
|
||||
}
|
||||
|
||||
const { target, kind, index } = this[INTERNAL];
|
||||
const values = getHeaders(target, kind);
|
||||
const len = values.length;
|
||||
if (index >= len) {
|
||||
return {
|
||||
value: undefined,
|
||||
done: true,
|
||||
};
|
||||
}
|
||||
|
||||
this[INTERNAL].index = index + 1;
|
||||
|
||||
return {
|
||||
value: values[index],
|
||||
done: false,
|
||||
};
|
||||
},
|
||||
},
|
||||
Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))
|
||||
);
|
||||
|
||||
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
|
||||
value: 'HeadersIterator',
|
||||
writable: false,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
});
|
||||
|
||||
export { BaseHeaders as Headers };
|
||||
124
packages/middleware/src/websandbox/spec-compliant/request.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
import { Body, cloneBody, extractContentType, getInstanceBody } from './body';
|
||||
import { Headers as BaseHeaders } from './headers';
|
||||
import { notImplemented } from '../utils';
|
||||
|
||||
export const INTERNALS = Symbol('internal request');
|
||||
|
||||
class BaseRequest extends Body implements Request {
|
||||
[INTERNALS]: {
|
||||
credentials: RequestCredentials;
|
||||
headers: Headers;
|
||||
method: string;
|
||||
redirect: RequestRedirect;
|
||||
url: URL;
|
||||
};
|
||||
|
||||
constructor(input: BaseRequest | string, init: RequestInit = {}) {
|
||||
const method = init.method?.toUpperCase() ?? 'GET';
|
||||
|
||||
if (
|
||||
(method === 'GET' || method === 'HEAD') &&
|
||||
(init.body || (input instanceof BaseRequest && getInstanceBody(input)))
|
||||
) {
|
||||
throw new TypeError('Request with GET/HEAD method cannot have body');
|
||||
}
|
||||
|
||||
let inputBody: BodyInit | null = null;
|
||||
if (init.body) {
|
||||
inputBody = init.body;
|
||||
} else if (input instanceof BaseRequest && getInstanceBody(input)) {
|
||||
inputBody = cloneBody(input);
|
||||
}
|
||||
|
||||
super(inputBody);
|
||||
|
||||
const headers = new BaseHeaders(
|
||||
init.headers || getProp(input, 'headers') || {}
|
||||
);
|
||||
if (inputBody !== null) {
|
||||
const contentType = extractContentType(this);
|
||||
if (contentType !== null && !headers.has('Content-Type')) {
|
||||
headers.append('Content-Type', contentType);
|
||||
}
|
||||
}
|
||||
|
||||
this[INTERNALS] = {
|
||||
credentials:
|
||||
init.credentials || getProp(input, 'credentials') || 'same-origin',
|
||||
headers,
|
||||
method,
|
||||
redirect: init.redirect || getProp(input, 'redirect') || 'follow',
|
||||
url: new URL(typeof input === 'string' ? input : input.url),
|
||||
};
|
||||
}
|
||||
|
||||
get url() {
|
||||
return this[INTERNALS].url.toString();
|
||||
}
|
||||
|
||||
get credentials() {
|
||||
return this[INTERNALS].credentials;
|
||||
}
|
||||
|
||||
get method() {
|
||||
return this[INTERNALS].method;
|
||||
}
|
||||
|
||||
get headers() {
|
||||
return this[INTERNALS].headers;
|
||||
}
|
||||
|
||||
get redirect() {
|
||||
return this[INTERNALS].redirect;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
public clone() {
|
||||
return new BaseRequest(this);
|
||||
}
|
||||
|
||||
get cache() {
|
||||
return notImplemented('Request', 'cache');
|
||||
}
|
||||
|
||||
get integrity() {
|
||||
return notImplemented('Request', 'integrity');
|
||||
}
|
||||
|
||||
get keepalive() {
|
||||
return notImplemented('Request', 'keepalive');
|
||||
}
|
||||
|
||||
get mode() {
|
||||
return notImplemented('Request', 'mode');
|
||||
}
|
||||
|
||||
get destination() {
|
||||
return notImplemented('Request', 'destination');
|
||||
}
|
||||
|
||||
get referrer() {
|
||||
return notImplemented('Request', 'referrer');
|
||||
}
|
||||
|
||||
get referrerPolicy() {
|
||||
return notImplemented('Request', 'referrerPolicy');
|
||||
}
|
||||
|
||||
get signal() {
|
||||
return notImplemented('Request', 'signal');
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() {
|
||||
return 'Request';
|
||||
}
|
||||
}
|
||||
|
||||
export { BaseRequest as Request };
|
||||
|
||||
function getProp<K extends keyof BaseRequest>(
|
||||
input: BaseRequest | string,
|
||||
key: K
|
||||
): BaseRequest[K] | undefined {
|
||||
return input instanceof BaseRequest ? input[key] : undefined;
|
||||
}
|
||||
113
packages/middleware/src/websandbox/spec-compliant/response.ts
Normal file
@@ -0,0 +1,113 @@
|
||||
import { Body, BodyInit, cloneBody, extractContentType } from './body';
|
||||
|
||||
const INTERNALS = Symbol('internal response');
|
||||
const REDIRECTS = new Set([301, 302, 303, 307, 308]);
|
||||
|
||||
class BaseResponse extends Body implements Response {
|
||||
[INTERNALS]: {
|
||||
headers: Headers;
|
||||
status: number;
|
||||
statusText: string;
|
||||
type: 'default' | 'error';
|
||||
url?: URL;
|
||||
};
|
||||
|
||||
constructor(body?: BodyInit | null, init?: ResponseInit) {
|
||||
super(body);
|
||||
|
||||
this[INTERNALS] = {
|
||||
headers: new Headers(init?.headers),
|
||||
status: init?.status || 200,
|
||||
statusText: init?.statusText || '',
|
||||
type: 'default',
|
||||
url: init?.url ? new URL(init.url) : undefined,
|
||||
};
|
||||
|
||||
if (this[INTERNALS].status < 200 || this[INTERNALS].status > 599) {
|
||||
throw new RangeError(
|
||||
`Responses may only be constructed with status codes in the range 200 to 599, inclusive.`
|
||||
);
|
||||
}
|
||||
|
||||
if (body !== null && !this[INTERNALS].headers.has('Content-Type')) {
|
||||
const contentType = extractContentType(this);
|
||||
if (contentType) {
|
||||
this[INTERNALS].headers.append('Content-Type', contentType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static redirect(url: string, status = 302) {
|
||||
if (!REDIRECTS.has(status)) {
|
||||
throw new RangeError(
|
||||
'Failed to execute "redirect" on "response": Invalid status code'
|
||||
);
|
||||
}
|
||||
|
||||
return new Response(null, {
|
||||
headers: { Location: url },
|
||||
status,
|
||||
});
|
||||
}
|
||||
|
||||
static error() {
|
||||
const response = new BaseResponse(null, { status: 0, statusText: '' });
|
||||
response[INTERNALS].type = 'error';
|
||||
return response;
|
||||
}
|
||||
|
||||
get url() {
|
||||
return this[INTERNALS].url?.toString() || '';
|
||||
}
|
||||
|
||||
get ok() {
|
||||
return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;
|
||||
}
|
||||
|
||||
get status() {
|
||||
return this[INTERNALS].status;
|
||||
}
|
||||
|
||||
get statusText() {
|
||||
return this[INTERNALS].statusText;
|
||||
}
|
||||
|
||||
get headers() {
|
||||
return this[INTERNALS].headers;
|
||||
}
|
||||
|
||||
get redirected() {
|
||||
return (
|
||||
this[INTERNALS].status > 299 &&
|
||||
this[INTERNALS].status < 400 &&
|
||||
this[INTERNALS].headers.has('Location')
|
||||
);
|
||||
}
|
||||
|
||||
get type() {
|
||||
return this[INTERNALS].type;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
clone() {
|
||||
return new BaseResponse(cloneBody(this), {
|
||||
headers: this.headers,
|
||||
status: this.status,
|
||||
statusText: this.statusText,
|
||||
url: this.url,
|
||||
});
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() {
|
||||
return 'Response';
|
||||
}
|
||||
}
|
||||
|
||||
export interface ResponseInit {
|
||||
headers?: HeadersInit;
|
||||
status?: number;
|
||||
statusText?: string;
|
||||
url?: string;
|
||||
}
|
||||
|
||||
export { BaseResponse as Response };
|
||||
@@ -0,0 +1,26 @@
import { DeprecationError } from '../error';
import { FetchEvent } from '../spec-compliant/fetch-event';
import { NextRequest } from './request';

export class NextFetchEvent extends FetchEvent {
  sourcePage: string;

  constructor(params: { request: NextRequest; page: string }) {
    //@ts-ignore
    super(params.request);
    this.sourcePage = params.page;
  }

  // @ts-ignore
  get request() {
    throw new DeprecationError({
      page: this.sourcePage,
    });
  }

  respondWith() {
    throw new DeprecationError({
      page: this.sourcePage,
    });
  }
}
98
packages/middleware/src/websandbox/spec-extension/request.ts
Normal file
@@ -0,0 +1,98 @@
|
||||
import type { IResult } from 'ua-parser-js';
|
||||
import cookie from 'cookie';
|
||||
import parseua from 'ua-parser-js';
|
||||
import { Request, RequestInit as NodeFetchRequestInit } from 'node-fetch';
|
||||
|
||||
export const INTERNALS = Symbol('internal request');
|
||||
|
||||
export class NextRequest extends Request {
|
||||
[INTERNALS]: {
|
||||
cookieParser(): { [key: string]: string };
|
||||
geo: { city?: string; country?: string; region?: string };
|
||||
ip?: string;
|
||||
page?: { name?: string; params?: { [key: string]: string } };
|
||||
ua?: IResult | null;
|
||||
url: URL;
|
||||
};
|
||||
|
||||
constructor(input: Request | string, init: RequestInit = {}) {
|
||||
//@ts-ignore
|
||||
super(input, init);
|
||||
|
||||
const cookieParser = () => {
|
||||
const value = this.headers.get('cookie');
|
||||
return value ? cookie.parse(value) : {};
|
||||
};
|
||||
|
||||
this[INTERNALS] = {
|
||||
cookieParser,
|
||||
geo: init.geo || {},
|
||||
ip: init.ip,
|
||||
page: init.page,
|
||||
url: new URL(typeof input === 'string' ? input : input.url),
|
||||
};
|
||||
}
|
||||
|
||||
public get cookies() {
|
||||
return this[INTERNALS].cookieParser();
|
||||
}
|
||||
|
||||
public get geo() {
|
||||
return this[INTERNALS].geo;
|
||||
}
|
||||
|
||||
public get ip() {
|
||||
return this[INTERNALS].ip;
|
||||
}
|
||||
|
||||
public get preflight() {
|
||||
return this.headers.get('x-middleware-preflight');
|
||||
}
|
||||
|
||||
public get nextUrl() {
|
||||
return this[INTERNALS].url;
|
||||
}
|
||||
|
||||
public get page() {
|
||||
return {
|
||||
name: this[INTERNALS].page?.name,
|
||||
params: this[INTERNALS].page?.params,
|
||||
};
|
||||
}
|
||||
|
||||
public get ua() {
|
||||
if (typeof this[INTERNALS].ua !== 'undefined') {
|
||||
return this[INTERNALS].ua || undefined;
|
||||
}
|
||||
|
||||
const uaString = this.headers.get('user-agent');
|
||||
if (!uaString) {
|
||||
this[INTERNALS].ua = null;
|
||||
return this[INTERNALS].ua || undefined;
|
||||
}
|
||||
|
||||
this[INTERNALS].ua = {
|
||||
...parseua(uaString),
|
||||
};
|
||||
|
||||
return this[INTERNALS].ua;
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
public get url() {
|
||||
return this[INTERNALS].url.toString();
|
||||
}
|
||||
}
|
||||
|
||||
export interface RequestInit extends NodeFetchRequestInit {
|
||||
geo?: {
|
||||
city?: string;
|
||||
country?: string;
|
||||
region?: string;
|
||||
};
|
||||
ip?: string;
|
||||
page?: {
|
||||
name?: string;
|
||||
params?: { [key: string]: string };
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,97 @@
|
||||
import type { CookieSerializeOptions } from 'cookie';
|
||||
import cookie from 'cookie';
|
||||
import { Response, ResponseInit as NodeFetchResponseInit } from 'node-fetch';
|
||||
|
||||
const INTERNALS = Symbol('internal response');
|
||||
const REDIRECTS = new Set([301, 302, 303, 307, 308]);
|
||||
|
||||
export class SpecResponse extends Response {
|
||||
[INTERNALS]: {
|
||||
cookieParser(): { [key: string]: string };
|
||||
url?: URL;
|
||||
};
|
||||
|
||||
constructor(body?: BodyInit | null, init: ResponseInit = {}) {
|
||||
// TODO - why is this failing?
|
||||
// @ts-ignore
|
||||
super(body, init);
|
||||
|
||||
const cookieParser = () => {
|
||||
const value = this.headers.get('cookie');
|
||||
return value ? cookie.parse(value) : {};
|
||||
};
|
||||
|
||||
this[INTERNALS] = {
|
||||
cookieParser,
|
||||
url: init.url ? new URL(init.url) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
public get cookies() {
|
||||
return this[INTERNALS].cookieParser();
|
||||
}
|
||||
|
||||
public cookie(
|
||||
name: string,
|
||||
value: { [key: string]: any } | string,
|
||||
opts: CookieSerializeOptions = {}
|
||||
) {
|
||||
const val =
|
||||
typeof value === 'object' ? 'j:' + JSON.stringify(value) : String(value);
|
||||
|
||||
if (opts.maxAge) {
|
||||
opts.expires = new Date(Date.now() + opts.maxAge);
|
||||
opts.maxAge /= 1000;
|
||||
}
|
||||
|
||||
if (opts.path == null) {
|
||||
opts.path = '/';
|
||||
}
|
||||
|
||||
this.headers.append(
|
||||
'Set-Cookie',
|
||||
cookie.serialize(name, String(val), opts)
|
||||
);
|
||||
return this;
|
||||
}
|
||||
|
||||
public clearCookie(name: string, opts: CookieSerializeOptions = {}) {
|
||||
return this.cookie(name, '', { expires: new Date(1), path: '/', ...opts });
|
||||
}
|
||||
|
||||
static redirect(url: string | URL, status = 302) {
|
||||
if (!REDIRECTS.has(status)) {
|
||||
throw new RangeError(
|
||||
'Failed to execute "redirect" on "response": Invalid status code'
|
||||
);
|
||||
}
|
||||
|
||||
return new SpecResponse(null, {
|
||||
headers: { Location: typeof url === 'string' ? url : url.toString() },
|
||||
status,
|
||||
});
|
||||
}
|
||||
|
||||
static rewrite(destination: string | URL) {
|
||||
return new SpecResponse(null, {
|
||||
headers: {
|
||||
'x-middleware-rewrite':
|
||||
typeof destination === 'string'
|
||||
? destination
|
||||
: destination.toString(),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
static next() {
|
||||
return new SpecResponse(null, {
|
||||
headers: {
|
||||
'x-middleware-next': '1',
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
interface ResponseInit extends NodeFetchResponseInit {
|
||||
url?: string;
|
||||
}
|
||||
26
packages/middleware/src/websandbox/types.ts
Normal file
@@ -0,0 +1,26 @@
import { Response } from 'node-fetch';

export interface NodeHeaders {
  [header: string]: string | string[] | undefined;
}

export interface RequestData {
  geo?: {
    city?: string;
    country?: string;
    region?: string;
  };
  headers: NodeHeaders;
  ip?: string;
  method: string;
  page?: {
    name?: string;
    params?: { [key: string]: string };
  };
  url: string;
}

export interface FetchEventResult {
  response: Response;
  waitUntil: Promise<any>;
}
124
packages/middleware/src/websandbox/utils.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
import type { NodeHeaders } from './types';
|
||||
import { Headers } from 'node-fetch';
|
||||
|
||||
export async function* streamToIterator<T>(
|
||||
readable: ReadableStream<T>
|
||||
): AsyncIterableIterator<T> {
|
||||
const reader = readable.getReader();
|
||||
while (true) {
|
||||
const { value, done } = await reader.read();
|
||||
if (done) break;
|
||||
if (value) {
|
||||
yield value;
|
||||
}
|
||||
}
|
||||
reader.releaseLock();
|
||||
}
|
||||
|
||||
export function notImplemented(name: string, method: string): any {
|
||||
throw new Error(
|
||||
`Failed to get the '${method}' property on '${name}': the property is not implemented`
|
||||
);
|
||||
}
|
||||
|
||||
export function fromNodeHeaders(object: NodeHeaders): Headers {
|
||||
const headers = new Headers();
|
||||
for (const [key, value] of Object.entries(object)) {
|
||||
const values = Array.isArray(value) ? value : [value];
|
||||
for (const v of values) {
|
||||
if (v !== undefined) {
|
||||
headers.append(key, v);
|
||||
}
|
||||
}
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
export function toNodeHeaders(headers?: Headers): NodeHeaders {
|
||||
const result: NodeHeaders = {};
|
||||
if (headers) {
|
||||
for (const [key, value] of headers.entries()) {
|
||||
result[key] = value;
|
||||
if (key.toLowerCase() === 'set-cookie') {
|
||||
result[key] = splitCookiesString(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
Set-Cookie header field-values are sometimes comma joined in one string. This splits them without choking on commas
|
||||
that are within a single set-cookie field-value, such as in the Expires portion.
|
||||
This is uncommon, but explicitly allowed - see https://tools.ietf.org/html/rfc2616#section-4.2
|
||||
Node.js does this for every header *except* set-cookie - see https://github.com/nodejs/node/blob/d5e363b77ebaf1caf67cd7528224b651c86815c1/lib/_http_incoming.js#L128
|
||||
React Native's fetch does this for *every* header, including set-cookie.
|
||||
|
||||
Based on: https://github.com/google/j2objc/commit/16820fdbc8f76ca0c33472810ce0cb03d20efe25
|
||||
Credits to: https://github.com/tomball for original and https://github.com/chrusart for JavaScript implementation
|
||||
*/
|
||||
export function splitCookiesString(cookiesString: string) {
|
||||
const cookiesStrings = [];
|
||||
let pos = 0;
|
||||
let start;
|
||||
let ch;
|
||||
let lastComma;
|
||||
let nextStart;
|
||||
let cookiesSeparatorFound;
|
||||
|
||||
function skipWhitespace() {
|
||||
while (pos < cookiesString.length && /\s/.test(cookiesString.charAt(pos))) {
|
||||
pos += 1;
|
||||
}
|
||||
return pos < cookiesString.length;
|
||||
}
|
||||
|
||||
function notSpecialChar() {
|
||||
ch = cookiesString.charAt(pos);
|
||||
|
||||
return ch !== '=' && ch !== ';' && ch !== ',';
|
||||
}
|
||||
|
||||
while (pos < cookiesString.length) {
|
||||
start = pos;
|
||||
cookiesSeparatorFound = false;
|
||||
|
||||
while (skipWhitespace()) {
|
||||
ch = cookiesString.charAt(pos);
|
||||
if (ch === ',') {
|
||||
// ',' is a cookie separator if we have later first '=', not ';' or ','
|
||||
lastComma = pos;
|
||||
pos += 1;
|
||||
|
||||
skipWhitespace();
|
||||
nextStart = pos;
|
||||
|
||||
while (pos < cookiesString.length && notSpecialChar()) {
|
||||
pos += 1;
|
||||
}
|
||||
|
||||
// currently special character
|
||||
if (pos < cookiesString.length && cookiesString.charAt(pos) === '=') {
|
||||
// we found cookies separator
|
||||
cookiesSeparatorFound = true;
|
||||
// pos is inside the next cookie, so back up and return it.
|
||||
pos = nextStart;
|
||||
cookiesStrings.push(cookiesString.substring(start, lastComma));
|
||||
start = pos;
|
||||
} else {
|
||||
// in param ',' or param separator ';',
|
||||
// we continue from that comma
|
||||
pos = lastComma + 1;
|
||||
}
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (!cookiesSeparatorFound || pos >= cookiesString.length) {
|
||||
cookiesStrings.push(cookiesString.substring(start, cookiesString.length));
|
||||
}
|
||||
}
|
||||
|
||||
return cookiesStrings;
|
||||
}
|
||||
21
packages/middleware/test/__snapshots__/build.test.ts.snap
Normal file
@@ -0,0 +1,21 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`build() should build simple middleware 1`] = `
Object {
  "middleware": Object {
    "/": Object {
      "env": Array [],
      "files": Array [
        "server/pages/_middleware.js",
      ],
      "name": "pages/_middleware",
      "page": "/",
      "regexp": "^/.*$",
    },
  },
  "sortedMiddleware": Array [
    "/",
  ],
  "version": 1,
}
`;
57
packages/middleware/test/build.test.ts
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
import { join } from 'path';
|
||||
import { promises as fsp } from 'fs';
|
||||
import { build } from '../src';
|
||||
import { Response } from 'node-fetch';
|
||||
|
||||
describe('build()', () => {
|
||||
beforeEach(() => {
|
||||
//@ts-ignore
|
||||
global.Response = Response;
|
||||
});
|
||||
afterEach(() => {
|
||||
//@ts-ignore
|
||||
delete global.Response;
|
||||
//@ts-ignore
|
||||
delete global._ENTRIES;
|
||||
});
|
||||
it('should build simple middleware', async () => {
|
||||
const fixture = join(__dirname, 'fixtures/simple');
|
||||
await build({
|
||||
workPath: fixture,
|
||||
});
|
||||
|
||||
const middlewareManifest = JSON.parse(
|
||||
await fsp.readFile(
|
||||
join(fixture, '.output/server/middleware-manifest.json'),
|
||||
'utf8'
|
||||
)
|
||||
);
|
||||
expect(middlewareManifest).toMatchSnapshot();
|
||||
|
||||
const outputFile = join(fixture, '.output/server/pages/_middleware.js');
|
||||
expect(await fsp.stat(outputFile)).toBeTruthy();
|
||||
|
||||
require(outputFile);
|
||||
//@ts-ignore
|
||||
const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
|
||||
expect(typeof middleware).toStrictEqual('function');
|
||||
const handledResponse = await middleware({
|
||||
request: {
|
||||
url: 'http://google.com',
|
||||
},
|
||||
});
|
||||
const unhandledResponse = await middleware({
|
||||
request: {
|
||||
url: 'literallyanythingelse',
|
||||
},
|
||||
});
|
||||
expect(String(handledResponse.response.body)).toEqual('Hi from the edge!');
|
||||
expect(
|
||||
(handledResponse.response as Response).headers.get('x-middleware-next')
|
||||
).toEqual(null);
|
||||
expect(unhandledResponse.response.body).toEqual(null);
|
||||
expect(
|
||||
(unhandledResponse.response as Response).headers.get('x-middleware-next')
|
||||
).toEqual('1');
|
||||
});
|
||||
});
|
||||
5
packages/middleware/test/fixtures/simple/_middleware.js
vendored
Normal file
@@ -0,0 +1,5 @@
export default req => {
  if (req.url === 'http://google.com') {
    return new Response('Hi from the edge!');
  }
};
4
packages/middleware/test/tsconfig.json
vendored
Normal file
@@ -0,0 +1,4 @@
{
  "extends": "../tsconfig.json",
  "include": ["*.test.ts"]
}
16
packages/middleware/tsconfig.json
Normal file
@@ -0,0 +1,16 @@
{
  "compilerOptions": {
    "strict": true,
    "esModuleInterop": true,
    "lib": ["esnext", "dom", "dom.iterable"],
    "target": "es2018",
    "module": "commonjs",
    "outDir": "dist",
    "sourceMap": false,
    "declaration": true,
    "moduleResolution": "node",
    "typeRoots": ["./@types", "./node_modules/@types"]
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules"]
}
@@ -1,5 +1,6 @@
|
||||
const { parse } = require('url');
|
||||
const { parse, pathToFileURL } = require('url');
|
||||
const { createServer, Server } = require('http');
|
||||
const { isAbsolute } = require('path');
|
||||
const { Bridge } = require('./bridge.js');
|
||||
|
||||
/**
|
||||
@@ -15,8 +16,9 @@ function makeVercelLauncher(config) {
|
||||
shouldAddSourcemapSupport = false,
|
||||
} = config;
|
||||
return `
|
||||
const { parse } = require('url');
|
||||
const { parse, pathToFileURL } = require('url');
|
||||
const { createServer, Server } = require('http');
|
||||
const { isAbsolute } = require('path');
|
||||
const { Bridge } = require(${JSON.stringify(bridgePath)});
|
||||
${
|
||||
shouldAddSourcemapSupport
|
||||
@@ -60,13 +62,15 @@ function getVercelLauncher({
|
||||
process.env.NODE_ENV = region === 'dev1' ? 'development' : 'production';
|
||||
}
|
||||
|
||||
async function getListener() {
|
||||
/**
|
||||
* @param {string} p - entrypointPath
|
||||
*/
|
||||
async function getListener(p) {
|
||||
let listener = useRequire
|
||||
? require(entrypointPath)
|
||||
: await import(entrypointPath);
|
||||
? require(p)
|
||||
: await import(isAbsolute(p) ? pathToFileURL(p).href : p);
|
||||
|
||||
// In some cases we might have nested default props
|
||||
// due to TS => JS
|
||||
// In some cases we might have nested default props due to TS => JS
|
||||
for (let i = 0; i < 5; i++) {
|
||||
if (listener.default) listener = listener.default;
|
||||
}
|
||||
@@ -74,7 +78,7 @@ function getVercelLauncher({
|
||||
return listener;
|
||||
}
|
||||
|
||||
getListener()
|
||||
getListener(entrypointPath)
|
||||
.then(listener => {
|
||||
if (typeof listener.listen === 'function') {
|
||||
Server.prototype.listen = originalListen;
|
||||
|
||||
@@ -1,6 +1,6 @@
{
  "name": "@vercel/node-bridge",
  "version": "2.1.1-canary.1",
  "version": "2.1.1-canary.2",
  "license": "MIT",
  "main": "./index.js",
  "repository": {

@@ -1,6 +1,6 @@
{
  "name": "@vercel/node",
  "version": "1.12.2-canary.4",
  "version": "1.12.2-canary.6",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -34,7 +34,7 @@
    "@types/test-listen": "1.1.0",
    "@vercel/ncc": "0.24.0",
    "@vercel/nft": "0.14.0",
    "@vercel/node-bridge": "2.1.1-canary.1",
    "@vercel/node-bridge": "2.1.1-canary.2",
    "content-type": "1.0.4",
    "cookie": "0.4.0",
    "etag": "1.8.1",

@@ -16,7 +16,7 @@ const init = async () => {
  console.log('Hapi server running on %s', server.info.uri);
};

process.on('unhandledRejection', (err) => {
process.on('unhandledRejection', err => {
  console.log('Hapi failed in an unexpected way');
  console.log(err);
  process.exit(1);

@@ -3,10 +3,10 @@ const path = require('path');

module.exports = (req, resp) => {
  const asset1 = fs.readFileSync(
    path.join(__dirname, 'subdirectory1/asset.txt'),
    path.join(__dirname, 'subdirectory1/asset.txt')
  );
  const asset2 = fs.readFileSync(
    path.join(__dirname, 'subdirectory2/asset.txt'),
    path.join(__dirname, 'subdirectory2/asset.txt')
  );
  resp.end(`${asset1},${asset2}`);
};

@@ -8,8 +8,8 @@ const typeDefs = `

const resolvers = {
  Query: {
    hello: (_, { name }) => `Hello ${name || "world"}`
  }
    hello: (_, { name }) => `Hello ${name || 'world'}`,
  },
};

const lambda = new GraphQLServerLambda({

27
packages/plugin-go/package.json
Normal file
@@ -0,0 +1,27 @@
{
  "private": false,
  "name": "vercel-plugin-go",
  "version": "1.0.0-canary.4",
  "main": "dist/index.js",
  "license": "MIT",
  "files": [
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/vercel/vercel.git",
    "directory": "packages/vercel-plugin-go"
  },
  "scripts": {
    "build": "tsc",
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "@vercel/build-utils": "2.12.3-canary.19",
    "@vercel/go": "1.2.4-canary.4"
  },
  "devDependencies": {
    "@types/node": "*",
    "typescript": "4.3.4"
  }
}
packages/plugin-go/src/index.ts (Normal file, 6 lines)
@@ -0,0 +1,6 @@
+import { convertRuntimeToPlugin } from '@vercel/build-utils';
+import * as go from '@vercel/go';
+
+export const build = convertRuntimeToPlugin(go.build, '.go');
+
+export const startDevServer = go.startDevServer;
packages/plugin-go/tsconfig.json (Normal file, 17 lines)
@@ -0,0 +1,17 @@
+{
+  "compilerOptions": {
+    "declaration": false,
+    "esModuleInterop": true,
+    "lib": ["esnext"],
+    "module": "commonjs",
+    "moduleResolution": "node",
+    "noEmitOnError": true,
+    "noFallthroughCasesInSwitch": true,
+    "noImplicitReturns": true,
+    "noUnusedLocals": true,
+    "noUnusedParameters": true,
+    "outDir": "dist",
+    "strict": true,
+    "target": "esnext"
+  }
+}
packages/plugin-node/.gitignore (vendored, Normal file, 6 lines)
@@ -0,0 +1,6 @@
+/dist
+/test/fixtures/**/.env
+/test/fixtures/**/.gitignore
+/test/fixtures/**/.output
+/test/fixtures/**/types.d.ts
+/test/fixtures/11-symlinks/symlink
packages/plugin-node/@types/zeit__ncc/index.d.ts (vendored, Normal file, 45 lines)
@@ -0,0 +1,45 @@
+declare function ncc(
+  entrypoint: string,
+  options?: ncc.NccOptions
+): ncc.NccResult;
+
+declare namespace ncc {
+  export interface NccOptions {
+    watch?: any;
+    sourceMap?: boolean;
+    sourceMapRegister?: boolean;
+  }
+
+  export interface Asset {
+    source: Buffer;
+    permissions: number;
+  }
+
+  export interface Assets {
+    [name: string]: Asset;
+  }
+
+  export interface BuildResult {
+    err: Error | null | undefined;
+    code: string;
+    map: string | undefined;
+    assets: Assets | undefined;
+    permissions: number | undefined;
+  }
+
+  export type HandlerFn = (params: BuildResult) => void;
+  export type HandlerCallback = (fn: HandlerFn) => void;
+  export type RebuildFn = () => void;
+  export type RebuildCallback = (fn: RebuildFn) => void;
+  export type CloseCallback = () => void;
+
+  export interface NccResult {
+    handler: HandlerCallback;
+    rebuild: RebuildCallback;
+    close: CloseCallback;
+  }
+}
+
+declare module '@vercel/ncc' {
+  export = ncc;
+}
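Judging only from the declarations above, this vendored typing models the watch-mode shape of @vercel/ncc: calling ncc(entrypoint, options) returns handler/rebuild/close registration callbacks rather than a promise. A hedged usage sketch that is consistent with those types (the entrypoint path and option values here are made up):

    // Sketch based solely on the index.d.ts above; not taken from the repo.
    const ncc = require('@vercel/ncc');

    const result = ncc('./src/index.ts', { watch: true, sourceMap: true });

    // `handler` registers a callback that receives each BuildResult.
    result.handler(({ err, code, assets }) => {
      if (err) {
        console.error(err);
        return;
      }
      const assetCount = Object.keys(assets || {}).length;
      console.log(`bundled ${code.length} bytes with ${assetCount} assets`);
    });

    // `rebuild` registers a callback fired when a watched file changes.
    result.rebuild(() => console.log('rebuilding...'));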
packages/plugin-node/bench/.gitignore (vendored, Normal file, 1 line)
@@ -0,0 +1 @@
+lambda
packages/plugin-node/bench/entrypoint-express.js (Normal file, 19 lines)
@@ -0,0 +1,19 @@
+const express = require('express');
+
+const app = express();
+
+module.exports = app;
+
+app.use(express.json());
+
+app.post('*', (req, res) => {
+  if (req.body == null) {
+    return res.status(400).send({ error: 'no JSON object in the request' });
+  }
+
+  return res.status(200).send(JSON.stringify(req.body, null, 4));
+});
+
+app.all('*', (req, res) => {
+  res.status(405).send({ error: 'only POST requests are accepted' });
+});
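The Express bench entrypoint above exports the app itself, and an Express app is a plain (req, res) listener. Assuming the file name stays as shown, a minimal local harness for exercising it (hypothetical, not part of the diff) could look like:

    // Hypothetical harness: an Express app is a valid request listener,
    // so it can be handed straight to http.createServer.
    const http = require('http');
    const app = require('./entrypoint-express.js');

    http.createServer(app).listen(3000, () => {
      console.log('bench entrypoint listening on http://localhost:3000');
    });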
packages/plugin-node/bench/entrypoint-helpers.js (Normal file, 7 lines)
@@ -0,0 +1,7 @@
+module.exports = (req, res) => {
+  if (req.body == null) {
+    return res.status(400).send({ error: 'no JSON object in the request' });
+  }
+
+  return res.status(200).send(JSON.stringify(req.body, null, 4));
+};
packages/plugin-node/bench/entrypoint-load-helpers.js (Normal file, 9 lines)
@@ -0,0 +1,9 @@
+function doNothing() {}
+
+module.exports = (req, res) => {
+  doNothing(req.query.who);
+  doNothing(req.body);
+  doNothing(req.cookies);
+
+  res.end('hello');
+};
packages/plugin-node/bench/entrypoint-notload-helpers.js (Normal file, 3 lines)
@@ -0,0 +1,3 @@
+module.exports = (req, res) => {
+  res.end('hello');
+};
packages/plugin-node/bench/package.json (Normal file, 10 lines)
@@ -0,0 +1,10 @@
+{
+  "name": "bench",
+  "version": "1.0.0",
+  "main": "index.js",
+  "license": "MIT",
+  "dependencies": {
+    "express": "4.17.1",
+    "fs-extra": "8.0.1"
+  }
+}
Some files were not shown because too many files have changed in this diff.