Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: 41 commits
vercel-plu ... @vercel/py
| Author | SHA1 | Date |
|---|---|---|
|  | d3ef240f6e |  |
|  | 5b26ebc7b8 |  |
|  | 3427ad6ce0 |  |
|  | 4ab5e4326b |  |
|  | d24a3ce3ab |  |
|  | 29a44db8d9 |  |
|  | 695f3a9212 |  |
|  | 3ff777b8ed |  |
|  | d94b9806ab |  |
|  | 35c8fc2729 |  |
|  | 0a468fd6d7 |  |
|  | d31ebbabe4 |  |
|  | 09c9b71adb |  |
|  | 5975db4d66 |  |
|  | 2c86ac654c |  |
|  | ca5f066eb9 |  |
|  | 410ef86102 |  |
|  | 6792edf32a |  |
|  | 67de167a7e |  |
|  | 0c5c05d90b |  |
|  | fe43c9c4b2 |  |
|  | d6a5aa4f6d |  |
|  | 1c3701628d |  |
|  | 45689f22ab |  |
|  | 2c3ddffaac |  |
|  | c3ea0195c2 |  |
|  | 5f5e50cff0 |  |
|  | 160f4d46d9 |  |
|  | 8d619bd7cc |  |
|  | b94337d842 |  |
|  | 34f4222ca2 |  |
|  | 5de045edd7 |  |
|  | 5efd3b98de |  |
|  | 82c83312c7 |  |
|  | 5ccb983007 |  |
|  | 7a921399be |  |
|  | 3900f2f982 |  |
|  | 09939f1e07 |  |
|  | fc3a3ca81f |  |
|  | ba7bf2e4a6 |  |
|  | 00641037fc |  |
40  .github/CODEOWNERS  (vendored)

@@ -4,24 +4,26 @@
 * @TooTallNate
 /.github/workflows @AndyBitz @styfle
 /packages/frameworks @AndyBitz
+/packages/cli/src/commands/build @TooTallNate @styfle @AndyBitz @gdborton @jaredpalmer
 /packages/cli/src/commands/dev @TooTallNate @styfle @AndyBitz
 /packages/cli/src/util/dev @TooTallNate @styfle @AndyBitz
 /packages/cli/src/commands/domains @javivelasco @mglagola @anatrajkovska
 /packages/cli/src/commands/certs @javivelasco @mglagola @anatrajkovska
 /packages/cli/src/commands/env @styfle @lucleray
-/packages/client @rdev @styfle @TooTallNate
+/packages/client @styfle @TooTallNate
 /packages/build-utils @styfle @AndyBitz @TooTallNate
+/packages/middleware @gdborton @javivelasco
 /packages/node @styfle @TooTallNate @lucleray
 /packages/node-bridge @styfle @TooTallNate @lucleray
 /packages/next @Timer @ijjk
 /packages/go @styfle @TooTallNate
 /packages/python @styfle @TooTallNate
-/packages/ruby @styfle @coetry @TooTallNate
+/packages/ruby @styfle @TooTallNate
 /packages/static-build @styfle @AndyBitz
 /packages/routing-utils @styfle @dav-is @ijjk
-/examples @mcsdevv @timothyis
+/examples @mcsdevv
 /examples/create-react-app @Timer
 /examples/nextjs @timneutkens @Timer
-/examples/hugo @mcsdevv @timothyis @styfle
-/examples/jekyll @mcsdevv @timothyis @styfle
-/examples/zola @mcsdevv @timothyis @styfle
+/examples/hugo @mcsdevv @styfle
+/examples/jekyll @mcsdevv @styfle
+/examples/zola @mcsdevv @styfle
@vercel/build-utils package.json:

@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.12.3-canary.31",
+  "version": "2.12.3-canary.42",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -30,7 +30,7 @@
   "@types/node-fetch": "^2.1.6",
   "@types/semver": "6.0.0",
   "@types/yazl": "^2.4.1",
-  "@vercel/frameworks": "0.5.1-canary.16",
+  "@vercel/frameworks": "0.5.1-canary.17",
   "@vercel/ncc": "0.24.0",
   "aggregate-error": "3.0.1",
   "async-retry": "1.2.3",
convert-runtime-to-plugin.ts (@vercel/build-utils):

@@ -1,11 +1,10 @@
 import fs from 'fs-extra';
-import { join, dirname, parse, relative } from 'path';
+import { join, parse, relative, dirname, basename, extname } from 'path';
 import glob from './fs/glob';
 import { normalizePath } from './fs/normalize-path';
 import { FILES_SYMBOL, Lambda } from './lambda';
-import type FileBlob from './file-blob';
 import type { BuildOptions, Files } from './types';
-import { getIgnoreFilter } from '.';
+import { debug, getIgnoreFilter } from '.';
 
 // `.output` was already created by the Build Command, so we have
 // to ensure its contents don't get bundled into the Lambda. Similarily,
@@ -87,10 +86,10 @@ export function convertRuntimeToPlugin(
 
   const pages: { [key: string]: any } = {};
   const pluginName = packageName.replace('vercel-plugin-', '');
+  const outputPath = join(workPath, '.output');
 
   const traceDir = join(
-    workPath,
-    `.output`,
+    outputPath,
     `inputs`,
     // Legacy Runtimes can only provide API Routes, so that's
     // why we can use this prefix for all of them. Here, we have to
@@ -101,6 +100,8 @@ export function convertRuntimeToPlugin(
 
   await fs.ensureDir(traceDir);
 
+  const entryRoot = join(outputPath, 'server', 'pages');
+
   for (const entrypoint of Object.keys(entrypoints)) {
     const { output } = await buildRuntime({
       files: sourceFilesPreBuild,
@@ -111,38 +112,10 @@ export function convertRuntimeToPlugin(
       },
       meta: {
         avoidTopLevelInstall: true,
+        skipDownload: true,
       },
     });
 
-    // Legacy Runtimes tend to pollute the `workPath` with compiled results,
-    // because the `workPath` used to be a place that was a place where they could
-    // just put anything, but nowadays it's the working directory of the `vercel build`
-    // command, which is the place where the developer keeps their source files,
-    // so we don't want to pollute this space unnecessarily. That means we have to clean
-    // up files that were created by the build, which is done further below.
-    const sourceFilesAfterBuild = await getSourceFiles(
-      workPath,
-      ignoreFilter
-    );
-
-    // Further down, we will need the filename of the Lambda handler
-    // for placing it inside `server/pages/api`, but because Legacy Runtimes
-    // don't expose the filename directly, we have to construct it
-    // from the handler name, and then find the matching file further below,
-    // because we don't yet know its extension here.
-    const handler = output.handler;
-    const handlerMethod = handler.split('.').reverse()[0];
-    const handlerFileName = handler.replace(`.${handlerMethod}`, '');
-
-    pages[entrypoint] = {
-      handler: handler,
-      runtime: output.runtime,
-      memory: output.memory,
-      maxDuration: output.maxDuration,
-      environment: output.environment,
-      allowQuery: output.allowQuery,
-    };
-
     // @ts-ignore This symbol is a private API
     const lambdaFiles: Files = output[FILES_SYMBOL];
 
@@ -156,95 +129,168 @@ export function convertRuntimeToPlugin(
       }
     }
 
-    const handlerFilePath = Object.keys(lambdaFiles).find(item => {
-      return parse(item).name === handlerFileName;
-    });
-
-    const handlerFileOrigin = lambdaFiles[handlerFilePath || ''].fsPath;
-
-    if (!handlerFileOrigin) {
-      throw new Error(
-        `Could not find a handler file. Please ensure that the list of \`files\` defined for the returned \`Lambda\` contains a file with the name ${handlerFileName} (+ any extension).`
-      );
-    }
-
-    const entry = join(workPath, '.output', 'server', 'pages', entrypoint);
+    let handlerFileBase = output.handler;
+    let handlerFile = lambdaFiles[handlerFileBase];
+    let handlerHasImport = false;
+
+    const { handler } = output;
+    const handlerMethod = handler.split('.').pop();
+    const handlerFileName = handler.replace(`.${handlerMethod}`, '');
+
+    // For compiled languages, the launcher file for the Lambda generated
+    // by the Legacy Runtime matches the `handler` defined for it, but for
+    // interpreted languages, the `handler` consists of the launcher file name
+    // without an extension, plus the name of the method inside of that file
+    // that should be invoked, so we have to construct the file path explicitly.
+    if (!handlerFile) {
+      handlerFileBase = handlerFileName + ext;
+      handlerFile = lambdaFiles[handlerFileBase];
+      handlerHasImport = true;
+    }
+
+    if (!handlerFile || !handlerFile.fsPath) {
+      throw new Error(
+        `Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`
+      );
+    }
+
+    const handlerExtName = extname(handlerFile.fsPath);
+
+    const entryBase = basename(entrypoint).replace(ext, handlerExtName);
+    const entryPath = join(dirname(entrypoint), entryBase);
+    const entry = join(entryRoot, entryPath);
 
     // Create the parent directory of the API Route that will be created
     // for the current entrypoint inside of `.output/server/pages/api`.
     await fs.ensureDir(dirname(entry));
-    await linkOrCopy(handlerFileOrigin, entry);
 
-    const toRemove = [];
-
-    // You can find more details about this at the point where the
-    // `sourceFilesAfterBuild` is created originally.
-    for (const file in sourceFilesAfterBuild) {
-      if (!sourceFilesPreBuild[file]) {
-        const path = sourceFilesAfterBuild[file].fsPath;
-        toRemove.push(fs.remove(path));
-      }
-    }
-
-    await Promise.all(toRemove);
-
-    const tracedFiles: {
-      absolutePath: string;
-      relativePath: string;
-    }[] = [];
-
-    Object.entries(lambdaFiles).forEach(async ([relPath, file]) => {
-      const newPath = join(traceDir, relPath);
-
-      // The handler was already moved into position above.
-      if (relPath === handlerFilePath) {
-        return;
-      }
-
-      tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
-
-      if (file.fsPath) {
-        await linkOrCopy(file.fsPath, newPath);
-      } else if (file.type === 'FileBlob') {
-        const { data, mode } = file as FileBlob;
-        await fs.writeFile(newPath, data, { mode });
-      } else {
-        throw new Error(`Unknown file type: ${file.type}`);
-      }
-    });
-
-    const nft = join(
-      workPath,
-      '.output',
-      'server',
-      'pages',
-      `${entrypoint}.nft.json`
-    );
+    // For compiled languages, the launcher file will be binary and therefore
+    // won't try to import a user-provided request handler (instead, it will
+    // contain it). But for interpreted languages, the launcher might try to
+    // load a user-provided request handler from the source file instead of bundling
+    // it, so we have to adjust the import statement inside the launcher to point
+    // to the respective source file. Previously, Legacy Runtimes simply expected
+    // the user-provided request-handler to be copied right next to the launcher,
+    // but with the new File System API, files won't be moved around unnecessarily.
+    if (handlerHasImport) {
+      const { fsPath } = handlerFile;
+      const encoding = 'utf-8';
+
+      // This is the true directory of the user-provided request handler in the
+      // source files, so that's what we will use as an import path in the launcher.
+      const locationPrefix = relative(entry, outputPath);
+
+      let handlerContent = await fs.readFile(fsPath, encoding);
+
+      const importPaths = [
+        // This is the full entrypoint path, like `./api/test.py`. In our tests
+        // Python didn't support importing from a parent directory without using different
+        // code in the launcher that registers it as a location for modules and then changing
+        // the importing syntax, but continuing to import it like before seems to work. If
+        // other languages need this, we should consider excluding Python explicitly.
+        // `./${entrypoint}`,
+
+        // This is the entrypoint path without extension, like `api/test`
+        entrypoint.slice(0, -ext.length),
+      ];
+
+      // Generate a list of regular expressions that we can use for
+      // finding matches, but only allow matches if the import path is
+      // wrapped inside single (') or double quotes (").
+      const patterns = importPaths.map(path => {
+        // eslint-disable-next-line no-useless-escape
+        return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
+      });
+
+      let replacedMatch = null;
+
+      for (const pattern of patterns) {
+        const newContent = handlerContent.replace(
+          pattern,
+          (_, p1, p2, p3) => {
+            return `${p1}${join(locationPrefix, p2)}${p3}`;
+          }
+        );
+
+        if (newContent !== handlerContent) {
+          debug(
+            `Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`
+          );
+
+          handlerContent = newContent;
+          replacedMatch = true;
+        }
+      }
+
+      if (!replacedMatch) {
+        new Error(
+          `No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`
+        );
+      }
+
+      await fs.writeFile(entry, handlerContent, encoding);
+    } else {
+      await fs.copy(handlerFile.fsPath, entry);
+    }
+
+    // Legacy Runtimes based on interpreted languages will create a new launcher file
+    // for every entrypoint, but they will create each one inside `workPath`, which means that
+    // the launcher for one entrypoint will overwrite the launcher provided for the previous
+    // entrypoint. That's why, above, we copy the file contents into the new destination (and
+    // optionally transform them along the way), instead of linking. We then also want to remove
+    // the copy origin right here, so that the `workPath` doesn't contain a useless launcher file
+    // once the build has finished running.
+    await fs.remove(handlerFile.fsPath);
+    debug(`Removed temporary file "${handlerFile.fsPath}"`);
+
+    const nft = `${entry}.nft.json`;
 
     const json = JSON.stringify({
-      version: 1,
-      files: tracedFiles.map(file => ({
-        input: normalizePath(relative(nft, file.absolutePath)),
-        output: normalizePath(file.relativePath),
-      })),
+      version: 2,
+      files: Object.keys(lambdaFiles)
+        .map(file => {
+          const { fsPath } = lambdaFiles[file];
+
+          if (!fsPath) {
+            throw new Error(
+              `File "${file}" is missing valid \`fsPath\` property`
+            );
+          }
+
+          // The handler was already moved into position above.
+          if (file === handlerFileBase) {
+            return;
+          }
+
+          return normalizePath(relative(dirname(nft), fsPath));
+        })
+        .filter(Boolean),
     });
 
     await fs.ensureDir(dirname(nft));
     await fs.writeFile(nft, json);
+
+    // Add an entry that will later on be added to the `functions-manifest.json`
+    // file that is placed inside of the `.output` directory.
+    pages[normalizePath(entryPath)] = {
+      // Because the underlying file used as a handler was placed
+      // inside `.output/server/pages/api`, it no longer has the name it originally
+      // had and is now named after the API Route that it's responsible for,
+      // so we have to adjust the name of the Lambda handler accordingly.
+      handler: handler.replace(handlerFileName, parse(entry).name),
+      runtime: output.runtime,
+      memory: output.memory,
+      maxDuration: output.maxDuration,
+      environment: output.environment,
+      allowQuery: output.allowQuery,
+    };
   }
 
   // Add any Serverless Functions that were exposed by the Legacy Runtime
   // to the `functions-manifest.json` file provided in `.output`.
   await updateFunctionsManifest({ workPath, pages });
 };
 }
 
 async function linkOrCopy(existingPath: string, newPath: string) {
   try {
     await fs.createLink(existingPath, newPath);
   } catch (err: any) {
     if (err.code !== 'EEXIST') {
       await fs.copyFile(existingPath, newPath);
     }
   }
 }
 
 async function readJson(filePath: string): Promise<{ [key: string]: any }> {
   try {
     const str = await fs.readFile(filePath, 'utf8');

@@ -275,7 +321,7 @@ export async function updateFunctionsManifest({
   );
   const functionsManifest = await readJson(functionsManifestPath);
 
-  if (!functionsManifest.version) functionsManifest.version = 1;
+  if (!functionsManifest.version) functionsManifest.version = 2;
   if (!functionsManifest.pages) functionsManifest.pages = {};
 
   for (const [pageKey, pageConfig] of Object.entries(pages)) {
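For orientation, this is roughly what one of the version-2 `.nft.json` trace files written by the rewritten loop above would contain. The sketch is hypothetical, but it mirrors the test expectations quoted in the next section: plain relative source paths, instead of the version-1 `{ input, output }` objects that pointed into `.output/inputs`:

    {
      "version": 2,
      "files": [
        "../../../../api/index.py",
        "../../../../util/date.py",
        "../../../../util/math.py"
      ]
    }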
convert-runtime-to-plugin tests (@vercel/build-utils):

@@ -62,7 +62,6 @@ describe('convert-runtime-to-plugin', () => {
     return { output: lambda };
   };
 
-  const lambdaFiles = await fsToJson(workPath);
   const packageName = 'vercel-plugin-python';
   const build = await convertRuntimeToPlugin(buildRuntime, packageName, ext);
 
@@ -70,14 +69,8 @@ describe('convert-runtime-to-plugin', () => {
 
   const output = await fsToJson(join(workPath, '.output'));
 
-  delete lambdaFiles['vercel.json'];
-  delete lambdaFiles['vc__handler__python.py'];
-
   expect(output).toMatchObject({
     'functions-manifest.json': expect.stringContaining('{'),
-    inputs: {
-      'api-routes-python': lambdaFiles,
-    },
     server: {
       pages: {
         api: {
@@ -96,50 +89,30 @@ describe('convert-runtime-to-plugin', () => {
 
   const funcManifest = JSON.parse(output['functions-manifest.json']);
   expect(funcManifest).toMatchObject({
-    version: 1,
+    version: 2,
     pages: {
-      'api/index.py': lambdaOptions,
-      'api/users/get.py': lambdaOptions,
-      'api/users/post.py': { ...lambdaOptions, memory: 512 },
+      'api/index.py': { ...lambdaOptions, handler: 'index.vc_handler' },
+      'api/users/get.py': { ...lambdaOptions, handler: 'get.vc_handler' },
+      'api/users/post.py': {
+        ...lambdaOptions,
+        handler: 'post.vc_handler',
+        memory: 512,
+      },
     },
   });
 
   const indexJson = JSON.parse(output.server.pages.api['index.py.nft.json']);
   expect(indexJson).toMatchObject({
-    version: 1,
+    version: 2,
     files: [
-      {
-        input: `../../../../inputs/api-routes-python/api/db/[id].py`,
-        output: 'api/db/[id].py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/index.py`,
-        output: 'api/index.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
-        output: 'api/project/[aid]/[bid]/index.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/users/get.py`,
-        output: 'api/users/get.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/users/post.py`,
-        output: 'api/users/post.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/file.txt`,
-        output: 'file.txt',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/util/date.py`,
-        output: 'util/date.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/util/math.py`,
-        output: 'util/math.py',
-      },
+      '../../../../api/db/[id].py',
+      '../../../../api/index.py',
+      '../../../../api/project/[aid]/[bid]/index.py',
+      '../../../../api/users/get.py',
+      '../../../../api/users/post.py',
+      '../../../../file.txt',
+      '../../../../util/date.py',
+      '../../../../util/math.py',
    ],
   });
 
@@ -147,40 +120,16 @@ describe('convert-runtime-to-plugin', () => {
     output.server.pages.api.users['get.py.nft.json']
   );
   expect(getJson).toMatchObject({
-    version: 1,
+    version: 2,
     files: [
-      {
-        input: `../../../../../inputs/api-routes-python/api/db/[id].py`,
-        output: 'api/db/[id].py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/index.py`,
-        output: 'api/index.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
-        output: 'api/project/[aid]/[bid]/index.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/users/get.py`,
-        output: 'api/users/get.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/users/post.py`,
-        output: 'api/users/post.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/file.txt`,
-        output: 'file.txt',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/util/date.py`,
-        output: 'util/date.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/util/math.py`,
-        output: 'util/math.py',
-      },
+      '../../../../../api/db/[id].py',
+      '../../../../../api/index.py',
+      '../../../../../api/project/[aid]/[bid]/index.py',
+      '../../../../../api/users/get.py',
+      '../../../../../api/users/post.py',
+      '../../../../../file.txt',
+      '../../../../../util/date.py',
+      '../../../../../util/math.py',
     ],
   });
 
@@ -188,40 +137,16 @@ describe('convert-runtime-to-plugin', () => {
     output.server.pages.api.users['post.py.nft.json']
   );
   expect(postJson).toMatchObject({
-    version: 1,
+    version: 2,
     files: [
-      {
-        input: `../../../../../inputs/api-routes-python/api/db/[id].py`,
-        output: 'api/db/[id].py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/index.py`,
-        output: 'api/index.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
-        output: 'api/project/[aid]/[bid]/index.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/users/get.py`,
-        output: 'api/users/get.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/api/users/post.py`,
-        output: 'api/users/post.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/file.txt`,
-        output: 'file.txt',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/util/date.py`,
-        output: 'util/date.py',
-      },
-      {
-        input: `../../../../../inputs/api-routes-python/util/math.py`,
-        output: 'util/math.py',
-      },
+      '../../../../../api/db/[id].py',
+      '../../../../../api/index.py',
+      '../../../../../api/project/[aid]/[bid]/index.py',
+      '../../../../../api/users/get.py',
+      '../../../../../api/users/post.py',
+      '../../../../../file.txt',
+      '../../../../../util/date.py',
+      '../../../../../util/math.py',
     ],
   });
Vercel CLI README:

@@ -34,7 +34,7 @@ Finally, [connect your Git repository to Vercel](https://vercel.com/docs/git) an
 
 ## Documentation
 
-For details on how to use Vercel CLI, check out our [documentation](https://vercel.com/docs).
+For details on how to use Vercel CLI, check out our [documentation](https://vercel.com/docs/cli).
 
 ## Local Development
 
vercel (CLI) package.json:

@@ -1,6 +1,6 @@
 {
   "name": "vercel",
-  "version": "23.1.3-canary.52",
+  "version": "23.1.3-canary.67",
   "preferGlobal": true,
   "license": "Apache-2.0",
   "description": "The command-line interface for Vercel",
@@ -43,14 +43,14 @@
     "node": ">= 12"
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.31",
+    "@vercel/build-utils": "2.12.3-canary.42",
     "@vercel/go": "1.2.4-canary.4",
     "@vercel/node": "1.12.2-canary.7",
-    "@vercel/python": "2.1.2-canary.1",
-    "@vercel/ruby": "1.2.8-canary.6",
+    "@vercel/python": "2.1.2-canary.2",
+    "@vercel/ruby": "1.2.10-canary.0",
     "update-notifier": "4.1.0",
-    "vercel-plugin-middleware": "0.0.0-canary.7",
-    "vercel-plugin-node": "1.12.2-canary.23"
+    "vercel-plugin-middleware": "0.0.0-canary.19",
+    "vercel-plugin-node": "1.12.2-canary.34"
   },
   "devDependencies": {
     "@next/env": "11.1.2",
@@ -90,7 +90,7 @@
     "@types/update-notifier": "5.1.0",
     "@types/which": "1.3.2",
     "@types/write-json-file": "2.2.1",
-    "@vercel/frameworks": "0.5.1-canary.16",
+    "@vercel/frameworks": "0.5.1-canary.17",
     "@vercel/ncc": "0.24.0",
     "@vercel/nft": "0.17.0",
     "@zeit/fun": "0.11.2",
vercel build command (packages/cli/src/commands/build):

@@ -5,16 +5,16 @@ import {
   GlobOptions,
   scanParentDirs,
   spawnAsync,
+  glob as buildUtilsGlob,
 } from '@vercel/build-utils';
 import { nodeFileTrace } from '@vercel/nft';
 import Sema from 'async-sema';
 import chalk from 'chalk';
 import { SpawnOptions } from 'child_process';
 import { assert } from 'console';
-import { createHash } from 'crypto';
 import fs from 'fs-extra';
 import ogGlob from 'glob';
-import { dirname, isAbsolute, join, parse, relative, resolve } from 'path';
+import { dirname, isAbsolute, join, parse, relative } from 'path';
 import pluralize from 'pluralize';
 import Client from '../util/client';
 import { VercelConfig } from '../util/dev/types';
@@ -353,13 +353,19 @@ export default async function main(client: Client) {
   }
 
   // We cannot rely on the `framework` alone, as it might be a static export,
-  // and the current build might use a differnt project that's not in the settings.
+  // and the current build might use a different project that's not in the settings.
   const isNextOutput = Boolean(dotNextDir);
-  const outputDir = isNextOutput ? OUTPUT_DIR : join(OUTPUT_DIR, 'static');
+  const nextExport = await getNextExportStatus(dotNextDir);
+  const outputDir =
+    isNextOutput && !nextExport ? OUTPUT_DIR : join(OUTPUT_DIR, 'static');
+  const getDistDir = framework.getFsOutputDir || framework.getOutputDirName;
   const distDir =
+    (nextExport?.exportDetail.outDirectory
+      ? relative(cwd, nextExport.exportDetail.outDirectory)
+      : false) ||
     dotNextDir ||
     userOutputDirectory ||
-    (await framework.getFsOutputDir(cwd));
+    (await getDistDir(cwd));
 
   await fs.ensureDir(join(cwd, outputDir));
 
@@ -443,7 +449,53 @@ export default async function main(client: Client) {
   }
 
   // Special Next.js processing.
-  if (isNextOutput) {
+  if (nextExport) {
+    client.output.debug('Found `next export` output.');
+
+    const htmlFiles = await buildUtilsGlob(
+      '**/*.html',
+      join(cwd, OUTPUT_DIR, 'static')
+    );
+
+    if (nextExport.exportDetail.success !== true) {
+      client.output.error(
+        `Export of Next.js app failed. Please check your build logs.`
+      );
+      process.exit(1);
+    }
+
+    await fs.mkdirp(join(cwd, OUTPUT_DIR, 'server', 'pages'));
+    await fs.mkdirp(join(cwd, OUTPUT_DIR, 'static'));
+
+    await Promise.all(
+      Object.keys(htmlFiles).map(async fileName => {
+        await sema.acquire();
+
+        const input = join(cwd, OUTPUT_DIR, 'static', fileName);
+        const target = join(cwd, OUTPUT_DIR, 'server', 'pages', fileName);
+
+        await fs.mkdirp(dirname(target));
+
+        await fs.promises.rename(input, target).finally(() => {
+          sema.release();
+        });
+      })
+    );
+
+    for (const file of [
+      'BUILD_ID',
+      'images-manifest.json',
+      'routes-manifest.json',
+      'build-manifest.json',
+    ]) {
+      const input = join(nextExport.dotNextDir, file);
+
+      if (fs.existsSync(input)) {
+        // Do not use `smartCopy`, since we want to overwrite if they already exist.
+        await fs.copyFile(input, join(OUTPUT_DIR, file));
+      }
+    }
+  } else if (isNextOutput) {
     // The contents of `.output/static` should be placed inside of `.output/static/_next/static`
     const tempStatic = '___static';
     await fs.rename(
@@ -584,30 +636,15 @@ export default async function main(client: Client) {
         ],
       });
       fileList.delete(relative(cwd, f));
-      await resolveNftToOutput({
-        client,
-        baseDir,
-        outputDir: OUTPUT_DIR,
-        nftFileName: f.replace(ext, '.js.nft.json'),
-        distDir,
-        nft: {
-          version: 1,
-          files: Array.from(fileList).map(fileListEntry =>
-            relative(dir, fileListEntry)
-          ),
-        },
-      });
-    }
-  } else {
-    for (let f of nftFiles) {
-      const json = await fs.readJson(f);
-      await resolveNftToOutput({
-        client,
-        baseDir,
-        outputDir: OUTPUT_DIR,
-        nftFileName: f,
-        nft: json,
-        distDir,
-      });
-    }
-  }
+
+      const nftFileName = f.replace(ext, '.js.nft.json');
+      client.output.debug(`Creating ${nftFileName}`);
+
+      await fs.writeJSON(nftFileName, {
+        version: 2,
+        files: Array.from(fileList).map(fileListEntry =>
+          relative(dir, fileListEntry)
+        ),
+      });
+    }
+  }
@@ -631,15 +668,7 @@ export default async function main(client: Client) {
       const originalPath = join(requiredServerFilesJson.appDir, i);
       const relPath = join(OUTPUT_DIR, relative(distDir, originalPath));
 
-      const absolutePath = join(cwd, relPath);
-      const output = relative(baseDir, absolutePath);
-
-      return relPath === output
-        ? relPath
-        : {
-            input: relPath,
-            output,
-          };
+      return relPath;
     }),
   });
 }
@@ -797,83 +826,51 @@ async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
 }
 
 /**
- * Computes a hash for the given buf.
- *
- * @param {Buffer} file data
- * @return {String} hex digest
+ * Files will only exist when `next export` was used.
  */
-function hash(buf: Buffer): string {
-  return createHash('sha1').update(buf).digest('hex');
-}
-
-interface NftFile {
-  version: number;
-  files: (string | { input: string; output: string })[];
-}
-
-// resolveNftToOutput takes nft file and moves all of its trace files
-// into the specified directory + `inputs`, (renaming them to their hash + ext) and
-// subsequently updating the original nft file accordingly. This is done
-// to make the `.output` directory be self-contained, so that it works
-// properly with `vc --prebuilt`.
-async function resolveNftToOutput({
-  client,
-  baseDir,
-  outputDir,
-  nftFileName,
-  distDir,
-  nft,
-}: {
-  client: Client;
-  baseDir: string;
-  outputDir: string;
-  nftFileName: string;
-  distDir: string;
-  nft: NftFile;
-}) {
-  client.output.debug(`Processing and resolving ${nftFileName}`);
-  await fs.ensureDir(join(outputDir, 'inputs'));
-  const newFilesList: NftFile['files'] = [];
-
-  // If `distDir` is a subdirectory, then the input has to be resolved to where the `.output` directory will be.
-  const relNftFileName = relative(outputDir, nftFileName);
-  const origNftFilename = join(distDir, relNftFileName);
-
-  if (relNftFileName.startsWith('cache/')) {
-    // No need to process the `cache/` directory.
-    // Paths in it might also not be relative to `cache` itself.
-    return;
-  }
-
-  for (let fileEntity of nft.files) {
-    const relativeInput =
-      typeof fileEntity === 'string' ? fileEntity : fileEntity.input;
-    const fullInput = resolve(join(parse(origNftFilename).dir, relativeInput));
-
-    // if the resolved path is NOT in the .output directory we move in it there
-    if (!fullInput.includes(distDir)) {
-      const { ext } = parse(fullInput);
-      const raw = await fs.readFile(fullInput);
-      const newFilePath = join(outputDir, 'inputs', hash(raw) + ext);
-      smartCopy(client, fullInput, newFilePath);
-
-      // We have to use `baseDir` instead of `cwd`, because we want to
-      // mount everything from there (especially `node_modules`).
-      // This is important for NPM Workspaces where `node_modules` is not
-      // in the directory of the workspace.
-      const output = relative(baseDir, fullInput).replace('.output', '.next');
-
-      newFilesList.push({
-        input: relative(parse(nftFileName).dir, newFilePath),
-        output,
-      });
-    } else {
-      newFilesList.push(relativeInput);
-    }
-  }
-
-  // Update the .nft.json with new input and output mapping
-  await fs.writeJSON(nftFileName, {
-    ...nft,
-    files: newFilesList,
-  });
+async function getNextExportStatus(dotNextDir: string | null) {
+  if (!dotNextDir) {
+    return null;
+  }
+
+  const exportDetail: {
+    success: boolean;
+    outDirectory: string;
+  } | null = await fs
+    .readJson(join(dotNextDir, 'export-detail.json'))
+    .catch(error => {
+      if (error.code === 'ENOENT') {
+        return null;
+      }
+
+      throw error;
+    });
+
+  if (!exportDetail) {
+    return null;
+  }
+
+  const exportMarker: {
+    version: 1;
+    exportTrailingSlash: boolean;
+    hasExportPathMap: boolean;
+  } | null = await fs
+    .readJSON(join(dotNextDir, 'export-marker.json'))
+    .catch(error => {
+      if (error.code === 'ENOENT') {
+        return null;
+      }
+
+      throw error;
+    });
+
+  return {
+    dotNextDir,
+    exportDetail,
+    exportMarker: {
+      trailingSlash: exportMarker?.hasExportPathMap
+        ? exportMarker.exportTrailingSlash
+        : false,
+    },
+  };
 }
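The diff never shows the two JSON files that `getNextExportStatus` reads, so the following is only a sketch inferred from the type annotations above (`success` and `outDirectory` for the detail file; `version`, `hasExportPathMap`, and `exportTrailingSlash` for the marker). The values are assumptions for illustration:

    // .next/export-detail.json (assumed example)
    { "success": true, "outDirectory": "/path/to/project/out" }

    // .next/export-marker.json (assumed example)
    { "version": 1, "hasExportPathMap": false, "exportTrailingSlash": false }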
CLI deploy command (packages/cli):

@@ -447,6 +447,7 @@ export default async (client: Client) => {
     forceNew: argv['--force'],
     withCache: argv['--with-cache'],
     prebuilt: argv['--prebuilt'],
+    rootDirectory,
     quiet,
     wantsPublic: argv['--public'] || localConfig.public,
     isFile,

processDeployment (packages/cli):

@@ -52,6 +52,7 @@ export default async function processDeployment({
   isSettingUpProject: boolean;
   skipAutoDetectionConfirmation?: boolean;
   cwd?: string;
+  rootDirectory?: string;
 }) {
   let {
     now,
@@ -64,6 +65,7 @@ export default async function processDeployment({
     nowConfig,
     quiet,
     prebuilt,
+    rootDirectory,
   } = args;
 
   const { debug } = output;
@@ -86,6 +88,7 @@ export default async function processDeployment({
     force,
     withCache,
     prebuilt,
+    rootDirectory,
     skipAutoDetectionConfirmation,
   };
 

Now client (packages/cli):

@@ -37,6 +37,7 @@ export interface CreateOptions {
   project?: string;
   wantsPublic: boolean;
   prebuilt?: boolean;
+  rootDirectory?: string;
   meta: Dictionary<string>;
   regions?: string[];
   quiet?: boolean;
@@ -113,6 +114,7 @@ export default class Now extends EventEmitter {
     name,
     project,
     prebuilt = false,
+    rootDirectory,
     wantsPublic,
     meta,
     regions,
@@ -168,6 +170,7 @@ export default class Now extends EventEmitter {
     skipAutoDetectionConfirmation,
     cwd,
     prebuilt,
+    rootDirectory,
   });
 
   if (deployment && deployment.warnings) {
6  packages/cli/test/fixtures/unit/edge-middleware-strict/_middleware.ts  (vendored, new file)

@@ -0,0 +1,6 @@
+export default function (req) {
+  const isStrict = (function () {
+    return !this;
+  })();
+  return new Response('is strict mode? ' + (isStrict ? 'yes' : 'no'));
+}

DevServer tests (packages/cli):

@@ -385,4 +385,13 @@ describe('DevServer', () => {
       );
     })
   );
+
+  it(
+    'should run middleware in strict mode',
+    testFixture('edge-middleware-strict', async server => {
+      const response = await fetch(`${server.address}/index.html`);
+      const body = await response.text();
+      expect(body).toStrictEqual('is strict mode? yes');
+    })
+  );
 });
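A note on the fixture above, since it is terse: inside an ordinary function call, `this` is the global object in sloppy mode but `undefined` in strict mode, so the IIFE's `!this` evaluates to `true` exactly when the middleware was evaluated in strict mode, which is what the new DevServer test asserts.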
2  packages/client/.gitignore  (vendored)

@@ -6,3 +6,5 @@ node_modules
 !tests/fixtures/nowignore/node_modules
 !tests/fixtures/vercelignore-allow-nodemodules/node_modules
 !tests/fixtures/vercelignore-allow-nodemodules/sub/node_modules
+!tests/fixtures/file-system-api/.output
+!tests/fixtures/file-system-api-root-directory/**/.output
@vercel/client package.json:

@@ -1,6 +1,6 @@
 {
   "name": "@vercel/client",
-  "version": "10.2.3-canary.32",
+  "version": "10.2.3-canary.45",
   "main": "dist/index.js",
   "typings": "dist/index.d.ts",
   "homepage": "https://vercel.com",
@@ -40,7 +40,7 @@
   ]
 },
 "dependencies": {
-  "@vercel/build-utils": "2.12.3-canary.31",
+  "@vercel/build-utils": "2.12.3-canary.42",
   "@zeit/fetch": "5.2.0",
   "async-retry": "1.2.3",
   "async-sema": "3.0.0",
createDeployment (@vercel/client, index.ts):

@@ -1,12 +1,12 @@
 import { lstatSync } from 'fs-extra';
 
 import { relative, isAbsolute } from 'path';
-import hashes, { mapToObject } from './utils/hashes';
+import { hashes, mapToObject, resolveNftJsonFiles } from './utils/hashes';
 import { upload } from './upload';
 import { buildFileTree, createDebug, parseVercelConfig } from './utils';
 import { DeploymentError } from './errors';
 import {
-  NowConfig,
+  VercelConfig,
   VercelClientOptions,
   DeploymentOptions,
   DeploymentEventType,
@@ -16,7 +16,7 @@ export default function buildCreateDeployment() {
   return async function* createDeployment(
     clientOptions: VercelClientOptions,
     deploymentOptions: DeploymentOptions = {},
-    nowConfig: NowConfig = {}
+    nowConfig: VercelConfig = {}
   ): AsyncIterableIterator<{ type: DeploymentEventType; payload: any }> {
     const { path } = clientOptions;
 
@@ -74,12 +74,7 @@ export default function buildCreateDeployment() {
       debug(`Provided 'path' is a single file`);
     }
 
-    let { fileList } = await buildFileTree(
-      path,
-      clientOptions.isDirectory,
-      debug,
-      clientOptions.prebuilt
-    );
+    let { fileList } = await buildFileTree(path, clientOptions, debug);
 
     let configPath: string | undefined;
     if (!nowConfig) {
@@ -114,7 +109,11 @@ export default function buildCreateDeployment() {
       };
     }
 
-    const files = await hashes(fileList);
+    const hashedFileMap = await hashes(fileList);
+    const nftFileList = clientOptions.prebuilt
+      ? await resolveNftJsonFiles(hashedFileMap)
+      : [];
+    const files = await hashes(nftFileList, hashedFileMap);
 
     debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`);
     yield { type: 'hashes-calculated', payload: mapToObject(files) };
@vercel/client types.ts:

@@ -15,6 +15,7 @@ export interface VercelClientOptions {
   apiUrl?: string;
   force?: boolean;
   prebuilt?: boolean;
+  rootDirectory?: string;
   withCache?: boolean;
   userAgent?: string;
   defaultName?: string;
@vercel/client utils/hashes.ts:

@@ -1,6 +1,7 @@
 import { createHash } from 'crypto';
 import fs from 'fs-extra';
 import { Sema } from 'async-sema';
+import { join, dirname } from 'path';
 
 export interface DeploymentFile {
   names: string[];
@@ -15,9 +16,7 @@ export interface DeploymentFile {
  * @return {String} hex digest
  */
 function hash(buf: Buffer): string {
-  return createHash('sha1')
-    .update(buf)
-    .digest('hex');
+  return createHash('sha1').update(buf).digest('hex');
 }
 
 /**
@@ -39,34 +38,68 @@ export const mapToObject = (
 /**
  * Computes hashes for the contents of each file given.
  *
- * @param {Array} of {String} full paths
- * @return {Map}
+ * @param files - absolute file paths
+ * @param map - optional map of files to append
+ * @return Map of hash digest to file object
  */
-async function hashes(files: string[]): Promise<Map<string, DeploymentFile>> {
-  const map = new Map<string, DeploymentFile>();
+export async function hashes(
+  files: string[],
+  map = new Map<string, DeploymentFile>()
+): Promise<Map<string, DeploymentFile>> {
   const semaphore = new Sema(100);
 
   await Promise.all(
-    files.map(
-      async (name: string): Promise<void> => {
-        await semaphore.acquire();
-        const data = await fs.readFile(name);
-        const { mode } = await fs.stat(name);
-
-        const h = hash(data);
-        const entry = map.get(h);
-
-        if (entry) {
-          entry.names.push(name);
-        } else {
-          map.set(h, { names: [name], data, mode });
-        }
-
-        semaphore.release();
-      }
-    )
+    files.map(async (name: string): Promise<void> => {
+      await semaphore.acquire();
+      const data = await fs.readFile(name);
+      const { mode } = await fs.stat(name);
+
+      const h = hash(data);
+      const entry = map.get(h);
+
+      if (entry) {
+        if (entry.names[0] !== name) {
+          entry.names.push(name);
+        }
+      } else {
+        map.set(h, { names: [name], data, mode });
+      }
+
+      semaphore.release();
+    })
   );
   return map;
 }
 
-export default hashes;
+export async function resolveNftJsonFiles(
+  hashedFiles: Map<string, DeploymentFile>
+): Promise<string[]> {
+  const semaphore = new Sema(100);
+  const existingFiles = Array.from(hashedFiles.values());
+  const resolvedFiles = new Set<string>();
+
+  await Promise.all(
+    existingFiles.map(async file => {
+      await semaphore.acquire();
+      const fsPath = file.names[0];
+      if (fsPath.endsWith('.nft.json')) {
+        const json = file.data.toString('utf8');
+        const { version, files } = JSON.parse(json) as {
+          version: number;
+          files: string[] | { input: string; output: string }[];
+        };
+        if (version === 1 || version === 2) {
+          for (let f of files) {
+            const relPath = typeof f === 'string' ? f : f.input;
+            resolvedFiles.add(join(dirname(fsPath), relPath));
+          }
+        } else {
+          console.error(`Invalid nft.json version: ${version}`);
+        }
+      }
+      semaphore.release();
+    })
+  );
 
+  return Array.from(resolvedFiles);
+}
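Read together with the `createDeployment` hunk above, the intended calling pattern for these two helpers is a two-pass hash. A minimal sketch of the flow, mirroring the `index.ts` change:

    import { hashes, resolveNftJsonFiles } from './utils/hashes';

    // First pass: hash every file found for the deployment.
    const hashedFileMap = await hashes(fileList);

    // Expand any `.nft.json` traces into the source paths they reference.
    const nftFileList = await resolveNftJsonFiles(hashedFileMap);

    // Second pass: hash the referenced files into the same map,
    // deduplicated by SHA-1 digest (hence the new `entry.names[0]` guard).
    const files = await hashes(nftFileList, hashedFileMap);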
@vercel/client utils (buildFileTree / getVercelIgnore / prepareFiles):

@@ -1,7 +1,7 @@
 import { DeploymentFile } from './hashes';
 import { FetchOptions } from '@zeit/fetch';
 import { nodeFetch, zeitFetch } from './fetch';
-import { join, sep, relative } from 'path';
+import { join, sep, relative, posix } from 'path';
 import { URL } from 'url';
 import ignore from 'ignore';
 type Ignore = ReturnType<typeof ignore>;
@@ -81,13 +81,16 @@ const maybeRead = async function <T>(path: string, default_: T) {
 
 export async function buildFileTree(
   path: string | string[],
-  isDirectory: boolean,
-  debug: Debug,
-  prebuilt?: boolean
+  {
+    isDirectory,
+    prebuilt,
+    rootDirectory,
+  }: Pick<VercelClientOptions, 'isDirectory' | 'prebuilt' | 'rootDirectory'>,
+  debug: Debug
 ): Promise<{ fileList: string[]; ignoreList: string[] }> {
   const ignoreList: string[] = [];
   let fileList: string[];
-  let { ig, ignores } = await getVercelIgnore(path, prebuilt);
+  let { ig, ignores } = await getVercelIgnore(path, prebuilt, rootDirectory);
 
   debug(`Found ${ignores.length} rules in .vercelignore`);
   debug('Building file tree...');
@@ -119,37 +122,50 @@ export async function buildFileTree(
 
 export async function getVercelIgnore(
   cwd: string | string[],
-  prebuilt?: boolean
+  prebuilt?: boolean,
+  rootDirectory?: string
 ): Promise<{ ig: Ignore; ignores: string[] }> {
-  const ignores: string[] = prebuilt
-    ? ['*', '!.output', '!.output/**']
-    : [
-        '.hg',
-        '.git',
-        '.gitmodules',
-        '.svn',
-        '.cache',
-        '.next',
-        '.now',
-        '.vercel',
-        '.npmignore',
-        '.dockerignore',
-        '.gitignore',
-        '.*.swp',
-        '.DS_Store',
-        '.wafpicke-*',
-        '.lock-wscript',
-        '.env.local',
-        '.env.*.local',
-        '.venv',
-        'npm-debug.log',
-        'config.gypi',
-        'node_modules',
-        '__pycache__',
-        'venv',
-        'CVS',
-        '.output',
-      ];
+  let ignores: string[] = [];
+
+  const outputDir = posix.join(rootDirectory || '', '.output');
+
+  if (prebuilt) {
+    ignores.push('*');
+    const parts = outputDir.split('/');
+    parts.forEach((_, i) => {
+      const level = parts.slice(0, i + 1).join('/');
+      ignores.push(`!${level}`);
+    });
+    ignores.push(`!${outputDir}/**`);
+  } else {
+    ignores = [
+      '.hg',
+      '.git',
+      '.gitmodules',
+      '.svn',
+      '.cache',
+      '.next',
+      '.now',
+      '.vercel',
+      '.npmignore',
+      '.dockerignore',
+      '.gitignore',
+      '.*.swp',
+      '.DS_Store',
+      '.wafpicke-*',
+      '.lock-wscript',
+      '.env.local',
+      '.env.*.local',
+      '.venv',
+      'npm-debug.log',
+      'config.gypi',
+      'node_modules',
+      '__pycache__',
+      'venv',
+      'CVS',
+      `.output`,
+    ];
+  }
   const cwds = Array.isArray(cwd) ? cwd : [cwd];
 
   const files = await Promise.all(
@@ -250,39 +266,31 @@ export const prepareFiles = (
   files: Map<string, DeploymentFile>,
   clientOptions: VercelClientOptions
 ): PreparedFile[] => {
-  const preparedFiles = [...files.keys()].reduce(
-    (acc: PreparedFile[], sha: string): PreparedFile[] => {
-      const next = [...acc];
-
-      const file = files.get(sha) as DeploymentFile;
-
-      for (const name of file.names) {
-        let fileName: string;
-
-        if (clientOptions.isDirectory) {
-          // Directory
-          fileName =
-            typeof clientOptions.path === 'string'
-              ? relative(clientOptions.path, name)
-              : name;
-        } else {
-          // Array of files or single file
-          const segments = name.split(sep);
-          fileName = segments[segments.length - 1];
-        }
-
-        next.push({
-          file: isWin ? fileName.replace(/\\/g, '/') : fileName,
-          size: file.data.byteLength || file.data.length,
-          mode: file.mode,
-          sha,
-        });
-      }
-
-      return next;
-    },
-    []
-  );
+  const preparedFiles: PreparedFile[] = [];
+  for (const [sha, file] of files) {
+    for (const name of file.names) {
+      let fileName: string;
+
+      if (clientOptions.isDirectory) {
+        // Directory
+        fileName =
+          typeof clientOptions.path === 'string'
+            ? relative(clientOptions.path, name)
+            : name;
+      } else {
+        // Array of files or single file
+        const segments = name.split(sep);
+        fileName = segments[segments.length - 1];
+      }
+
+      preparedFiles.push({
+        file: isWin ? fileName.replace(/\\/g, '/') : fileName,
        size: file.data.byteLength || file.data.length,
+        mode: file.mode,
+        sha,
+      });
+    }
+  }
 
   return preparedFiles;
 };
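To make the prebuilt branch of `getVercelIgnore` concrete: for `rootDirectory = 'root'`, `outputDir` is `root/.output`, and the loop un-ignores each path segment on the way down, so the resulting rules are:

    // ignores produced by the prebuilt branch when rootDirectory = 'root'
    ['*', '!root', '!root/.output', '!root/.output/**']

Without a `rootDirectory`, `outputDir` is just `.output` and the list collapses to `['*', '!.output', '!.output/**']`, the same rules the old code hard-coded.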
1  packages/client/tests/fixtures/file-system-api-root-directory/foo.txt  (vendored, new file)
@@ -0,0 +1 @@
+foo

1  packages/client/tests/fixtures/file-system-api-root-directory/root/.output/baz.txt  (vendored, new file)
@@ -0,0 +1 @@
+baz

1  packages/client/tests/fixtures/file-system-api-root-directory/root/.output/sub/qux.txt  (vendored, new file)
@@ -0,0 +1 @@
+qux

1  packages/client/tests/fixtures/file-system-api-root-directory/root/bar.txt  (vendored, new file)
@@ -0,0 +1 @@
+bar

1  packages/client/tests/fixtures/file-system-api-root-directory/someother/.output/baz.txt  (vendored, new file)
@@ -0,0 +1 @@
+baz

1  packages/client/tests/fixtures/file-system-api-root-directory/someother/.output/sub/qux.txt  (vendored, new file)
@@ -0,0 +1 @@
+qux

1  packages/client/tests/fixtures/file-system-api-root-directory/someother/bar.txt  (vendored, new file)
@@ -0,0 +1 @@
+bar

1  packages/client/tests/fixtures/file-system-api/.output/baz.txt  (vendored, new file)
@@ -0,0 +1 @@
+baz

1  packages/client/tests/fixtures/file-system-api/.output/sub/qux.txt  (vendored, new file)
@@ -0,0 +1 @@
+qux

1  packages/client/tests/fixtures/file-system-api/foo.txt  (vendored, new file)
@@ -0,0 +1 @@
+foo

1  packages/client/tests/fixtures/file-system-api/sub/bar.txt  (vendored, new file)
@@ -0,0 +1 @@
+bar

4  packages/client/tests/tsconfig.json  (new file)
@@ -0,0 +1,4 @@
+{
+  "extends": "../tsconfig.json",
+  "include": ["*.test.ts"]
+}
buildFileTree() tests (@vercel/client):

@@ -17,7 +17,11 @@ const toAbsolutePaths = (cwd: string, files: string[]) =>
 describe('buildFileTree()', () => {
   it('should exclude files using `.nowignore` blocklist', async () => {
     const cwd = fixture('nowignore');
-    const { fileList, ignoreList } = await buildFileTree(cwd, true, noop);
+    const { fileList, ignoreList } = await buildFileTree(
+      cwd,
+      { isDirectory: true },
+      noop
+    );
 
     const expectedFileList = toAbsolutePaths(cwd, ['.nowignore', 'index.txt']);
     expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual(
@@ -36,7 +40,11 @@ describe('buildFileTree()', () => {
 
   it('should include the node_modules using `.vercelignore` allowlist', async () => {
     const cwd = fixture('vercelignore-allow-nodemodules');
-    const { fileList, ignoreList } = await buildFileTree(cwd, true, noop);
+    const { fileList, ignoreList } = await buildFileTree(
+      cwd,
+      { isDirectory: true },
+      noop
+    );
 
     const expected = toAbsolutePaths(cwd, [
       'node_modules/one.txt',
@@ -54,4 +62,90 @@ describe('buildFileTree()', () => {
       normalizeWindowsPaths(ignoreList).sort()
     );
   });
+
+  it('should find root files but ignore .output files when prebuilt=false', async () => {
+    const cwd = fixture('file-system-api');
+    const { fileList, ignoreList } = await buildFileTree(
+      cwd,
+      { isDirectory: true, prebuilt: false },
+      noop
+    );
+
+    const expectedFileList = toAbsolutePaths(cwd, ['foo.txt', 'sub/bar.txt']);
+    expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual(
+      normalizeWindowsPaths(fileList).sort()
+    );
+
+    const expectedIgnoreList = ['.output'];
+    expect(normalizeWindowsPaths(expectedIgnoreList).sort()).toEqual(
+      normalizeWindowsPaths(ignoreList).sort()
+    );
+  });
+
+  it('should find .output files but ignore other files when prebuilt=true', async () => {
+    const cwd = fixture('file-system-api');
+    const { fileList, ignoreList } = await buildFileTree(
+      cwd,
+      { isDirectory: true, prebuilt: true },
+      noop
+    );
+
+    const expectedFileList = toAbsolutePaths(cwd, [
+      '.output/baz.txt',
+      '.output/sub/qux.txt',
+    ]);
+    expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual(
+      normalizeWindowsPaths(fileList).sort()
+    );
+
+    const expectedIgnoreList = ['foo.txt', 'sub'];
+    expect(normalizeWindowsPaths(expectedIgnoreList).sort()).toEqual(
+      normalizeWindowsPaths(ignoreList).sort()
+    );
+  });
+
+  it('should find root files but ignore all .output files when prebuilt=false and rootDirectory=root', async () => {
+    const cwd = fixture('file-system-api-root-directory');
+    const { fileList, ignoreList } = await buildFileTree(
+      cwd,
+      { isDirectory: true, prebuilt: false, rootDirectory: 'root' },
+      noop
+    );
+
+    const expectedFileList = toAbsolutePaths(cwd, [
+      'foo.txt',
+      'root/bar.txt',
+      'someother/bar.txt',
+    ]);
+    expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual(
+      normalizeWindowsPaths(fileList).sort()
+    );
+
+    const expectedIgnoreList = ['root/.output', 'someother/.output'];
+    expect(normalizeWindowsPaths(expectedIgnoreList).sort()).toEqual(
+      normalizeWindowsPaths(ignoreList).sort()
+    );
+  });
+
+  it('should find root/.output files but ignore other files when prebuilt=true and rootDirectory=root', async () => {
+    const cwd = fixture('file-system-api-root-directory');
+    const { fileList, ignoreList } = await buildFileTree(
+      cwd,
+      { isDirectory: true, prebuilt: true, rootDirectory: 'root' },
+      noop
+    );
+
+    const expectedFileList = toAbsolutePaths(cwd, [
+      'root/.output/baz.txt',
+      'root/.output/sub/qux.txt',
+    ]);
+    expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual(
+      normalizeWindowsPaths(fileList).sort()
+    );
+
+    const expectedIgnoreList = ['foo.txt', 'root/bar.txt', 'someother'];
+    expect(normalizeWindowsPaths(expectedIgnoreList).sort()).toEqual(
+      normalizeWindowsPaths(ignoreList).sort()
+    );
+  });
 });
@vercel/frameworks package.json:

@@ -1,6 +1,6 @@
 {
   "name": "@vercel/frameworks",
-  "version": "0.5.1-canary.16",
+  "version": "0.5.1-canary.17",
   "main": "./dist/frameworks.js",
   "types": "./dist/frameworks.d.ts",
   "files": [
frameworks list (@vercel/frameworks):

@@ -141,7 +141,6 @@ export const frameworks = [
   },
   dependency: 'gatsby',
   getOutputDirName: async () => 'public',
-  getFsOutputDir: async () => 'public',
   defaultRoutes: async (dirPrefix: string) => {
     // This file could be generated by gatsby-plugin-now or gatsby-plugin-zeit-now
     try {
@@ -226,7 +225,6 @@ export const frameworks = [
     },
   },
   dependency: 'remix',
-  getFsOutputDir: async () => 'public',
   getOutputDirName: async () => 'public',
   defaultRoutes: [
     {
@@ -254,10 +252,13 @@ export const frameworks = [
       source: '/build/(.*)',
       regex: '/build/(.*)',
       headers: [
-        { key: 'cache-control', value: 'public, max-age=31536000, immutable' },
+        {
+          key: 'cache-control',
+          value: 'public, max-age=31536000, immutable',
+        },
       ],
     },
-  ]
+  ],
 },
 {
   name: 'Hexo',
@@ -294,7 +295,6 @@ export const frameworks = [
     },
   },
   dependency: 'hexo',
-  getFsOutputDir: async () => 'public',
   getOutputDirName: async () => 'public',
 },
 {
@@ -332,7 +332,6 @@ export const frameworks = [
     },
   },
   dependency: '@11ty/eleventy',
-  getFsOutputDir: async () => '_site',
   getOutputDirName: async () => '_site',
   cachePattern: '.cache/**',
 },
@@ -372,22 +371,6 @@ export const frameworks = [
     },
   },
   dependency: '@docusaurus/core',
-  getFsOutputDir: async (dirPrefix: string) => {
-    const base = 'build';
-    try {
-      const location = join(dirPrefix, base);
-      const content = await readdir(location, { withFileTypes: true });
-
-      // If there is only one file in it that is a dir we'll use it as dist dir
-      if (content.length === 1 && content[0].isDirectory()) {
-        return join(base, content[0].name);
-      }
-    } catch (error) {
-      console.error(`Error detecting output directory: `, error);
-    }
-
-    return base;
-  },
   getOutputDirName: async (dirPrefix: string) => {
     const base = 'build';
     try {
@@ -527,21 +510,6 @@ export const frameworks = [
     },
   },
   dependency: 'docusaurus',
-  getFsOutputDir: async (dirPrefix: string) => {
-    const base = 'build';
-    try {
-      const location = join(dirPrefix, base);
-      const content = await readdir(location, { withFileTypes: true });
-
-      // If there is only one file in it that is a dir we'll use it as dist dir
-      if (content.length === 1 && content[0].isDirectory()) {
-        return join(base, content[0].name);
-      }
-    } catch (error) {
-      console.error(`Error detecting output directory: `, error);
-    }
-    return base;
-  },
   getOutputDirName: async (dirPrefix: string) => {
     const base = 'build';
     try {
@@ -593,7 +561,6 @@ export const frameworks = [
     },
   },
   dependency: 'preact-cli',
-  getFsOutputDir: async () => 'build',
   getOutputDirName: async () => 'build',
   defaultRoutes: [
     {
@@ -650,7 +617,6 @@ export const frameworks = [
     },
   },
   dependency: '@dojo/cli',
-  getFsOutputDir: async () => 'output/dist',
   getOutputDirName: async () => join('output', 'dist'),
   defaultRoutes: [
     {
@@ -717,7 +683,6 @@ export const frameworks = [
     },
   },
   dependency: 'ember-cli',
-  getFsOutputDir: async () => 'dist',
   getOutputDirName: async () => 'dist',
   defaultRoutes: [
     {
@@ -772,7 +737,6 @@ export const frameworks = [
     },
   },
   dependency: '@vue/cli-service',
-  getFsOutputDir: async () => 'dist',
   getOutputDirName: async () => 'dist',
   defaultRoutes: [
     {
@@ -849,7 +813,6 @@ export const frameworks = [
     },
   },
   dependency: '@scullyio/init',
-  getFsOutputDir: async () => 'dist',
   getOutputDirName: async () => 'dist/static',
 },
 {
@@ -886,7 +849,6 @@ export const frameworks = [
     },
   },
   dependency: '@ionic/angular',
-  getFsOutputDir: async () => 'www',
   getOutputDirName: async () => 'www',
   defaultRoutes: [
     {
@@ -940,7 +902,6 @@ export const frameworks = [
     },
   },
   dependency: '@angular/cli',
-  getFsOutputDir: async () => 'dist',
   getOutputDirName: async (dirPrefix: string) => {
     const base = 'dist';
     try {
@@ -1008,7 +969,6 @@
|
||||
},
|
||||
},
|
||||
dependency: 'polymer-cli',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async (dirPrefix: string) => {
|
||||
const base = 'build';
|
||||
try {
|
||||
@@ -1078,7 +1038,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'sirv-cli',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultRoutes: [
|
||||
{
|
||||
@@ -1128,10 +1087,9 @@ export const frameworks = [
|
||||
placeholder: 'svelte-kit dev',
|
||||
},
|
||||
outputDirectory: {
|
||||
placeholder: 'public',
|
||||
value: 'public',
|
||||
},
|
||||
},
|
||||
getFsOutputDir: async () => '.output',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
{
|
||||
@@ -1168,7 +1126,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@ionic/react',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
{
|
||||
@@ -1276,7 +1233,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'react-scripts',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
{
|
||||
@@ -1378,7 +1334,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'gridsome',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
},
|
||||
{
|
||||
@@ -1416,7 +1371,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'umi',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
{
|
||||
@@ -1470,7 +1424,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'sapper',
|
||||
getFsOutputDir: async () => '__sapper__/export',
|
||||
getOutputDirName: async () => '__sapper__/export',
|
||||
},
|
||||
{
|
||||
@@ -1508,7 +1461,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'saber',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultRoutes: [
|
||||
{
|
||||
@@ -1577,7 +1529,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@stencil/core',
|
||||
getFsOutputDir: async () => 'www',
|
||||
getOutputDirName: async () => 'www',
|
||||
defaultRoutes: [
|
||||
{
|
||||
@@ -1666,7 +1617,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'nuxt',
|
||||
getFsOutputDir: async () => '.output',
|
||||
getOutputDirName: async () => 'dist',
|
||||
cachePattern: '.nuxt/**',
|
||||
defaultRoutes: [
|
||||
@@ -1724,7 +1674,6 @@ export const frameworks = [
|
||||
placeholder: 'RedwoodJS default',
|
||||
},
|
||||
},
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
{
|
||||
@@ -1768,16 +1717,6 @@ export const frameworks = [
|
||||
placeholder: '`public` or `publishDir` from the `config` file',
|
||||
},
|
||||
},
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
type HugoConfig = { publishDir?: string };
|
||||
const config = await readConfigFile<HugoConfig>(
|
||||
['config.json', 'config.yaml', 'config.toml'].map(fileName => {
|
||||
return join(dirPrefix, fileName);
|
||||
})
|
||||
);
|
||||
|
||||
return (config && config.publishDir) || 'public';
|
||||
},
|
||||
getOutputDirName: async (dirPrefix: string): Promise<string> => {
|
||||
type HugoConfig = { publishDir?: string };
|
||||
const config = await readConfigFile<HugoConfig>(
|
||||
@@ -1822,13 +1761,6 @@ export const frameworks = [
|
||||
placeholder: '`_site` or `destination` from `_config.yml`',
|
||||
},
|
||||
},
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
type JekyllConfig = { destination?: string };
|
||||
const config = await readConfigFile<JekyllConfig>(
|
||||
join(dirPrefix, '_config.yml')
|
||||
);
|
||||
return (config && config.destination) || '_site';
|
||||
},
|
||||
getOutputDirName: async (dirPrefix: string): Promise<string> => {
|
||||
type JekyllConfig = { destination?: string };
|
||||
const config = await readConfigFile<JekyllConfig>(
|
||||
@@ -1870,7 +1802,6 @@ export const frameworks = [
|
||||
value: 'public',
|
||||
},
|
||||
},
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
{
|
||||
@@ -1905,7 +1836,6 @@ export const frameworks = [
|
||||
value: 'build',
|
||||
},
|
||||
},
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
cachePattern: '{vendor/bin,vendor/cache,vendor/bundle}/**',
|
||||
},
|
||||
@@ -1940,7 +1870,6 @@ export const frameworks = [
|
||||
value: 'public',
|
||||
},
|
||||
},
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultVersion: '0.13.0',
|
||||
},
|
||||
@@ -1980,7 +1909,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'vite',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
},
|
||||
{
|
||||
@@ -2018,7 +1946,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'parcel',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
{
|
||||
|
||||
@@ -162,9 +162,9 @@ export interface Framework {
|
||||
dependency?: string;
|
||||
/**
|
||||
* Function that returns the name of the directory that the framework outputs
|
||||
* its build results to. In some cases this is read from a configuration file.
|
||||
* its File System API build results to, usually called `.output`.
|
||||
*/
|
||||
getFsOutputDir: (dirPrefix: string) => Promise<string>;
|
||||
getFsOutputDir?: (dirPrefix: string) => Promise<string>;
|
||||
/**
|
||||
* Function that returns the name of the directory that the framework outputs
|
||||
* its STATIC build results to. In some cases this is read from a configuration file.
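
The interface change above makes `getFsOutputDir` optional. A sketch of a framework entry under the updated interface (a hypothetical framework; the other required `Framework` fields are omitted, hence `Partial`):

const example: Partial<Framework> = {
  name: 'Example',           // hypothetical framework name
  dependency: 'example-cli', // hypothetical npm package
  // File System API output, usually `.output`; now optional to provide:
  getFsOutputDir: async () => '.output',
  // Static build output, as before:
  getOutputDirName: async () => 'dist',
};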

@@ -1,6 +1,6 @@
{
  "name": "vercel-plugin-middleware",
  "version": "0.0.0-canary.7",
  "version": "0.0.0-canary.19",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "",
@@ -30,6 +30,7 @@
  "@types/node-fetch": "^2",
  "@types/ua-parser-js": "0.7.36",
  "@types/uuid": "8.3.1",
  "@vercel/build-utils": "2.12.3-canary.42",
  "@vercel/ncc": "0.24.0",
  "cookie": "0.4.1",
  "formdata-node": "4.3.1",

@@ -1,4 +1,4 @@
import * as middleware from './_middleware';
import * as middleware from './_temp_middleware';
_ENTRIES = typeof _ENTRIES === 'undefined' ? {} : _ENTRIES;
_ENTRIES['middleware_pages/_middleware'] = {
  default: async function (ev) {

52
packages/middleware/src/esbuild-plugins.ts
Normal file
@@ -0,0 +1,52 @@
import path from 'path';
import * as esbuild from 'esbuild';

const processInjectFile = `
// envOverride is passed by esbuild plugin
const env = envOverride
function cwd() {
  return '/'
}
function chdir(dir) {
  throw new Error('process.chdir is not supported')
}
export const process = {
  argv: [],
  env,
  chdir,
  cwd,
};
`;

export function nodeProcessPolyfillPlugin({ env = {} } = {}): esbuild.Plugin {
  return {
    name: 'node-process-polyfill',
    setup({ initialOptions, onResolve, onLoad }) {
      onResolve({ filter: /_virtual-process-polyfill_\.js/ }, ({ path }) => {
        return {
          path,
          sideEffects: false,
        };
      });

      onLoad({ filter: /_virtual-process-polyfill_\.js/ }, () => {
        const contents = `const envOverride = ${JSON.stringify(
          env
        )};\n${processInjectFile}`;
        return {
          loader: 'js',
          contents,
        };
      });

      const polyfills = [
        path.resolve(__dirname, '_virtual-process-polyfill_.js'),
      ];
      if (initialOptions.inject) {
        initialOptions.inject.push(...polyfills);
      } else {
        initialOptions.inject = [...polyfills];
      }
    },
  };
}
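
A sketch of how this plugin is meant to be wired into an esbuild invocation (mirroring the call site added in `build()` below; entry point, outfile, and env values are placeholders):

import * as esbuild from 'esbuild';
import { nodeProcessPolyfillPlugin } from './esbuild-plugins';

await esbuild.build({
  entryPoints: ['_middleware.ts'], // placeholder entry point
  bundle: true,
  format: 'cjs',
  outfile: 'out.js', // placeholder output path
  // The plugin injects a virtual module exporting a `process` shim whose
  // `env` is serialized from the values passed here, so references to
  // `process.env` resolve inside the sandboxed middleware bundle.
  plugins: [nodeProcessPolyfillPlugin({ env: { FOO: 'bar' } })],
});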

@@ -5,6 +5,7 @@ import { promises as fsp } from 'fs';
import { IncomingMessage, ServerResponse } from 'http';
import libGlob from 'glob';
import Proxy from 'http-proxy';
import { updateFunctionsManifest } from '@vercel/build-utils';

import { run } from './websandbox';
import type { FetchEventResult } from './websandbox/types';
@@ -16,13 +17,15 @@ import {
  UrlWithParsedQuery,
} from 'url';
import { toNodeHeaders } from './websandbox/utils';
import { nodeProcessPolyfillPlugin } from './esbuild-plugins';

const glob = util.promisify(libGlob);
const SUPPORTED_EXTENSIONS = ['.js', '.ts'];

// File name of the `entries.js` file that gets copied into the
// project directory. Use a name that is unlikely to conflict.
const ENTRIES_NAME = '___vc_entries.js';
const TMP_ENTRIES_NAME = '.output/inputs/middleware/___vc_entries.js';
const TMP_MIDDLEWARE_BUNDLE = '.output/inputs/middleware/_temp_middleware.js';

async function getMiddlewareFile(workingDirectory: string) {
  // Only the root-level `_middleware.*` files are considered.
@@ -52,17 +55,37 @@ async function getMiddlewareFile(workingDirectory: string) {
}

export async function build({ workPath }: { workPath: string }) {
  const entriesPath = join(workPath, ENTRIES_NAME);
  const entriesPath = join(workPath, TMP_ENTRIES_NAME);
  const transientFilePath = join(workPath, TMP_MIDDLEWARE_BUNDLE);
  const middlewareFile = await getMiddlewareFile(workPath);
  if (!middlewareFile) return;

  console.log('Compiling middleware file: %j', middlewareFile);

  // Create `_ENTRIES` wrapper
  await fsp.copyFile(join(__dirname, 'entries.js'), entriesPath);

  // Build
  /**
   * Two builds happen here, because esbuild doesn't offer a way to add a banner
   * to individual input files, and the entries wrapper relies on running in
   * non-strict mode to access the `_ENTRIES` global.
   *
   * To work around this, we bundle the middleware directly and add
   * 'use strict'; to make the entire bundle run in strict mode. We then bundle
   * a second time, adding the global `_ENTRIES` wrapper and preserving the
   * 'use strict' for the entire scope of the original bundle.
   */
  try {
    await esbuild.build({
      entryPoints: [middlewareFile],
      bundle: true,
      absWorkingDir: workPath,
      outfile: transientFilePath,
      banner: {
        js: '"use strict";',
      },
      plugins: [nodeProcessPolyfillPlugin({ env: process.env })],
      format: 'cjs',
    });
    // Create `_ENTRIES` wrapper
    await fsp.copyFile(join(__dirname, 'entries.js'), entriesPath);
    await esbuild.build({
      entryPoints: [entriesPath],
      bundle: true,
@@ -70,29 +93,24 @@ export async function build({ workPath }: { workPath: string }) {
      outfile: join(workPath, '.output/server/pages/_middleware.js'),
    });
  } finally {
    await fsp.unlink(transientFilePath);
    await fsp.unlink(entriesPath);
  }

  // Write middleware manifest
  const middlewareManifest = {
    version: 1,
    sortedMiddleware: ['/'],
    middleware: {
      '/': {
        env: [],
        files: ['server/pages/_middleware.js'],
        name: 'pages/_middleware',
        page: '/',
        regexp: '^/.*$',
      },
    },
  const fileName = basename(middlewareFile);
  const pages: { [key: string]: any } = {};

  pages[fileName] = {
    runtime: 'web',
    env: [],
    files: ['server/pages/_middleware.js'],
    name: 'pages/_middleware',
    page: '/',
    regexp: '^/.*$',
    sortingIndex: 1,
  };
  const middlewareManifestData = JSON.stringify(middlewareManifest, null, 2);
  const middlewareManifestPath = join(
    workPath,
    '.output/server/middleware-manifest.json'
  );
  await fsp.writeFile(middlewareManifestPath, middlewareManifestData);

  await updateFunctionsManifest({ workPath, pages });
}
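
With this change, the middleware metadata flows through `updateFunctionsManifest` rather than a hand-written middleware manifest. Judging from the `pages` object above and the updated test snapshot below, the resulting `.output/functions-manifest.json` should look roughly like this (shape inferred, not verbatim output):

{
  "version": 2,
  "pages": {
    "_middleware.js": {
      "runtime": "web",
      "env": [],
      "files": ["server/pages/_middleware.js"],
      "name": "pages/_middleware",
      "page": "/",
      "regexp": "^/.*$",
      "sortingIndex": 1
    }
  },
  "sortedMiddleware": ["/"]
}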

const stringifyQuery = (req: IncomingMessage, query: ParsedUrlQuery) => {

@@ -114,6 +114,7 @@ export async function run(params: {
  const content = readFileSync(params.path, 'utf-8');
  const esBuildResult = esbuild.transformSync(content, {
    format: 'cjs',
    banner: '"use strict";',
  });
  const x = vm.runInNewContext(m.wrap(esBuildResult.code), cache.sandbox, {
    filename: params.path,
@@ -163,6 +164,7 @@ function sandboxRequire(referrer: string, specifier: string) {

  const transformOptions: esbuild.TransformOptions = {
    format: 'cjs',
    banner: '"use strict";',
  };
  if (extname(resolved) === '.json') {
    transformOptions.loader = 'json';

@@ -2,8 +2,8 @@

exports[`build() should build simple middleware 1`] = `
Object {
  "middleware": Object {
    "/": Object {
  "pages": Object {
    "_middleware.js": Object {
      "env": Array [],
      "files": Array [
        "server/pages/_middleware.js",
@@ -11,11 +11,10 @@ Object {
      "name": "pages/_middleware",
      "page": "/",
      "regexp": "^/.*$",
      "runtime": "web",
      "sortingIndex": 1,
    },
  },
  "sortedMiddleware": Array [
    "/",
  ],
  "version": 1,
  "version": 2,
}
`;

78
packages/middleware/test/build.test.ts
vendored
@@ -3,6 +3,30 @@ import { promises as fsp } from 'fs';
import { build } from '../src';
import { Response } from 'node-fetch';

const setupFixture = async (fixture: string) => {
  const fixturePath = join(__dirname, `fixtures/${fixture}`);
  await build({
    workPath: fixturePath,
  });

  const functionsManifest = JSON.parse(
    await fsp.readFile(
      join(fixturePath, '.output/functions-manifest.json'),
      'utf8'
    )
  );

  const outputFile = join(fixturePath, '.output/server/pages/_middleware.js');
  expect(await fsp.stat(outputFile)).toBeTruthy();
  require(outputFile);
  //@ts-ignore
  const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
  return {
    middleware,
    functionsManifest,
  };
};

describe('build()', () => {
  beforeEach(() => {
    //@ts-ignore
@@ -15,25 +39,9 @@ describe('build()', () => {
    delete global._ENTRIES;
  });
  it('should build simple middleware', async () => {
    const fixture = join(__dirname, 'fixtures/simple');
    await build({
      workPath: fixture,
    });
    const { functionsManifest, middleware } = await setupFixture('simple');

    const middlewareManifest = JSON.parse(
      await fsp.readFile(
        join(fixture, '.output/server/middleware-manifest.json'),
        'utf8'
      )
    );
    expect(middlewareManifest).toMatchSnapshot();

    const outputFile = join(fixture, '.output/server/pages/_middleware.js');
    expect(await fsp.stat(outputFile)).toBeTruthy();

    require(outputFile);
    //@ts-ignore
    const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
    expect(functionsManifest).toMatchSnapshot();
    expect(typeof middleware).toStrictEqual('function');
    const handledResponse = await middleware({
      request: {
@@ -54,4 +62,38 @@ describe('build()', () => {
      (unhandledResponse.response as Response).headers.get('x-middleware-next')
    ).toEqual('1');
  });

  it('should build simple middleware with env vars', async () => {
    const expectedEnvVar = 'expected-env-var';
    const fixture = join(__dirname, 'fixtures/env');
    process.env.ENV_VAR_SHOULD_BE_DEFINED = expectedEnvVar;
    await build({
      workPath: fixture,
    });
    // env var should be inlined in the output
    delete process.env.ENV_VAR_SHOULD_BE_DEFINED;

    const outputFile = join(fixture, '.output/server/pages/_middleware.js');
    expect(await fsp.stat(outputFile)).toBeTruthy();

    require(outputFile);
    //@ts-ignore
    const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
    expect(typeof middleware).toStrictEqual('function');
    const handledResponse = await middleware({
      request: {},
    });
    expect(String(handledResponse.response.body)).toEqual(expectedEnvVar);
    expect(
      (handledResponse.response as Response).headers.get('x-middleware-next')
    ).toEqual(null);
  });

  it('should create a middleware that runs in strict mode', async () => {
    const { middleware } = await setupFixture('use-strict');
    const response = await middleware({
      request: {},
    });
    expect(String(response.response.body)).toEqual('is strict mode? yes');
  });
});

3
packages/middleware/test/fixtures/env/_middleware.js
vendored
Normal file
@@ -0,0 +1,3 @@
export default req => {
  return new Response(process.env.ENV_VAR_SHOULD_BE_DEFINED);
};

6
packages/middleware/test/fixtures/use-strict/_middleware.js
vendored
Normal file
@@ -0,0 +1,6 @@
export default function (req) {
  const isStrict = (function () {
    return !this;
  })();
  return new Response('is strict mode? ' + (isStrict ? 'yes' : 'no'));
}
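
The fixture above relies on a classic strict-mode probe: in sloppy mode, `this` inside a plain function call is the global object (so `!this` is false), while under `'use strict'` it is `undefined` (so `!this` is true). That is what lets the test confirm the `"use strict";` banner from the first esbuild pass survives the second bundling step:

// Plain function call outside strict mode: `this` is globalThis.
(function () { return !this; })(); // => false
// Same expression compiled under 'use strict': `this` is undefined.
(function () { return !this; })(); // => true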

@@ -1,7 +1,7 @@
{
  "private": false,
  "name": "vercel-plugin-go",
  "version": "1.0.0-canary.19",
  "version": "1.0.0-canary.30",
  "main": "dist/index.js",
  "license": "MIT",
  "files": [
@@ -17,7 +17,7 @@
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "@vercel/build-utils": "2.12.3-canary.31",
    "@vercel/build-utils": "2.12.3-canary.42",
    "@vercel/go": "1.2.4-canary.4"
  },
  "devDependencies": {

@@ -1,6 +1,6 @@
{
  "name": "vercel-plugin-node",
  "version": "1.12.2-canary.23",
  "version": "1.12.2-canary.34",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -34,7 +34,7 @@
    "@types/node-fetch": "2",
    "@types/test-listen": "1.1.0",
    "@types/yazl": "2.4.2",
    "@vercel/build-utils": "2.12.3-canary.31",
    "@vercel/build-utils": "2.12.3-canary.42",
    "@vercel/fun": "1.0.3",
    "@vercel/ncc": "0.24.0",
    "@vercel/nft": "0.14.0",

@@ -1,7 +1,7 @@
{
  "private": false,
  "name": "vercel-plugin-python",
  "version": "1.0.0-canary.20",
  "version": "1.0.0-canary.31",
  "main": "dist/index.js",
  "license": "MIT",
  "files": [
@@ -17,8 +17,8 @@
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "@vercel/build-utils": "2.12.3-canary.31",
    "@vercel/python": "2.1.2-canary.1"
    "@vercel/build-utils": "2.12.3-canary.42",
    "@vercel/python": "2.1.2-canary.2"
  },
  "devDependencies": {
    "@types/node": "*",

@@ -1,7 +1,7 @@
{
  "private": false,
  "name": "vercel-plugin-ruby",
  "version": "1.0.0-canary.18",
  "version": "1.0.0-canary.30",
  "main": "dist/index.js",
  "license": "MIT",
  "files": [
@@ -17,8 +17,8 @@
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "@vercel/build-utils": "2.12.3-canary.31",
    "@vercel/ruby": "1.2.8-canary.6"
    "@vercel/build-utils": "2.12.3-canary.42",
    "@vercel/ruby": "1.2.10-canary.0"
  },
  "devDependencies": {
    "@types/node": "*",

@@ -1,6 +1,6 @@
{
  "name": "@vercel/python",
  "version": "2.1.2-canary.1",
  "version": "2.1.2-canary.2",
  "main": "./dist/index.js",
  "license": "MIT",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",

@@ -1,4 +1,3 @@
import { relative, basename } from 'path';
import execa from 'execa';
import { Meta, debug } from '@vercel/build-utils';

@@ -136,17 +135,10 @@ export async function installRequirementsFile({
  meta,
  args = [],
}: InstallRequirementsFileArg) {
  const fileAtRoot = relative(workPath, filePath) === basename(filePath);

  // If the `requirements.txt` file is located in the Root Directory of the project and
  // the new File System API is used (`avoidTopLevelInstall`), the Install Command
  // will have already installed its dependencies, so we don't need to do it again.
  if (meta.avoidTopLevelInstall && fileAtRoot) {
    debug(
      `Skipping requirements file installation, already installed by Install Command`
    );
    return;
  }
  // The Vercel platform already handles `requirements.txt` for frontend projects,
  // but the installation logic there is different, because it seems to install all
  // of the dependencies globally, whereas, for this Runtime, we want it to happen only
  // locally, so we'll run a separate installation.

  if (
    meta.isDev &&

@@ -9,13 +9,24 @@ interface RubyVersion extends NodeVersion {

const allOptions: RubyVersion[] = [
  { major: 2, minor: 7, range: '2.7.x', runtime: 'ruby2.7' },
  { major: 2, minor: 5, range: '2.5.x', runtime: 'ruby2.5' },
  {
    major: 2,
    minor: 5,
    range: '2.5.x',
    runtime: 'ruby2.5',
    discontinueDate: new Date('2021-11-30'),
  },
];

function getLatestRubyVersion(): RubyVersion {
  return allOptions[0];
}

function isDiscontinued({ discontinueDate }: RubyVersion): boolean {
  const today = Date.now();
  return discontinueDate !== undefined && discontinueDate.getTime() <= today;
}

function getRubyPath(meta: Meta, gemfileContents: string) {
  let selection = getLatestRubyVersion();
  if (meta.isDev) {
@@ -37,8 +48,20 @@ function getRubyPath(meta: Meta, gemfileContents: string) {
  if (!found) {
    throw new NowBuildError({
      code: 'RUBY_INVALID_VERSION',
      message: 'Found `Gemfile` with invalid Ruby version: `' + line + '`.',
      link: 'https://vercel.com/docs/runtimes#official-runtimes/ruby/ruby-version',
      message: `Found \`Gemfile\` with invalid Ruby version: \`${line}.\``,
      link: 'http://vercel.link/ruby-version',
    });
  }
  if (isDiscontinued(selection)) {
    const latest = getLatestRubyVersion();
    const intro = `Found \`Gemfile\` with discontinued Ruby version: \`${line}.\``;
    const hint = `Please set \`ruby "~> ${latest.range}"\` in your \`Gemfile\` to use Ruby ${latest.range}.`;
    const upstream =
      'This change is the result of a decision made by an upstream infrastructure provider (AWS).';
    throw new NowBuildError({
      code: 'RUBY_DISCONTINUED_VERSION',
      link: 'http://vercel.link/ruby-version',
      message: `${intro} ${hint} ${upstream}`,
    });
  }
}
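
The discontinuation gate is driven entirely by the `discontinueDate` added to the 2.5.x entry. A small sketch of the check in isolation, using the date hard-coded in `allOptions`:

const ruby25: RubyVersion = {
  major: 2,
  minor: 5,
  range: '2.5.x',
  runtime: 'ruby2.5',
  discontinueDate: new Date('2021-11-30'),
};

// Once Date.now() passes the cutoff, any Gemfile pinning 2.5.x fails the
// build with code RUBY_DISCONTINUED_VERSION instead of selecting ruby2.5.
const blocked = isDiscontinued(ruby25); // true on or after 2021-11-30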

@@ -1,7 +1,7 @@
{
  "name": "@vercel/ruby",
  "author": "Nathan Cahill <nathan@nathancahill.com>",
  "version": "1.2.8-canary.6",
  "version": "1.2.10-canary.0",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/ruby",

@@ -1,6 +1,5 @@
{
  "version": 2,
  "builds": [{ "src": "index.rb", "use": "@vercel/ruby" }],
  "build": { "env": { "RUBY_VERSION": "2.7.x" } },
  "probes": [{ "path": "/", "mustContain": "gem:RANDOMNESS_PLACEHOLDER" }]
}

@@ -1,7 +1,6 @@
{
  "version": 2,
  "builds": [{ "src": "project/index.rb", "use": "@vercel/ruby" }],
  "build": { "env": { "RUBY_VERSION": "2.7.x" } },
  "probes": [
    { "path": "/project/", "mustContain": "gem:RANDOMNESS_PLACEHOLDER" }
  ]

@@ -1,6 +1,5 @@
{
  "version": 2,
  "builds": [{ "src": "index.ru", "use": "@vercel/ruby" }],
  "build": { "env": { "RUBY_VERSION": "2.7.x" } },
  "probes": [{ "path": "/", "mustContain": "gem:RANDOMNESS_PLACEHOLDER" }]
}

7
packages/ruby/test/fixtures/11-version-2-5-error/Gemfile
vendored
Normal file
@@ -0,0 +1,7 @@
# frozen_string_literal: true

source "https://rubygems.org"

ruby "~> 2.5.x"

gem "cowsay", "~> 0.3.0"

16
packages/ruby/test/fixtures/11-version-2-5-error/Gemfile.lock
vendored
Normal file
@@ -0,0 +1,16 @@
GEM
  remote: https://rubygems.org/
  specs:
    cowsay (0.3.0)

PLATFORMS
  x86_64-linux

DEPENDENCIES
  cowsay (~> 0.3.0)

RUBY VERSION
   ruby 2.5.5p157

BUNDLED WITH
   2.2.22

9
packages/ruby/test/fixtures/11-version-2-5-error/index.rb
vendored
Normal file
@@ -0,0 +1,9 @@
require 'cowsay'

Handler = Proc.new do |req, res|
  name = req.query['name'] || 'World'

  res.status = 200
  res['Content-Type'] = 'text/text; charset=utf-8'
  res.body = Cowsay.say("Hello #{name}", 'cow')
end

4
packages/ruby/test/fixtures/11-version-2-5-error/vercel.json
vendored
Normal file
@@ -0,0 +1,4 @@
{
  "version": 2,
  "builds": [{ "src": "index.rb", "use": "@vercel/ruby" }]
}

24
packages/ruby/test/test.js
vendored
@@ -23,8 +23,32 @@ beforeAll(async () => {

const fixturesPath = path.resolve(__dirname, 'fixtures');

const testsThatFailToBuild = new Map([
  [
    '11-version-2-5-error',
    'Found `Gemfile` with discontinued Ruby version: `ruby "~> 2.5.x".` Please set `ruby "~> 2.7.x"` in your `Gemfile` to use Ruby 2.7.x. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
  ],
]);

// eslint-disable-next-line no-restricted-syntax
for (const fixture of fs.readdirSync(fixturesPath)) {
  const errMsg = testsThatFailToBuild.get(fixture);
  if (errMsg) {
    // eslint-disable-next-line no-loop-func
    it(`should fail to build ${fixture}`, async () => {
      try {
        await testDeployment(
          { builderUrl, buildUtilsUrl },
          path.join(fixturesPath, fixture)
        );
      } catch (err) {
        expect(err).toBeTruthy();
        expect(err.deployment).toBeTruthy();
        expect(err.deployment.errorMessage).toBe(errMsg);
      }
    });
    continue; //eslint-disable-line
  }
  // eslint-disable-next-line no-loop-func
  it(`should build ${fixture}`, async () => {
    await expect(