Mirror of https://github.com/LukeHagar/vercel.git
Synced 2025-12-12 04:22:14 +00:00

Comparing vercel-plu...vercel-plu: 13 commits
| Author | SHA1 | Date |
|---|---|---|
| | d31ebbabe4 | |
| | 09c9b71adb | |
| | 5975db4d66 | |
| | 2c86ac654c | |
| | ca5f066eb9 | |
| | 410ef86102 | |
| | 6792edf32a | |
| | 67de167a7e | |
| | 0c5c05d90b | |
| | fe43c9c4b2 | |
| | d6a5aa4f6d | |
| | 1c3701628d | |
| | 45689f22ab | |
package.json (@vercel/build-utils):

@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.12.3-canary.35",
+  "version": "2.12.3-canary.40",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -30,7 +30,7 @@
     "@types/node-fetch": "^2.1.6",
     "@types/semver": "6.0.0",
     "@types/yazl": "^2.4.1",
-    "@vercel/frameworks": "0.5.1-canary.16",
+    "@vercel/frameworks": "0.5.1-canary.17",
     "@vercel/ncc": "0.24.0",
     "aggregate-error": "3.0.1",
     "async-retry": "1.2.3",
convert-runtime-to-plugin.ts (@vercel/build-utils):

@@ -3,7 +3,6 @@ import { join, parse, relative, dirname, basename, extname } from 'path';
 import glob from './fs/glob';
 import { normalizePath } from './fs/normalize-path';
 import { FILES_SYMBOL, Lambda } from './lambda';
-import type FileBlob from './file-blob';
 import type { BuildOptions, Files } from './types';
 import { debug, getIgnoreFilter } from '.';
@@ -87,10 +86,10 @@ export function convertRuntimeToPlugin(
 
   const pages: { [key: string]: any } = {};
   const pluginName = packageName.replace('vercel-plugin-', '');
+  const outputPath = join(workPath, '.output');
 
   const traceDir = join(
-    workPath,
-    `.output`,
+    outputPath,
     `inputs`,
     // Legacy Runtimes can only provide API Routes, so that's
     // why we can use this prefix for all of them. Here, we have to
@@ -102,7 +101,8 @@ export function convertRuntimeToPlugin(
   await fs.ensureDir(traceDir);
 
   let newPathsRuntime: Set<string> = new Set();
-  let linkersRuntime: Array<Promise<void>> = [];
+
+  const entryRoot = join(outputPath, 'server', 'pages');
 
   for (const entrypoint of Object.keys(entrypoints)) {
     const { output } = await buildRuntime({
@@ -114,6 +114,7 @@ export function convertRuntimeToPlugin(
       },
       meta: {
+        avoidTopLevelInstall: true,
         skipDownload: true,
       },
     });
 
@@ -143,6 +144,7 @@ export function convertRuntimeToPlugin(
 
     let handlerFileBase = output.handler;
     let handlerFile = lambdaFiles[handlerFileBase];
+    let handlerHasImport = false;
 
     const { handler } = output;
     const handlerMethod = handler.split('.').pop();
@@ -156,6 +158,7 @@ export function convertRuntimeToPlugin(
     if (!handlerFile) {
       handlerFileBase = handlerFileName + ext;
       handlerFile = lambdaFiles[handlerFileBase];
+      handlerHasImport = true;
     }
 
     if (!handlerFile || !handlerFile.fsPath) {
@@ -166,7 +169,6 @@ export function convertRuntimeToPlugin(
 
     const handlerExtName = extname(handlerFile.fsPath);
 
-    const entryRoot = join(workPath, '.output', 'server', 'pages');
     const entryBase = basename(entrypoint).replace(ext, handlerExtName);
     const entryPath = join(dirname(entrypoint), entryBase);
     const entry = join(entryRoot, entryPath);
@@ -178,6 +180,70 @@ export function convertRuntimeToPlugin(
     await fs.ensureDir(dirname(entry));
-    await fs.copy(handlerFile.fsPath, entry);
+
+    // For compiled languages, the launcher file will be binary and therefore
+    // won't try to import a user-provided request handler (instead, it will
+    // contain it). But for interpreted languages, the launcher might try to
+    // load a user-provided request handler from the source file instead of bundling
+    // it, so we have to adjust the import statement inside the launcher to point
+    // to the respective source file. Previously, Legacy Runtimes simply expected
+    // the user-provided request-handler to be copied right next to the launcher,
+    // but with the new File System API, files won't be moved around unnecessarily.
+    if (handlerHasImport) {
+      const { fsPath } = handlerFile;
+      const encoding = 'utf-8';
+
+      // This is the true directory of the user-provided request handler in the
+      // source files, so that's what we will use as an import path in the launcher.
+      const locationPrefix = relative(entry, outputPath);
+
+      let handlerContent = await fs.readFile(fsPath, encoding);
+
+      const importPaths = [
+        // This is the full entrypoint path, like `./api/test.py`
+        `./${entrypoint}`,
+        // This is the entrypoint path without extension, like `api/test`
+        entrypoint.slice(0, -ext.length),
+      ];
+
+      // Generate a list of regular expressions that we can use for
+      // finding matches, but only allow matches if the import path is
+      // wrapped inside single (') or double quotes (").
+      const patterns = importPaths.map(path => {
+        // eslint-disable-next-line no-useless-escape
+        return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
+      });
+
+      let replacedMatch = null;
+
+      for (const pattern of patterns) {
+        const newContent = handlerContent.replace(
+          pattern,
+          (_, p1, p2, p3) => {
+            return `${p1}${join(locationPrefix, p2)}${p3}`;
+          }
+        );
+
+        if (newContent !== handlerContent) {
+          debug(
+            `Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`
+          );
+
+          handlerContent = newContent;
+          replacedMatch = true;
+        }
+      }
+
+      if (!replacedMatch) {
+        new Error(
+          `No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`
+        );
+      }
+
+      await fs.writeFile(entry, handlerContent, encoding);
+    } else {
+      await fs.copy(handlerFile.fsPath, entry);
+    }
 
     const newFilesEntrypoint: Array<string> = [];
     const newDirectoriesEntrypoint: Array<string> = [];
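The quote-delimited rewrite added above is easier to see in isolation. A minimal standalone sketch of the same idea (the function name and inputs here are hypothetical, for illustration only):

```ts
import { join } from 'path';

// Rewrite quoted occurrences of `importPath` inside launcher source so they
// point at the handler's real location. As in the hunk above, the path only
// matches when wrapped in single (') or double (") quotes.
function rewriteHandlerImport(
  source: string,
  importPath: string,
  locationPrefix: string
): string {
  const pattern = new RegExp(
    `('|")(${importPath.replace(/\./g, '\\.')})('|")`,
    'g'
  );
  return source.replace(pattern, (_, open, path, close) => {
    return `${open}${join(locationPrefix, path)}${close}`;
  });
}

// Example: a launcher that loads './api/test' gets repointed three levels up.
rewriteHandlerImport(
  `handler = import_module('./api/test')`,
  './api/test',
  '../../..'
);
// -> handler = import_module('../../../api/test')
```

Note that the committed code above calls `new Error(...)` without `throw` in the no-match branch, so that condition is constructed and silently discarded rather than raised.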
@@ -230,68 +296,28 @@ export function convertRuntimeToPlugin(
       }
     }
 
-    const tracedFiles: {
-      absolutePath: string;
-      relativePath: string;
-    }[] = [];
-
-    const linkers = Object.entries(lambdaFiles).map(
-      async ([relPath, file]) => {
-        const newPath = join(traceDir, relPath);
-
-        // The handler was already moved into position above.
-        if (relPath === handlerFileBase) {
-          return;
-        }
-
-        tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
-        const { fsPath, type } = file;
-
-        if (fsPath) {
-          await fs.ensureDir(dirname(newPath));
-
-          const isNewFile = newFilesEntrypoint.includes(fsPath);
-
-          const isInsideNewDirectory = newDirectoriesEntrypoint.some(
-            dirPath => {
-              return fsPath.startsWith(dirPath);
-            }
-          );
-
-          // With this, we're making sure that files in the `workPath` that existed
-          // before the Legacy Runtime was invoked (source files) are linked from
-          // `.output` instead of copying there (the latter only happens if linking fails),
-          // which is the fastest solution. However, files that are created fresh
-          // by the Legacy Runtimes are always copied, because their link destinations
-          // are likely to be overwritten every time an entrypoint is processed by
-          // the Legacy Runtime. This is likely to overwrite the destination on subsequent
-          // runs, but that's also how `workPath` used to work originally, without
-          // the File System API (meaning that there was one `workPath` for all entrypoints).
-          if (isNewFile || isInsideNewDirectory) {
-            debug(`Copying from ${fsPath} to ${newPath}`);
-            await fs.copy(fsPath, newPath);
-          } else {
-            await linkOrCopy(fsPath, newPath);
-          }
-        } else if (type === 'FileBlob') {
-          const { data, mode } = file as FileBlob;
-          await fs.writeFile(newPath, data, { mode });
-        } else {
-          throw new Error(`Unknown file type: ${type}`);
-        }
-      }
-    );
-
-    linkersRuntime = linkersRuntime.concat(linkers);
-
     const nft = `${entry}.nft.json`;
 
     const json = JSON.stringify({
-      version: 1,
-      files: tracedFiles.map(file => ({
-        input: normalizePath(relative(dirname(nft), file.absolutePath)),
-        output: normalizePath(file.relativePath),
-      })),
+      version: 2,
+      files: Object.keys(lambdaFiles)
+        .map(file => {
+          const { fsPath } = lambdaFiles[file];
+
+          if (!fsPath) {
+            throw new Error(
+              `File "${file}" is missing valid \`fsPath\` property`
+            );
+          }
+
+          // The handler was already moved into position above.
+          if (file === handlerFileBase) {
+            return;
+          }
+
+          return normalizePath(relative(dirname(nft), fsPath));
+        })
+        .filter(Boolean),
     });
 
     await fs.ensureDir(dirname(nft));
@@ -322,12 +348,6 @@ export function convertRuntimeToPlugin(
       };
     }
 
-    // Instead of of waiting for all of the linking to be done for every
-    // entrypoint before processing the next one, we immediately handle all
-    // of them one after the other, while then waiting for the linking
-    // to finish right here, before we clean up newly created files below.
-    await Promise.all(linkersRuntime);
-
     // A list of all the files that were created by the Legacy Runtime,
     // which we'd like to remove from the File System.
     const toRemove = Array.from(newPathsRuntime).map(path => {
@@ -348,16 +368,6 @@ export function convertRuntimeToPlugin(
   };
 }
 
-async function linkOrCopy(existingPath: string, newPath: string) {
-  try {
-    await fs.createLink(existingPath, newPath);
-  } catch (err: any) {
-    if (err.code !== 'EEXIST') {
-      await fs.copyFile(existingPath, newPath);
-    }
-  }
-}
-
 async function readJson(filePath: string): Promise<{ [key: string]: any }> {
   try {
     const str = await fs.readFile(filePath, 'utf8');
@@ -388,7 +398,7 @@ export async function updateFunctionsManifest({
   );
   const functionsManifest = await readJson(functionsManifestPath);
 
-  if (!functionsManifest.version) functionsManifest.version = 1;
+  if (!functionsManifest.version) functionsManifest.version = 2;
   if (!functionsManifest.pages) functionsManifest.pages = {};
 
   for (const [pageKey, pageConfig] of Object.entries(pages)) {
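With `version: 2`, the `.nft.json` no longer describes copied `inputs`; each entry is a plain string pointing back at the original source file, relative to the `.nft.json` itself. A condensed sketch of how that payload is assembled (simplified from the hunk above; the `TracedFiles` type and the inline separator normalization are stand-ins):

```ts
import { dirname, relative } from 'path';

type TracedFiles = { [relPath: string]: { fsPath?: string } };

// Assemble the version-2 payload: every traced file becomes a path that is
// relative to the `.nft.json` location and points at the original source.
function buildNftV2(
  nft: string,
  lambdaFiles: TracedFiles,
  handlerFileBase: string
) {
  return {
    version: 2,
    files: Object.keys(lambdaFiles)
      // The handler itself was already copied into place.
      .filter(file => file !== handlerFileBase)
      .map(file => {
        const { fsPath } = lambdaFiles[file];
        if (!fsPath) throw new Error(`File "${file}" is missing \`fsPath\``);
        // Stand-in for the real `normalizePath` helper (posix separators).
        return relative(dirname(nft), fsPath).split('\\').join('/');
      }),
  };
}
```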
convert-runtime-to-plugin.test.ts (@vercel/build-utils):

@@ -62,7 +62,6 @@ describe('convert-runtime-to-plugin', () => {
     return { output: lambda };
   };
 
-  const lambdaFiles = await fsToJson(workPath);
   const packageName = 'vercel-plugin-python';
   const build = await convertRuntimeToPlugin(buildRuntime, packageName, ext);
 
@@ -70,14 +69,8 @@ describe('convert-runtime-to-plugin', () => {
 
   const output = await fsToJson(join(workPath, '.output'));
 
-  delete lambdaFiles['vercel.json'];
-  delete lambdaFiles['vc__handler__python.py'];
-
   expect(output).toMatchObject({
     'functions-manifest.json': expect.stringContaining('{'),
-    inputs: {
-      'api-routes-python': lambdaFiles,
-    },
     server: {
       pages: {
         api: {
@@ -96,7 +89,7 @@ describe('convert-runtime-to-plugin', () => {
 
   const funcManifest = JSON.parse(output['functions-manifest.json']);
   expect(funcManifest).toMatchObject({
-    version: 1,
+    version: 2,
     pages: {
       'api/index.py': { ...lambdaOptions, handler: 'index.vc_handler' },
       'api/users/get.py': { ...lambdaOptions, handler: 'get.vc_handler' },
@@ -110,40 +103,16 @@ describe('convert-runtime-to-plugin', () => {
 
   const indexJson = JSON.parse(output.server.pages.api['index.py.nft.json']);
   expect(indexJson).toMatchObject({
-    version: 1,
+    version: 2,
     files: [
-      {
-        input: `../../../inputs/api-routes-python/api/db/[id].py`,
-        output: 'api/db/[id].py',
-      },
-      {
-        input: `../../../inputs/api-routes-python/api/index.py`,
-        output: 'api/index.py',
-      },
-      {
-        input: `../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
-        output: 'api/project/[aid]/[bid]/index.py',
-      },
-      {
-        input: `../../../inputs/api-routes-python/api/users/get.py`,
-        output: 'api/users/get.py',
-      },
-      {
-        input: `../../../inputs/api-routes-python/api/users/post.py`,
-        output: 'api/users/post.py',
-      },
-      {
-        input: `../../../inputs/api-routes-python/file.txt`,
-        output: 'file.txt',
-      },
-      {
-        input: `../../../inputs/api-routes-python/util/date.py`,
-        output: 'util/date.py',
-      },
-      {
-        input: `../../../inputs/api-routes-python/util/math.py`,
-        output: 'util/math.py',
-      },
+      '../../../../api/db/[id].py',
+      '../../../../api/index.py',
+      '../../../../api/project/[aid]/[bid]/index.py',
+      '../../../../api/users/get.py',
+      '../../../../api/users/post.py',
+      '../../../../file.txt',
+      '../../../../util/date.py',
+      '../../../../util/math.py',
     ],
   });
 
@@ -151,40 +120,16 @@ describe('convert-runtime-to-plugin', () => {
     output.server.pages.api.users['get.py.nft.json']
   );
   expect(getJson).toMatchObject({
-    version: 1,
+    version: 2,
     files: [
-      {
-        input: `../../../../inputs/api-routes-python/api/db/[id].py`,
-        output: 'api/db/[id].py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/index.py`,
-        output: 'api/index.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
-        output: 'api/project/[aid]/[bid]/index.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/users/get.py`,
-        output: 'api/users/get.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/users/post.py`,
-        output: 'api/users/post.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/file.txt`,
-        output: 'file.txt',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/util/date.py`,
-        output: 'util/date.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/util/math.py`,
-        output: 'util/math.py',
-      },
+      '../../../../../api/db/[id].py',
+      '../../../../../api/index.py',
+      '../../../../../api/project/[aid]/[bid]/index.py',
+      '../../../../../api/users/get.py',
+      '../../../../../api/users/post.py',
+      '../../../../../file.txt',
+      '../../../../../util/date.py',
+      '../../../../../util/math.py',
     ],
   });
 
@@ -192,40 +137,16 @@ describe('convert-runtime-to-plugin', () => {
     output.server.pages.api.users['post.py.nft.json']
  );
   expect(postJson).toMatchObject({
-    version: 1,
+    version: 2,
     files: [
-      {
-        input: `../../../../inputs/api-routes-python/api/db/[id].py`,
-        output: 'api/db/[id].py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/index.py`,
-        output: 'api/index.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
-        output: 'api/project/[aid]/[bid]/index.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/users/get.py`,
-        output: 'api/users/get.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/api/users/post.py`,
-        output: 'api/users/post.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/file.txt`,
-        output: 'file.txt',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/util/date.py`,
-        output: 'util/date.py',
-      },
-      {
-        input: `../../../../inputs/api-routes-python/util/math.py`,
-        output: 'util/math.py',
-      },
+      '../../../../../api/db/[id].py',
+      '../../../../../api/index.py',
+      '../../../../../api/project/[aid]/[bid]/index.py',
+      '../../../../../api/users/get.py',
+      '../../../../../api/users/post.py',
+      '../../../../../file.txt',
+      '../../../../../util/date.py',
+      '../../../../../util/math.py',
    ],
  });
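The new expectations are plain relative strings, and the path arithmetic checks out: resolved against the directory of the `.nft.json` that contains them, they land back on the original sources. A worked example (the `workPath` value is hypothetical):

```ts
import { dirname, join, resolve } from 'path';

const workPath = '/work/api-routes-python'; // hypothetical fixture root

// index.py.nft.json sits four directories below the fixture root:
const nftFile = join(workPath, '.output/server/pages/api/index.py.nft.json');

// Four `..` segments climb back out of `.output/server/pages/api`:
resolve(dirname(nftFile), '../../../../api/index.py');
// -> /work/api-routes-python/api/index.py

// The nft files under `api/users` are one level deeper, hence five `..`:
resolve(
  dirname(join(workPath, '.output/server/pages/api/users/get.py.nft.json')),
  '../../../../../api/users/get.py'
);
// -> /work/api-routes-python/api/users/get.py
```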
package.json (vercel CLI):

@@ -1,6 +1,6 @@
 {
   "name": "vercel",
-  "version": "23.1.3-canary.58",
+  "version": "23.1.3-canary.63",
   "preferGlobal": true,
   "license": "Apache-2.0",
   "description": "The command-line interface for Vercel",
@@ -43,14 +43,14 @@
     "node": ">= 12"
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.35",
+    "@vercel/build-utils": "2.12.3-canary.40",
     "@vercel/go": "1.2.4-canary.4",
     "@vercel/node": "1.12.2-canary.7",
     "@vercel/python": "2.1.2-canary.1",
     "@vercel/ruby": "1.2.10-canary.0",
     "update-notifier": "4.1.0",
-    "vercel-plugin-middleware": "0.0.0-canary.11",
-    "vercel-plugin-node": "1.12.2-canary.27"
+    "vercel-plugin-middleware": "0.0.0-canary.16",
+    "vercel-plugin-node": "1.12.2-canary.32"
   },
   "devDependencies": {
     "@next/env": "11.1.2",
@@ -90,7 +90,7 @@
     "@types/update-notifier": "5.1.0",
     "@types/which": "1.3.2",
     "@types/write-json-file": "2.2.1",
-    "@vercel/frameworks": "0.5.1-canary.16",
+    "@vercel/frameworks": "0.5.1-canary.17",
     "@vercel/ncc": "0.24.0",
     "@vercel/nft": "0.17.0",
     "@zeit/fun": "0.11.2",
build command source (vercel CLI):

@@ -12,10 +12,9 @@ import Sema from 'async-sema';
 import chalk from 'chalk';
 import { SpawnOptions } from 'child_process';
 import { assert } from 'console';
-import { createHash } from 'crypto';
 import fs from 'fs-extra';
 import ogGlob from 'glob';
-import { dirname, isAbsolute, join, parse, relative, resolve } from 'path';
+import { dirname, isAbsolute, join, parse, relative } from 'path';
 import pluralize from 'pluralize';
 import Client from '../util/client';
 import { VercelConfig } from '../util/dev/types';
@@ -354,18 +353,19 @@ export default async function main(client: Client) {
   }
 
   // We cannot rely on the `framework` alone, as it might be a static export,
-  // and the current build might use a differnt project that's not in the settings.
+  // and the current build might use a different project that's not in the settings.
   const isNextOutput = Boolean(dotNextDir);
   const nextExport = await getNextExportStatus(dotNextDir);
   const outputDir =
     isNextOutput && !nextExport ? OUTPUT_DIR : join(OUTPUT_DIR, 'static');
+  const getDistDir = framework.getFsOutputDir || framework.getOutputDirName;
   const distDir =
     (nextExport?.exportDetail.outDirectory
       ? relative(cwd, nextExport.exportDetail.outDirectory)
       : false) ||
     dotNextDir ||
     userOutputDirectory ||
-    (await framework.getFsOutputDir(cwd));
+    (await getDistDir(cwd));
 
   await fs.ensureDir(join(cwd, outputDir));
 
@@ -636,30 +636,15 @@ export default async function main(client: Client) {
           ],
         });
         fileList.delete(relative(cwd, f));
-        await resolveNftToOutput({
-          client,
-          baseDir,
-          outputDir: OUTPUT_DIR,
-          nftFileName: f.replace(ext, '.js.nft.json'),
-          distDir,
-          nft: {
-            version: 1,
-            files: Array.from(fileList).map(fileListEntry =>
-              relative(dir, fileListEntry)
-            ),
-          },
-        });
-      }
-    } else {
-      for (let f of nftFiles) {
-        const json = await fs.readJson(f);
-        await resolveNftToOutput({
-          client,
-          baseDir,
-          outputDir: OUTPUT_DIR,
-          nftFileName: f,
-          nft: json,
-          distDir,
-        });
+
+        const nftFileName = f.replace(ext, '.js.nft.json');
+        client.output.debug(`Creating ${nftFileName}`);
+
+        await fs.writeJSON(nftFileName, {
+          version: 2,
+          files: Array.from(fileList).map(fileListEntry =>
+            relative(dir, fileListEntry)
+          ),
+        });
       }
     }
@@ -683,15 +668,7 @@ export default async function main(client: Client) {
         const originalPath = join(requiredServerFilesJson.appDir, i);
         const relPath = join(OUTPUT_DIR, relative(distDir, originalPath));
 
-        const absolutePath = join(cwd, relPath);
-        const output = relative(baseDir, absolutePath);
-
-        return relPath === output
-          ? relPath
-          : {
-              input: relPath,
-              output,
-            };
+        return relPath;
       }),
     });
   }
@@ -848,88 +825,6 @@ async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
   });
 }
 
-/**
- * Computes a hash for the given buf.
- *
- * @param {Buffer} file data
- * @return {String} hex digest
- */
-function hash(buf: Buffer): string {
-  return createHash('sha1').update(buf).digest('hex');
-}
-
-interface NftFile {
-  version: number;
-  files: (string | { input: string; output: string })[];
-}
-
-// resolveNftToOutput takes nft file and moves all of its trace files
-// into the specified directory + `inputs`, (renaming them to their hash + ext) and
-// subsequently updating the original nft file accordingly. This is done
-// to make the `.output` directory be self-contained, so that it works
-// properly with `vc --prebuilt`.
-async function resolveNftToOutput({
-  client,
-  baseDir,
-  outputDir,
-  nftFileName,
-  distDir,
-  nft,
-}: {
-  client: Client;
-  baseDir: string;
-  outputDir: string;
-  nftFileName: string;
-  distDir: string;
-  nft: NftFile;
-}) {
-  client.output.debug(`Processing and resolving ${nftFileName}`);
-  await fs.ensureDir(join(outputDir, 'inputs'));
-  const newFilesList: NftFile['files'] = [];
-
-  // If `distDir` is a subdirectory, then the input has to be resolved to where the `.output` directory will be.
-  const relNftFileName = relative(outputDir, nftFileName);
-  const origNftFilename = join(distDir, relNftFileName);
-
-  if (relNftFileName.startsWith('cache/')) {
-    // No need to process the `cache/` directory.
-    // Paths in it might also not be relative to `cache` itself.
-    return;
-  }
-
-  for (let fileEntity of nft.files) {
-    const relativeInput =
-      typeof fileEntity === 'string' ? fileEntity : fileEntity.input;
-    const fullInput = resolve(join(parse(origNftFilename).dir, relativeInput));
-
-    // if the resolved path is NOT in the .output directory we move in it there
-    if (!fullInput.includes(distDir)) {
-      const { ext } = parse(fullInput);
-      const raw = await fs.readFile(fullInput);
-      const newFilePath = join(outputDir, 'inputs', hash(raw) + ext);
-      smartCopy(client, fullInput, newFilePath);
-
-      // We have to use `baseDir` instead of `cwd`, because we want to
-      // mount everything from there (especially `node_modules`).
-      // This is important for NPM Workspaces where `node_modules` is not
-      // in the directory of the workspace.
-      const output = relative(baseDir, fullInput).replace('.output', '.next');
-
-      newFilesList.push({
-        input: relative(parse(nftFileName).dir, newFilePath),
-        output,
-      });
-    } else {
-      newFilesList.push(relativeInput);
-    }
-  }
-  // Update the .nft.json with new input and output mapping
-  await fs.writeJSON(nftFileName, {
-    ...nft,
-    files: newFilesList,
-  });
-}
-
 /**
  * Files will only exist when `next export` was used.
  */
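The indirection through `resolveNftToOutput` is gone; the trace file is now written directly next to the page it describes. Restated as a standalone sketch (`dir`, `f`, and `fileList` are hypothetical stand-ins for the surrounding locals):

```ts
import fs from 'fs-extra';
import { relative } from 'path';

// Write a version-2 trace for one traced page: `f` is the page file,
// `dir` the directory the entries should be relative to, and `fileList`
// the set of absolute paths the tracer collected.
async function writePageTrace(dir: string, f: string, fileList: Set<string>) {
  const nftFileName = f.replace(/\.js$/, '.js.nft.json');

  await fs.writeJSON(nftFileName, {
    version: 2,
    files: Array.from(fileList).map(entry => relative(dir, entry)),
  });
}
```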
package.json (@vercel/client):

@@ -1,6 +1,6 @@
 {
   "name": "@vercel/client",
-  "version": "10.2.3-canary.36",
+  "version": "10.2.3-canary.41",
   "main": "dist/index.js",
   "typings": "dist/index.d.ts",
   "homepage": "https://vercel.com",
@@ -40,7 +40,7 @@
     ]
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.35",
+    "@vercel/build-utils": "2.12.3-canary.40",
     "@zeit/fetch": "5.2.0",
     "async-retry": "1.2.3",
     "async-sema": "3.0.0",
package.json (@vercel/frameworks):

@@ -1,6 +1,6 @@
 {
   "name": "@vercel/frameworks",
-  "version": "0.5.1-canary.16",
+  "version": "0.5.1-canary.17",
   "main": "./dist/frameworks.js",
   "types": "./dist/frameworks.d.ts",
   "files": [
frameworks definition (@vercel/frameworks):

@@ -141,7 +141,6 @@ export const frameworks = [
     },
     dependency: 'gatsby',
     getOutputDirName: async () => 'public',
-    getFsOutputDir: async () => 'public',
     defaultRoutes: async (dirPrefix: string) => {
       // This file could be generated by gatsby-plugin-now or gatsby-plugin-zeit-now
       try {
@@ -226,7 +225,6 @@ export const frameworks = [
       },
     },
     dependency: 'remix',
-    getFsOutputDir: async () => 'public',
     getOutputDirName: async () => 'public',
     defaultRoutes: [
       {
@@ -254,10 +252,13 @@ export const frameworks = [
         source: '/build/(.*)',
         regex: '/build/(.*)',
         headers: [
-          { key: 'cache-control', value: 'public, max-age=31536000, immutable' },
+          {
+            key: 'cache-control',
+            value: 'public, max-age=31536000, immutable',
+          },
         ],
       },
-    ]
+    ],
   },
   {
     name: 'Hexo',
@@ -294,7 +295,6 @@ export const frameworks = [
       },
     },
     dependency: 'hexo',
-    getFsOutputDir: async () => 'public',
     getOutputDirName: async () => 'public',
   },
   {
@@ -332,7 +332,6 @@ export const frameworks = [
       },
     },
     dependency: '@11ty/eleventy',
-    getFsOutputDir: async () => '_site',
     getOutputDirName: async () => '_site',
     cachePattern: '.cache/**',
   },
@@ -372,22 +371,6 @@ export const frameworks = [
       },
     },
     dependency: '@docusaurus/core',
-    getFsOutputDir: async (dirPrefix: string) => {
-      const base = 'build';
-      try {
-        const location = join(dirPrefix, base);
-        const content = await readdir(location, { withFileTypes: true });
-
-        // If there is only one file in it that is a dir we'll use it as dist dir
-        if (content.length === 1 && content[0].isDirectory()) {
-          return join(base, content[0].name);
-        }
-      } catch (error) {
-        console.error(`Error detecting output directory: `, error);
-      }
-
-      return base;
-    },
     getOutputDirName: async (dirPrefix: string) => {
       const base = 'build';
       try {
@@ -527,21 +510,6 @@ export const frameworks = [
       },
     },
     dependency: 'docusaurus',
-    getFsOutputDir: async (dirPrefix: string) => {
-      const base = 'build';
-      try {
-        const location = join(dirPrefix, base);
-        const content = await readdir(location, { withFileTypes: true });
-
-        // If there is only one file in it that is a dir we'll use it as dist dir
-        if (content.length === 1 && content[0].isDirectory()) {
-          return join(base, content[0].name);
-        }
-      } catch (error) {
-        console.error(`Error detecting output directory: `, error);
-      }
-      return base;
-    },
     getOutputDirName: async (dirPrefix: string) => {
       const base = 'build';
       try {
@@ -593,7 +561,6 @@ export const frameworks = [
       },
     },
     dependency: 'preact-cli',
-    getFsOutputDir: async () => 'build',
     getOutputDirName: async () => 'build',
     defaultRoutes: [
       {
@@ -650,7 +617,6 @@ export const frameworks = [
       },
     },
     dependency: '@dojo/cli',
-    getFsOutputDir: async () => 'output/dist',
     getOutputDirName: async () => join('output', 'dist'),
     defaultRoutes: [
       {
@@ -717,7 +683,6 @@ export const frameworks = [
       },
     },
     dependency: 'ember-cli',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async () => 'dist',
     defaultRoutes: [
       {
@@ -772,7 +737,6 @@ export const frameworks = [
       },
     },
     dependency: '@vue/cli-service',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async () => 'dist',
     defaultRoutes: [
       {
@@ -849,7 +813,6 @@ export const frameworks = [
       },
     },
     dependency: '@scullyio/init',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async () => 'dist/static',
   },
   {
@@ -886,7 +849,6 @@ export const frameworks = [
       },
     },
     dependency: '@ionic/angular',
-    getFsOutputDir: async () => 'www',
     getOutputDirName: async () => 'www',
     defaultRoutes: [
       {
@@ -940,7 +902,6 @@ export const frameworks = [
       },
     },
     dependency: '@angular/cli',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async (dirPrefix: string) => {
       const base = 'dist';
       try {
@@ -1008,7 +969,6 @@ export const frameworks = [
       },
     },
     dependency: 'polymer-cli',
-    getFsOutputDir: async () => 'build',
     getOutputDirName: async (dirPrefix: string) => {
       const base = 'build';
       try {
@@ -1078,7 +1038,6 @@ export const frameworks = [
       },
     },
     dependency: 'sirv-cli',
-    getFsOutputDir: async () => 'public',
     getOutputDirName: async () => 'public',
     defaultRoutes: [
       {
@@ -1128,10 +1087,9 @@ export const frameworks = [
         placeholder: 'svelte-kit dev',
       },
       outputDirectory: {
-        placeholder: 'public',
+        value: 'public',
       },
     },
-    getFsOutputDir: async () => '.output',
     getOutputDirName: async () => 'public',
   },
   {
@@ -1168,7 +1126,6 @@ export const frameworks = [
       },
     },
     dependency: '@ionic/react',
-    getFsOutputDir: async () => 'build',
     getOutputDirName: async () => 'build',
     defaultRoutes: [
       {
@@ -1276,7 +1233,6 @@ export const frameworks = [
       },
     },
     dependency: 'react-scripts',
-    getFsOutputDir: async () => 'build',
     getOutputDirName: async () => 'build',
     defaultRoutes: [
       {
@@ -1378,7 +1334,6 @@ export const frameworks = [
       },
     },
     dependency: 'gridsome',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async () => 'dist',
   },
   {
@@ -1416,7 +1371,6 @@ export const frameworks = [
       },
     },
     dependency: 'umi',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async () => 'dist',
     defaultRoutes: [
       {
@@ -1470,7 +1424,6 @@ export const frameworks = [
       },
     },
     dependency: 'sapper',
-    getFsOutputDir: async () => '__sapper__/export',
     getOutputDirName: async () => '__sapper__/export',
   },
   {
@@ -1508,7 +1461,6 @@ export const frameworks = [
       },
     },
     dependency: 'saber',
-    getFsOutputDir: async () => 'public',
     getOutputDirName: async () => 'public',
     defaultRoutes: [
       {
@@ -1577,7 +1529,6 @@ export const frameworks = [
       },
     },
     dependency: '@stencil/core',
-    getFsOutputDir: async () => 'www',
     getOutputDirName: async () => 'www',
     defaultRoutes: [
       {
@@ -1666,7 +1617,6 @@ export const frameworks = [
       },
     },
     dependency: 'nuxt',
-    getFsOutputDir: async () => '.output',
     getOutputDirName: async () => 'dist',
     cachePattern: '.nuxt/**',
     defaultRoutes: [
@@ -1724,7 +1674,6 @@ export const frameworks = [
         placeholder: 'RedwoodJS default',
       },
     },
-    getFsOutputDir: async () => 'public',
     getOutputDirName: async () => 'public',
   },
   {
@@ -1768,16 +1717,6 @@ export const frameworks = [
         placeholder: '`public` or `publishDir` from the `config` file',
       },
     },
-    getFsOutputDir: async (dirPrefix: string): Promise<string> => {
-      type HugoConfig = { publishDir?: string };
-      const config = await readConfigFile<HugoConfig>(
-        ['config.json', 'config.yaml', 'config.toml'].map(fileName => {
-          return join(dirPrefix, fileName);
-        })
-      );
-
-      return (config && config.publishDir) || 'public';
-    },
     getOutputDirName: async (dirPrefix: string): Promise<string> => {
       type HugoConfig = { publishDir?: string };
       const config = await readConfigFile<HugoConfig>(
@@ -1822,13 +1761,6 @@ export const frameworks = [
         placeholder: '`_site` or `destination` from `_config.yml`',
       },
     },
-    getFsOutputDir: async (dirPrefix: string): Promise<string> => {
-      type JekyllConfig = { destination?: string };
-      const config = await readConfigFile<JekyllConfig>(
-        join(dirPrefix, '_config.yml')
-      );
-      return (config && config.destination) || '_site';
-    },
     getOutputDirName: async (dirPrefix: string): Promise<string> => {
      type JekyllConfig = { destination?: string };
       const config = await readConfigFile<JekyllConfig>(
@@ -1870,7 +1802,6 @@ export const frameworks = [
         value: 'public',
       },
     },
-    getFsOutputDir: async () => 'public',
     getOutputDirName: async () => 'public',
   },
   {
@@ -1905,7 +1836,6 @@ export const frameworks = [
         value: 'build',
       },
     },
-    getFsOutputDir: async () => 'build',
     getOutputDirName: async () => 'build',
     cachePattern: '{vendor/bin,vendor/cache,vendor/bundle}/**',
   },
@@ -1940,7 +1870,6 @@ export const frameworks = [
         value: 'public',
       },
     },
-    getFsOutputDir: async () => 'public',
     getOutputDirName: async () => 'public',
     defaultVersion: '0.13.0',
   },
@@ -1980,7 +1909,6 @@ export const frameworks = [
       },
     },
     dependency: 'vite',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async () => 'dist',
   },
   {
@@ -2018,7 +1946,6 @@ export const frameworks = [
       },
     },
     dependency: 'parcel',
-    getFsOutputDir: async () => 'dist',
     getOutputDirName: async () => 'dist',
     defaultRoutes: [
       {
Framework interface (@vercel/frameworks):

@@ -162,9 +162,9 @@ export interface Framework {
   dependency?: string;
   /**
    * Function that returns the name of the directory that the framework outputs
-   * its build results to. In some cases this is read from a configuration file.
+   * its File System API build results to, usually called `.output`.
    */
-  getFsOutputDir: (dirPrefix: string) => Promise<string>;
+  getFsOutputDir?: (dirPrefix: string) => Promise<string>;
   /**
    * Function that returns the name of the directory that the framework outputs
    * its STATIC build results to. In some cases this is read from a configuration file.
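Making `getFsOutputDir` optional is what enables the fallback added to the build command earlier (`framework.getFsOutputDir || framework.getOutputDirName`). A minimal sketch of that consumer-side pattern, with the interface abbreviated to the two accessors that matter here:

```ts
// Abbreviated from the Framework interface above.
interface Framework {
  getFsOutputDir?: (dirPrefix: string) => Promise<string>;
  getOutputDirName: (dirPrefix: string) => Promise<string>;
}

// Prefer the File System API directory when a framework still defines it,
// otherwise fall back to the static output directory, as the build command
// hunk does.
async function distDirFor(framework: Framework, cwd: string): Promise<string> {
  const getDistDir = framework.getFsOutputDir || framework.getOutputDirName;
  return getDistDir(cwd);
}
```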
package.json (vercel-plugin-middleware):

@@ -1,6 +1,6 @@
 {
   "name": "vercel-plugin-middleware",
-  "version": "0.0.0-canary.11",
+  "version": "0.0.0-canary.16",
   "license": "MIT",
   "main": "./dist/index",
   "homepage": "",
@@ -30,7 +30,7 @@
     "@types/node-fetch": "^2",
     "@types/ua-parser-js": "0.7.36",
     "@types/uuid": "8.3.1",
-    "@vercel/build-utils": "2.12.3-canary.35",
+    "@vercel/build-utils": "2.12.3-canary.40",
     "@vercel/ncc": "0.24.0",
     "cookie": "0.4.1",
     "formdata-node": "4.3.1",
functions-manifest test snapshot:

@@ -15,6 +15,6 @@ Object {
       "sortingIndex": 1,
     },
   },
-  "version": 1,
+  "version": 2,
 }
 `;
package.json (vercel-plugin-go):

@@ -1,7 +1,7 @@
 {
   "private": false,
   "name": "vercel-plugin-go",
-  "version": "1.0.0-canary.23",
+  "version": "1.0.0-canary.28",
   "main": "dist/index.js",
   "license": "MIT",
   "files": [
@@ -17,7 +17,7 @@
     "prepublishOnly": "tsc"
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.35",
+    "@vercel/build-utils": "2.12.3-canary.40",
     "@vercel/go": "1.2.4-canary.4"
   },
   "devDependencies": {
package.json (vercel-plugin-node):

@@ -1,6 +1,6 @@
 {
   "name": "vercel-plugin-node",
-  "version": "1.12.2-canary.27",
+  "version": "1.12.2-canary.32",
   "license": "MIT",
   "main": "./dist/index",
   "homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -34,7 +34,7 @@
     "@types/node-fetch": "2",
     "@types/test-listen": "1.1.0",
     "@types/yazl": "2.4.2",
-    "@vercel/build-utils": "2.12.3-canary.35",
+    "@vercel/build-utils": "2.12.3-canary.40",
     "@vercel/fun": "1.0.3",
     "@vercel/ncc": "0.24.0",
     "@vercel/nft": "0.14.0",
package.json (vercel-plugin-python):

@@ -1,7 +1,7 @@
 {
   "private": false,
   "name": "vercel-plugin-python",
-  "version": "1.0.0-canary.24",
+  "version": "1.0.0-canary.29",
   "main": "dist/index.js",
   "license": "MIT",
   "files": [
@@ -17,7 +17,7 @@
     "prepublishOnly": "tsc"
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.35",
+    "@vercel/build-utils": "2.12.3-canary.40",
     "@vercel/python": "2.1.2-canary.1"
   },
   "devDependencies": {
package.json (vercel-plugin-ruby):

@@ -1,7 +1,7 @@
 {
   "private": false,
   "name": "vercel-plugin-ruby",
-  "version": "1.0.0-canary.23",
+  "version": "1.0.0-canary.28",
   "main": "dist/index.js",
   "license": "MIT",
   "files": [
@@ -17,7 +17,7 @@
     "prepublishOnly": "tsc"
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.35",
+    "@vercel/build-utils": "2.12.3-canary.40",
     "@vercel/ruby": "1.2.10-canary.0"
   },
   "devDependencies": {