Compare commits

..

14 Commits

Author SHA1 Message Date
Leo Lamprecht
d3ef240f6e Publish Canary
- @vercel/build-utils@2.12.3-canary.42
 - vercel@23.1.3-canary.67
 - @vercel/client@10.2.3-canary.45
 - vercel-plugin-middleware@0.0.0-canary.19
 - vercel-plugin-go@1.0.0-canary.30
 - vercel-plugin-node@1.12.2-canary.34
 - vercel-plugin-python@1.0.0-canary.31
 - vercel-plugin-ruby@1.0.0-canary.30
 - @vercel/python@2.1.2-canary.2
2021-12-08 15:53:14 +01:00
Leo Lamprecht
5b26ebc7b8 Make Python CLI Plugin work (#7155) 2021-12-08 15:52:43 +01:00
Leo Lamprecht
3427ad6ce0 Publish Canary
- vercel@23.1.3-canary.66
2021-12-08 12:50:58 +01:00
Leo Lamprecht
4ab5e4326b Improved Vercel CLI link (#7151) 2021-12-08 12:50:27 +01:00
Leo Lamprecht
d24a3ce3ab Publish Canary
- @vercel/client@10.2.3-canary.44
2021-12-08 12:10:44 +01:00
Steven
29a44db8d9 [client] Fix duplicate files when analyzing nft.json (#7150)
This PR fixes a regression from #7144 where a duplicate file was added if `nft.json` referenced an existing file.

I also refactored the prepared files logic to avoid cloning the array in every loop iteration.

Review [without whitespace](https://github.com/vercel/vercel/pull/7150/files?diff=split&w=1) will make it easier to understand.

- Related to https://github.com/vercel/runtimes/issues/304
2021-12-08 01:40:49 +00:00
Steven
695f3a9212 Publish Canary
- vercel@23.1.3-canary.65
 - @vercel/client@10.2.3-canary.43
 - vercel-plugin-middleware@0.0.0-canary.18
2021-12-07 18:25:02 -05:00
Steven
3ff777b8ed [client] Resolve .nft.json files when vc deploy --prebuilt (#7144)
This ensures that using `vc deploy --prebuilt` will also upload any files that `.output/**/.nft.json` points to and also handle the Root Directory correctly since `vc build` emits `rootdir/.output`.

- Related to https://github.com/vercel/runtimes/issues/304.
2021-12-07 18:17:58 -05:00
Tommaso De Rossi
d94b9806ab [middleware] Define env vars when building _middleware.js with esbuild (#7087)
### Related Issues

> Fixes #7086
> Related to #7086

### 📋 Checklist

<!--
  Please keep your PR as a Draft until the checklist is complete
-->

#### Tests

- [x] The code changed/added as part of this PR has been covered with tests
- [x] All tests pass locally with `yarn test-unit`

#### Code Review

- [x] This PR has a concise title and thorough description useful to a reviewer
- [ ] Issue from task tracker has a link to this PR
2021-12-07 22:16:46 +00:00
Leo Lamprecht
35c8fc2729 Publish Canary
- @vercel/build-utils@2.12.3-canary.41
 - vercel@23.1.3-canary.64
 - @vercel/client@10.2.3-canary.42
 - vercel-plugin-middleware@0.0.0-canary.17
 - vercel-plugin-go@1.0.0-canary.29
 - vercel-plugin-node@1.12.2-canary.33
 - vercel-plugin-python@1.0.0-canary.30
 - vercel-plugin-ruby@1.0.0-canary.29
2021-12-07 21:13:41 +01:00
Leo Lamprecht
0a468fd6d7 Correctly clean up files for CLI Plugins (#7149)
* Correctly clean up files for CLI Plugins

* Cleaned up the code
2021-12-07 21:13:29 +01:00
Leo Lamprecht
d31ebbabe4 Publish Canary
- @vercel/build-utils@2.12.3-canary.40
 - vercel@23.1.3-canary.63
 - @vercel/client@10.2.3-canary.41
 - vercel-plugin-middleware@0.0.0-canary.16
 - vercel-plugin-go@1.0.0-canary.28
 - vercel-plugin-node@1.12.2-canary.32
 - vercel-plugin-python@1.0.0-canary.29
 - vercel-plugin-ruby@1.0.0-canary.28
2021-12-07 17:46:08 +01:00
Leo Lamprecht
09c9b71adb Adjust import statements inside Runtime launchers (#7148)
* Added basic logic

* Polished basic logic

* Made logic actually replace content

* Perfected the logic

* Added comment

* Simplified logic

* Added another comment

* Added debug log

* More detailed debug log

* Update packages/build-utils/src/convert-runtime-to-plugin.ts

Co-authored-by: Andy <AndyBitz@users.noreply.github.com>

* Update packages/build-utils/src/convert-runtime-to-plugin.ts

Co-authored-by: Steven <steven@ceriously.com>

* Simpler logic

Co-authored-by: Andy <AndyBitz@users.noreply.github.com>
Co-authored-by: Steven <steven@ceriously.com>
2021-12-07 17:44:59 +01:00
Leo Lamprecht
5975db4d66 Fixed middleware tests (#7146) 2021-12-07 01:10:58 +01:00
38 changed files with 442 additions and 220 deletions

View File

@@ -1,6 +1,6 @@
{ {
"name": "@vercel/build-utils", "name": "@vercel/build-utils",
"version": "2.12.3-canary.39", "version": "2.12.3-canary.42",
"license": "MIT", "license": "MIT",
"main": "./dist/index.js", "main": "./dist/index.js",
"types": "./dist/index.d.js", "types": "./dist/index.d.js",

View File

@@ -86,10 +86,10 @@ export function convertRuntimeToPlugin(
const pages: { [key: string]: any } = {}; const pages: { [key: string]: any } = {};
const pluginName = packageName.replace('vercel-plugin-', ''); const pluginName = packageName.replace('vercel-plugin-', '');
const outputPath = join(workPath, '.output');
const traceDir = join( const traceDir = join(
workPath, outputPath,
`.output`,
`inputs`, `inputs`,
// Legacy Runtimes can only provide API Routes, so that's // Legacy Runtimes can only provide API Routes, so that's
// why we can use this prefix for all of them. Here, we have to // why we can use this prefix for all of them. Here, we have to
@@ -100,10 +100,7 @@ export function convertRuntimeToPlugin(
await fs.ensureDir(traceDir); await fs.ensureDir(traceDir);
let newPathsRuntime: Set<string> = new Set(); const entryRoot = join(outputPath, 'server', 'pages');
const entryDir = join('.output', 'server', 'pages');
const entryRoot = join(workPath, entryDir);
for (const entrypoint of Object.keys(entrypoints)) { for (const entrypoint of Object.keys(entrypoints)) {
const { output } = await buildRuntime({ const { output } = await buildRuntime({
@@ -119,17 +116,6 @@ export function convertRuntimeToPlugin(
}, },
}); });
// Legacy Runtimes tend to pollute the `workPath` with compiled results,
// because the `workPath` used to be a place that was a place where they could
// just put anything, but nowadays it's the working directory of the `vercel build`
// command, which is the place where the developer keeps their source files,
// so we don't want to pollute this space unnecessarily. That means we have to clean
// up files that were created by the build, which is done further below.
const sourceFilesAfterBuild = await getSourceFiles(
workPath,
ignoreFilter
);
// @ts-ignore This symbol is a private API // @ts-ignore This symbol is a private API
const lambdaFiles: Files = output[FILES_SYMBOL]; const lambdaFiles: Files = output[FILES_SYMBOL];
@@ -145,6 +131,7 @@ export function convertRuntimeToPlugin(
let handlerFileBase = output.handler; let handlerFileBase = output.handler;
let handlerFile = lambdaFiles[handlerFileBase]; let handlerFile = lambdaFiles[handlerFileBase];
let handlerHasImport = false;
const { handler } = output; const { handler } = output;
const handlerMethod = handler.split('.').pop(); const handlerMethod = handler.split('.').pop();
@@ -158,6 +145,7 @@ export function convertRuntimeToPlugin(
if (!handlerFile) { if (!handlerFile) {
handlerFileBase = handlerFileName + ext; handlerFileBase = handlerFileName + ext;
handlerFile = lambdaFiles[handlerFileBase]; handlerFile = lambdaFiles[handlerFileBase];
handlerHasImport = true;
} }
if (!handlerFile || !handlerFile.fsPath) { if (!handlerFile || !handlerFile.fsPath) {
@@ -172,65 +160,89 @@ export function convertRuntimeToPlugin(
const entryPath = join(dirname(entrypoint), entryBase); const entryPath = join(dirname(entrypoint), entryBase);
const entry = join(entryRoot, entryPath); const entry = join(entryRoot, entryPath);
// We never want to link here, only copy, because the launcher // Create the parent directory of the API Route that will be created
// file often has the same name for every entrypoint, which means that // for the current entrypoint inside of `.output/server/pages/api`.
// every build for every entrypoint overwrites the launcher of the previous
// one, so linking would end with a broken reference.
await fs.ensureDir(dirname(entry)); await fs.ensureDir(dirname(entry));
await fs.copy(handlerFile.fsPath, entry);
const newFilesEntrypoint: Array<string> = []; // For compiled languages, the launcher file will be binary and therefore
const newDirectoriesEntrypoint: Array<string> = []; // won't try to import a user-provided request handler (instead, it will
// contain it). But for interpreted languages, the launcher might try to
// load a user-provided request handler from the source file instead of bundling
// it, so we have to adjust the import statement inside the launcher to point
// to the respective source file. Previously, Legacy Runtimes simply expected
// the user-provided request-handler to be copied right next to the launcher,
// but with the new File System API, files won't be moved around unnecessarily.
if (handlerHasImport) {
const { fsPath } = handlerFile;
const encoding = 'utf-8';
const preBuildFiles = Object.values(sourceFilesPreBuild).map(file => { // This is the true directory of the user-provided request handler in the
return file.fsPath; // source files, so that's what we will use as an import path in the launcher.
}); const locationPrefix = relative(entry, outputPath);
// Generate a list of directories and files that weren't present let handlerContent = await fs.readFile(fsPath, encoding);
// before the entrypoint was processed by the Legacy Runtime, so
// that we can perform a cleanup later. We need to divide into files
// and directories because only cleaning up files might leave empty
// directories, and listing directories separately also speeds up the
// build because we can just delete them, which wipes all of their nested
// paths, instead of iterating through all files that should be deleted.
for (const file in sourceFilesAfterBuild) {
if (!sourceFilesPreBuild[file]) {
const path = sourceFilesAfterBuild[file].fsPath;
const dirPath = dirname(path);
// If none of the files that were present before the entrypoint const importPaths = [
// was processed are contained within the directory we're looking // This is the full entrypoint path, like `./api/test.py`. In our tests
// at right now, then we know it's a newly added directory // Python didn't support importing from a parent directory without using different
// and it can therefore be removed later on. // code in the launcher that registers it as a location for modules and then changing
const isNewDir = !preBuildFiles.some(filePath => { // the importing syntax, but continuing to import it like before seems to work. If
return dirname(filePath).startsWith(dirPath); // other languages need this, we should consider excluding Python explicitly.
}); // `./${entrypoint}`,
// Check out the list of tracked directories that were // This is the entrypoint path without extension, like `api/test`
// newly added and see if one of them contains the path entrypoint.slice(0, -ext.length),
// we're looking at. ];
const hasParentDir = newDirectoriesEntrypoint.some(dir => {
return path.startsWith(dir);
});
// If we have already tracked a directory that was newly // Generate a list of regular expressions that we can use for
// added that sits above the file or directory that we're // finding matches, but only allow matches if the import path is
// looking at, we don't need to add more entries to the list // wrapped inside single (') or double quotes (").
// because when the parent will get removed in the future, const patterns = importPaths.map(path => {
// all of its children (and therefore the path we're looking at) // eslint-disable-next-line no-useless-escape
// will automatically get removed anyways. return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
if (hasParentDir) { });
continue;
}
if (isNewDir) { let replacedMatch = null;
newDirectoriesEntrypoint.push(dirPath);
} else { for (const pattern of patterns) {
newFilesEntrypoint.push(path); const newContent = handlerContent.replace(
pattern,
(_, p1, p2, p3) => {
return `${p1}${join(locationPrefix, p2)}${p3}`;
}
);
if (newContent !== handlerContent) {
debug(
`Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`
);
handlerContent = newContent;
replacedMatch = true;
} }
} }
if (!replacedMatch) {
new Error(
`No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`
);
}
await fs.writeFile(entry, handlerContent, encoding);
} else {
await fs.copy(handlerFile.fsPath, entry);
} }
// Legacy Runtimes based on interpreted languages will create a new launcher file
// for every entrypoint, but they will create each one inside `workPath`, which means that
// the launcher for one entrypoint will overwrite the launcher provided for the previous
// entrypoint. That's why, above, we copy the file contents into the new destination (and
// optionally transform them along the way), instead of linking. We then also want to remove
// the copy origin right here, so that the `workPath` doesn't contain a useless launcher file
// once the build has finished running.
await fs.remove(handlerFile.fsPath);
debug(`Removed temporary file "${handlerFile.fsPath}"`);
const nft = `${entry}.nft.json`; const nft = `${entry}.nft.json`;
const json = JSON.stringify({ const json = JSON.stringify({
@@ -255,18 +267,8 @@ export function convertRuntimeToPlugin(
.filter(Boolean), .filter(Boolean),
}); });
await fs.ensureDir(dirname(nft));
await fs.writeFile(nft, json); await fs.writeFile(nft, json);
// Extend the list of directories and files that were created by the
// Legacy Runtime with the list of directories and files that were
// created for the entrypoint that was just processed above.
newPathsRuntime = new Set([
...newPathsRuntime,
...newFilesEntrypoint,
...newDirectoriesEntrypoint,
]);
// Add an entry that will later on be added to the `functions-manifest.json` // Add an entry that will later on be added to the `functions-manifest.json`
// file that is placed inside of the `.output` directory. // file that is placed inside of the `.output` directory.
pages[normalizePath(entryPath)] = { pages[normalizePath(entryPath)] = {
@@ -283,20 +285,6 @@ export function convertRuntimeToPlugin(
}; };
} }
// A list of all the files that were created by the Legacy Runtime,
// which we'd like to remove from the File System.
const toRemove = Array.from(newPathsRuntime).map(path => {
debug(`Removing ${path} as part of cleanup`);
return fs.remove(path);
});
// Once all the entrypoints have been processed, we'd like to
// remove all the files from `workPath` that originally weren't present
// before the Legacy Runtime began running, because the `workPath`
// is nowadays the directory in which the user keeps their source code, since
// we're no longer running separate parallel builds for every Legacy Runtime.
await Promise.all(toRemove);
// Add any Serverless Functions that were exposed by the Legacy Runtime // Add any Serverless Functions that were exposed by the Legacy Runtime
// to the `functions-manifest.json` file provided in `.output`. // to the `functions-manifest.json` file provided in `.output`.
await updateFunctionsManifest({ workPath, pages }); await updateFunctionsManifest({ workPath, pages });

View File

@@ -34,7 +34,7 @@ Finally, [connect your Git repository to Vercel](https://vercel.com/docs/git) an
## Documentation ## Documentation
For details on how to use Vercel CLI, check out our [documentation](https://vercel.com/docs). For details on how to use Vercel CLI, check out our [documentation](https://vercel.com/docs/cli).
## Local Development ## Local Development

View File

@@ -1,6 +1,6 @@
{ {
"name": "vercel", "name": "vercel",
"version": "23.1.3-canary.62", "version": "23.1.3-canary.67",
"preferGlobal": true, "preferGlobal": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"description": "The command-line interface for Vercel", "description": "The command-line interface for Vercel",
@@ -43,14 +43,14 @@
"node": ">= 12" "node": ">= 12"
}, },
"dependencies": { "dependencies": {
"@vercel/build-utils": "2.12.3-canary.39", "@vercel/build-utils": "2.12.3-canary.42",
"@vercel/go": "1.2.4-canary.4", "@vercel/go": "1.2.4-canary.4",
"@vercel/node": "1.12.2-canary.7", "@vercel/node": "1.12.2-canary.7",
"@vercel/python": "2.1.2-canary.1", "@vercel/python": "2.1.2-canary.2",
"@vercel/ruby": "1.2.10-canary.0", "@vercel/ruby": "1.2.10-canary.0",
"update-notifier": "4.1.0", "update-notifier": "4.1.0",
"vercel-plugin-middleware": "0.0.0-canary.15", "vercel-plugin-middleware": "0.0.0-canary.19",
"vercel-plugin-node": "1.12.2-canary.31" "vercel-plugin-node": "1.12.2-canary.34"
}, },
"devDependencies": { "devDependencies": {
"@next/env": "11.1.2", "@next/env": "11.1.2",

View File

@@ -447,6 +447,7 @@ export default async (client: Client) => {
forceNew: argv['--force'], forceNew: argv['--force'],
withCache: argv['--with-cache'], withCache: argv['--with-cache'],
prebuilt: argv['--prebuilt'], prebuilt: argv['--prebuilt'],
rootDirectory,
quiet, quiet,
wantsPublic: argv['--public'] || localConfig.public, wantsPublic: argv['--public'] || localConfig.public,
isFile, isFile,

View File

@@ -52,6 +52,7 @@ export default async function processDeployment({
isSettingUpProject: boolean; isSettingUpProject: boolean;
skipAutoDetectionConfirmation?: boolean; skipAutoDetectionConfirmation?: boolean;
cwd?: string; cwd?: string;
rootDirectory?: string;
}) { }) {
let { let {
now, now,
@@ -64,6 +65,7 @@ export default async function processDeployment({
nowConfig, nowConfig,
quiet, quiet,
prebuilt, prebuilt,
rootDirectory,
} = args; } = args;
const { debug } = output; const { debug } = output;
@@ -86,6 +88,7 @@ export default async function processDeployment({
force, force,
withCache, withCache,
prebuilt, prebuilt,
rootDirectory,
skipAutoDetectionConfirmation, skipAutoDetectionConfirmation,
}; };

View File

@@ -37,6 +37,7 @@ export interface CreateOptions {
project?: string; project?: string;
wantsPublic: boolean; wantsPublic: boolean;
prebuilt?: boolean; prebuilt?: boolean;
rootDirectory?: string;
meta: Dictionary<string>; meta: Dictionary<string>;
regions?: string[]; regions?: string[];
quiet?: boolean; quiet?: boolean;
@@ -113,6 +114,7 @@ export default class Now extends EventEmitter {
name, name,
project, project,
prebuilt = false, prebuilt = false,
rootDirectory,
wantsPublic, wantsPublic,
meta, meta,
regions, regions,
@@ -168,6 +170,7 @@ export default class Now extends EventEmitter {
skipAutoDetectionConfirmation, skipAutoDetectionConfirmation,
cwd, cwd,
prebuilt, prebuilt,
rootDirectory,
}); });
if (deployment && deployment.warnings) { if (deployment && deployment.warnings) {

View File

@@ -6,3 +6,5 @@ node_modules
!tests/fixtures/nowignore/node_modules !tests/fixtures/nowignore/node_modules
!tests/fixtures/vercelignore-allow-nodemodules/node_modules !tests/fixtures/vercelignore-allow-nodemodules/node_modules
!tests/fixtures/vercelignore-allow-nodemodules/sub/node_modules !tests/fixtures/vercelignore-allow-nodemodules/sub/node_modules
!tests/fixtures/file-system-api/.output
!tests/fixtures/file-system-api-root-directory/**/.output

View File

@@ -1,6 +1,6 @@
{ {
"name": "@vercel/client", "name": "@vercel/client",
"version": "10.2.3-canary.40", "version": "10.2.3-canary.45",
"main": "dist/index.js", "main": "dist/index.js",
"typings": "dist/index.d.ts", "typings": "dist/index.d.ts",
"homepage": "https://vercel.com", "homepage": "https://vercel.com",
@@ -40,7 +40,7 @@
] ]
}, },
"dependencies": { "dependencies": {
"@vercel/build-utils": "2.12.3-canary.39", "@vercel/build-utils": "2.12.3-canary.42",
"@zeit/fetch": "5.2.0", "@zeit/fetch": "5.2.0",
"async-retry": "1.2.3", "async-retry": "1.2.3",
"async-sema": "3.0.0", "async-sema": "3.0.0",

View File

@@ -1,12 +1,12 @@
import { lstatSync } from 'fs-extra'; import { lstatSync } from 'fs-extra';
import { relative, isAbsolute } from 'path'; import { relative, isAbsolute } from 'path';
import hashes, { mapToObject } from './utils/hashes'; import { hashes, mapToObject, resolveNftJsonFiles } from './utils/hashes';
import { upload } from './upload'; import { upload } from './upload';
import { buildFileTree, createDebug, parseVercelConfig } from './utils'; import { buildFileTree, createDebug, parseVercelConfig } from './utils';
import { DeploymentError } from './errors'; import { DeploymentError } from './errors';
import { import {
NowConfig, VercelConfig,
VercelClientOptions, VercelClientOptions,
DeploymentOptions, DeploymentOptions,
DeploymentEventType, DeploymentEventType,
@@ -16,7 +16,7 @@ export default function buildCreateDeployment() {
return async function* createDeployment( return async function* createDeployment(
clientOptions: VercelClientOptions, clientOptions: VercelClientOptions,
deploymentOptions: DeploymentOptions = {}, deploymentOptions: DeploymentOptions = {},
nowConfig: NowConfig = {} nowConfig: VercelConfig = {}
): AsyncIterableIterator<{ type: DeploymentEventType; payload: any }> { ): AsyncIterableIterator<{ type: DeploymentEventType; payload: any }> {
const { path } = clientOptions; const { path } = clientOptions;
@@ -74,12 +74,7 @@ export default function buildCreateDeployment() {
debug(`Provided 'path' is a single file`); debug(`Provided 'path' is a single file`);
} }
let { fileList } = await buildFileTree( let { fileList } = await buildFileTree(path, clientOptions, debug);
path,
clientOptions.isDirectory,
debug,
clientOptions.prebuilt
);
let configPath: string | undefined; let configPath: string | undefined;
if (!nowConfig) { if (!nowConfig) {
@@ -114,7 +109,11 @@ export default function buildCreateDeployment() {
}; };
} }
const files = await hashes(fileList); const hashedFileMap = await hashes(fileList);
const nftFileList = clientOptions.prebuilt
? await resolveNftJsonFiles(hashedFileMap)
: [];
const files = await hashes(nftFileList, hashedFileMap);
debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`); debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`);
yield { type: 'hashes-calculated', payload: mapToObject(files) }; yield { type: 'hashes-calculated', payload: mapToObject(files) };

View File

@@ -15,6 +15,7 @@ export interface VercelClientOptions {
apiUrl?: string; apiUrl?: string;
force?: boolean; force?: boolean;
prebuilt?: boolean; prebuilt?: boolean;
rootDirectory?: string;
withCache?: boolean; withCache?: boolean;
userAgent?: string; userAgent?: string;
defaultName?: string; defaultName?: string;

View File

@@ -1,6 +1,7 @@
import { createHash } from 'crypto'; import { createHash } from 'crypto';
import fs from 'fs-extra'; import fs from 'fs-extra';
import { Sema } from 'async-sema'; import { Sema } from 'async-sema';
import { join, dirname } from 'path';
export interface DeploymentFile { export interface DeploymentFile {
names: string[]; names: string[];
@@ -15,9 +16,7 @@ export interface DeploymentFile {
* @return {String} hex digest * @return {String} hex digest
*/ */
function hash(buf: Buffer): string { function hash(buf: Buffer): string {
return createHash('sha1') return createHash('sha1').update(buf).digest('hex');
.update(buf)
.digest('hex');
} }
/** /**
@@ -39,34 +38,68 @@ export const mapToObject = (
/** /**
* Computes hashes for the contents of each file given. * Computes hashes for the contents of each file given.
* *
* @param {Array} of {String} full paths * @param files - absolute file paths
* @return {Map} * @param map - optional map of files to append
* @return Map of hash digest to file object
*/ */
async function hashes(files: string[]): Promise<Map<string, DeploymentFile>> { export async function hashes(
const map = new Map<string, DeploymentFile>(); files: string[],
map = new Map<string, DeploymentFile>()
): Promise<Map<string, DeploymentFile>> {
const semaphore = new Sema(100); const semaphore = new Sema(100);
await Promise.all( await Promise.all(
files.map( files.map(async (name: string): Promise<void> => {
async (name: string): Promise<void> => { await semaphore.acquire();
await semaphore.acquire(); const data = await fs.readFile(name);
const data = await fs.readFile(name); const { mode } = await fs.stat(name);
const { mode } = await fs.stat(name);
const h = hash(data); const h = hash(data);
const entry = map.get(h); const entry = map.get(h);
if (entry) { if (entry) {
if (entry.names[0] !== name) {
entry.names.push(name); entry.names.push(name);
} else {
map.set(h, { names: [name], data, mode });
} }
} else {
semaphore.release(); map.set(h, { names: [name], data, mode });
} }
)
semaphore.release();
})
); );
return map; return map;
} }
export default hashes; export async function resolveNftJsonFiles(
hashedFiles: Map<string, DeploymentFile>
): Promise<string[]> {
const semaphore = new Sema(100);
const existingFiles = Array.from(hashedFiles.values());
const resolvedFiles = new Set<string>();
await Promise.all(
existingFiles.map(async file => {
await semaphore.acquire();
const fsPath = file.names[0];
if (fsPath.endsWith('.nft.json')) {
const json = file.data.toString('utf8');
const { version, files } = JSON.parse(json) as {
version: number;
files: string[] | { input: string; output: string }[];
};
if (version === 1 || version === 2) {
for (let f of files) {
const relPath = typeof f === 'string' ? f : f.input;
resolvedFiles.add(join(dirname(fsPath), relPath));
}
} else {
console.error(`Invalid nft.json version: ${version}`);
}
}
semaphore.release();
})
);
return Array.from(resolvedFiles);
}

View File

@@ -1,7 +1,7 @@
import { DeploymentFile } from './hashes'; import { DeploymentFile } from './hashes';
import { FetchOptions } from '@zeit/fetch'; import { FetchOptions } from '@zeit/fetch';
import { nodeFetch, zeitFetch } from './fetch'; import { nodeFetch, zeitFetch } from './fetch';
import { join, sep, relative } from 'path'; import { join, sep, relative, posix } from 'path';
import { URL } from 'url'; import { URL } from 'url';
import ignore from 'ignore'; import ignore from 'ignore';
type Ignore = ReturnType<typeof ignore>; type Ignore = ReturnType<typeof ignore>;
@@ -81,13 +81,16 @@ const maybeRead = async function <T>(path: string, default_: T) {
export async function buildFileTree( export async function buildFileTree(
path: string | string[], path: string | string[],
isDirectory: boolean, {
debug: Debug, isDirectory,
prebuilt?: boolean prebuilt,
rootDirectory,
}: Pick<VercelClientOptions, 'isDirectory' | 'prebuilt' | 'rootDirectory'>,
debug: Debug
): Promise<{ fileList: string[]; ignoreList: string[] }> { ): Promise<{ fileList: string[]; ignoreList: string[] }> {
const ignoreList: string[] = []; const ignoreList: string[] = [];
let fileList: string[]; let fileList: string[];
let { ig, ignores } = await getVercelIgnore(path, prebuilt); let { ig, ignores } = await getVercelIgnore(path, prebuilt, rootDirectory);
debug(`Found ${ignores.length} rules in .vercelignore`); debug(`Found ${ignores.length} rules in .vercelignore`);
debug('Building file tree...'); debug('Building file tree...');
@@ -119,37 +122,50 @@ export async function buildFileTree(
export async function getVercelIgnore( export async function getVercelIgnore(
cwd: string | string[], cwd: string | string[],
prebuilt?: boolean prebuilt?: boolean,
rootDirectory?: string
): Promise<{ ig: Ignore; ignores: string[] }> { ): Promise<{ ig: Ignore; ignores: string[] }> {
const ignores: string[] = prebuilt let ignores: string[] = [];
? ['*', '!.output', '!.output/**']
: [ const outputDir = posix.join(rootDirectory || '', '.output');
'.hg',
'.git', if (prebuilt) {
'.gitmodules', ignores.push('*');
'.svn', const parts = outputDir.split('/');
'.cache', parts.forEach((_, i) => {
'.next', const level = parts.slice(0, i + 1).join('/');
'.now', ignores.push(`!${level}`);
'.vercel', });
'.npmignore', ignores.push(`!${outputDir}/**`);
'.dockerignore', } else {
'.gitignore', ignores = [
'.*.swp', '.hg',
'.DS_Store', '.git',
'.wafpicke-*', '.gitmodules',
'.lock-wscript', '.svn',
'.env.local', '.cache',
'.env.*.local', '.next',
'.venv', '.now',
'npm-debug.log', '.vercel',
'config.gypi', '.npmignore',
'node_modules', '.dockerignore',
'__pycache__', '.gitignore',
'venv', '.*.swp',
'CVS', '.DS_Store',
'.output', '.wafpicke-*',
]; '.lock-wscript',
'.env.local',
'.env.*.local',
'.venv',
'npm-debug.log',
'config.gypi',
'node_modules',
'__pycache__',
'venv',
'CVS',
`.output`,
];
}
const cwds = Array.isArray(cwd) ? cwd : [cwd]; const cwds = Array.isArray(cwd) ? cwd : [cwd];
const files = await Promise.all( const files = await Promise.all(
@@ -250,39 +266,31 @@ export const prepareFiles = (
files: Map<string, DeploymentFile>, files: Map<string, DeploymentFile>,
clientOptions: VercelClientOptions clientOptions: VercelClientOptions
): PreparedFile[] => { ): PreparedFile[] => {
const preparedFiles = [...files.keys()].reduce( const preparedFiles: PreparedFile[] = [];
(acc: PreparedFile[], sha: string): PreparedFile[] => { for (const [sha, file] of files) {
const next = [...acc]; for (const name of file.names) {
let fileName: string;
const file = files.get(sha) as DeploymentFile; if (clientOptions.isDirectory) {
// Directory
for (const name of file.names) { fileName =
let fileName: string; typeof clientOptions.path === 'string'
? relative(clientOptions.path, name)
if (clientOptions.isDirectory) { : name;
// Directory } else {
fileName = // Array of files or single file
typeof clientOptions.path === 'string' const segments = name.split(sep);
? relative(clientOptions.path, name) fileName = segments[segments.length - 1];
: name;
} else {
// Array of files or single file
const segments = name.split(sep);
fileName = segments[segments.length - 1];
}
next.push({
file: isWin ? fileName.replace(/\\/g, '/') : fileName,
size: file.data.byteLength || file.data.length,
mode: file.mode,
sha,
});
} }
return next; preparedFiles.push({
}, file: isWin ? fileName.replace(/\\/g, '/') : fileName,
[] size: file.data.byteLength || file.data.length,
); mode: file.mode,
sha,
});
}
}
return preparedFiles; return preparedFiles;
}; };

View File

@@ -0,0 +1 @@
foo

View File

@@ -0,0 +1 @@
bar

View File

@@ -0,0 +1 @@
bar

View File

@@ -0,0 +1 @@
baz

View File

@@ -0,0 +1 @@
qux

View File

@@ -0,0 +1 @@
foo

View File

@@ -0,0 +1 @@
bar

View File

@@ -0,0 +1,4 @@
{
"extends": "../tsconfig.json",
"include": ["*.test.ts"]
}

View File

@@ -17,7 +17,11 @@ const toAbsolutePaths = (cwd: string, files: string[]) =>
describe('buildFileTree()', () => { describe('buildFileTree()', () => {
it('should exclude files using `.nowignore` blocklist', async () => { it('should exclude files using `.nowignore` blocklist', async () => {
const cwd = fixture('nowignore'); const cwd = fixture('nowignore');
const { fileList, ignoreList } = await buildFileTree(cwd, true, noop); const { fileList, ignoreList } = await buildFileTree(
cwd,
{ isDirectory: true },
noop
);
const expectedFileList = toAbsolutePaths(cwd, ['.nowignore', 'index.txt']); const expectedFileList = toAbsolutePaths(cwd, ['.nowignore', 'index.txt']);
expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual( expect(normalizeWindowsPaths(expectedFileList).sort()).toEqual(
@@ -36,7 +40,11 @@ describe('buildFileTree()', () => {
it('should include the node_modules using `.vercelignore` allowlist', async () => { it('should include the node_modules using `.vercelignore` allowlist', async () => {
const cwd = fixture('vercelignore-allow-nodemodules'); const cwd = fixture('vercelignore-allow-nodemodules');
const { fileList, ignoreList } = await buildFileTree(cwd, true, noop); const { fileList, ignoreList } = await buildFileTree(
cwd,
{ isDirectory: true },
noop
);
const expected = toAbsolutePaths(cwd, [ const expected = toAbsolutePaths(cwd, [
'node_modules/one.txt', 'node_modules/one.txt',
@@ -54,4 +62,90 @@ describe('buildFileTree()', () => {
normalizeWindowsPaths(ignoreList).sort() normalizeWindowsPaths(ignoreList).sort()
); );
}); });
// Without `prebuilt`, regular project files are collected and `.output` is skipped.
it('should find root files but ignore .output files when prebuilt=false', async () => {
  const root = fixture('file-system-api');
  const { fileList, ignoreList } = await buildFileTree(
    root,
    { isDirectory: true, prebuilt: false },
    noop
  );
  const wantFiles = toAbsolutePaths(root, ['foo.txt', 'sub/bar.txt']);
  expect(normalizeWindowsPaths(wantFiles).sort()).toEqual(
    normalizeWindowsPaths(fileList).sort()
  );
  const wantIgnored = ['.output'];
  expect(normalizeWindowsPaths(wantIgnored).sort()).toEqual(
    normalizeWindowsPaths(ignoreList).sort()
  );
});
// With `prebuilt`, only the `.output` directory is uploaded; source files are ignored.
it('should find .output files but ignore other files when prebuilt=true', async () => {
  const root = fixture('file-system-api');
  const { fileList, ignoreList } = await buildFileTree(
    root,
    { isDirectory: true, prebuilt: true },
    noop
  );
  const wantFiles = toAbsolutePaths(root, [
    '.output/baz.txt',
    '.output/sub/qux.txt',
  ]);
  expect(normalizeWindowsPaths(wantFiles).sort()).toEqual(
    normalizeWindowsPaths(fileList).sort()
  );
  const wantIgnored = ['foo.txt', 'sub'];
  expect(normalizeWindowsPaths(wantIgnored).sort()).toEqual(
    normalizeWindowsPaths(ignoreList).sort()
  );
});
// Root Directory set + no `prebuilt`: every `.output` (in any subdirectory) is skipped.
it('should find root files but ignore all .output files when prebuilt=false and rootDirectory=root', async () => {
  const root = fixture('file-system-api-root-directory');
  const { fileList, ignoreList } = await buildFileTree(
    root,
    { isDirectory: true, prebuilt: false, rootDirectory: 'root' },
    noop
  );
  const wantFiles = toAbsolutePaths(root, [
    'foo.txt',
    'root/bar.txt',
    'someother/bar.txt',
  ]);
  expect(normalizeWindowsPaths(wantFiles).sort()).toEqual(
    normalizeWindowsPaths(fileList).sort()
  );
  const wantIgnored = ['root/.output', 'someother/.output'];
  expect(normalizeWindowsPaths(wantIgnored).sort()).toEqual(
    normalizeWindowsPaths(ignoreList).sort()
  );
});
// Root Directory set + `prebuilt`: only `<rootDirectory>/.output` is uploaded.
it('should find root/.output files but ignore other files when prebuilt=true and rootDirectory=root', async () => {
  const root = fixture('file-system-api-root-directory');
  const { fileList, ignoreList } = await buildFileTree(
    root,
    { isDirectory: true, prebuilt: true, rootDirectory: 'root' },
    noop
  );
  const wantFiles = toAbsolutePaths(root, [
    'root/.output/baz.txt',
    'root/.output/sub/qux.txt',
  ]);
  expect(normalizeWindowsPaths(wantFiles).sort()).toEqual(
    normalizeWindowsPaths(fileList).sort()
  );
  const wantIgnored = ['foo.txt', 'root/bar.txt', 'someother'];
  expect(normalizeWindowsPaths(wantIgnored).sort()).toEqual(
    normalizeWindowsPaths(ignoreList).sort()
  );
});
}); });

View File

@@ -1,6 +1,6 @@
{ {
"name": "vercel-plugin-middleware", "name": "vercel-plugin-middleware",
"version": "0.0.0-canary.15", "version": "0.0.0-canary.19",
"license": "MIT", "license": "MIT",
"main": "./dist/index", "main": "./dist/index",
"homepage": "", "homepage": "",
@@ -30,7 +30,7 @@
"@types/node-fetch": "^2", "@types/node-fetch": "^2",
"@types/ua-parser-js": "0.7.36", "@types/ua-parser-js": "0.7.36",
"@types/uuid": "8.3.1", "@types/uuid": "8.3.1",
"@vercel/build-utils": "2.12.3-canary.39", "@vercel/build-utils": "2.12.3-canary.42",
"@vercel/ncc": "0.24.0", "@vercel/ncc": "0.24.0",
"cookie": "0.4.1", "cookie": "0.4.1",
"formdata-node": "4.3.1", "formdata-node": "4.3.1",

View File

@@ -0,0 +1,52 @@
import path from 'path';
import * as esbuild from 'esbuild';
// Source text of the virtual module that esbuild injects into every
// middleware bundle. It exports a minimal `process` shim for the edge
// sandbox. NOTE: `envOverride` is not defined here — the plugin's onLoad
// hook below prepends `const envOverride = <serialized env>;` at build
// time. This is a runtime-injected string: do not reformat its contents.
const processInjectFile = `
// envOverride is passed by esbuild plugin
const env = envOverride
function cwd() {
return '/'
}
function chdir(dir) {
throw new Error('process.chdir is not supported')
}
export const process = {
argv: [],
env,
chdir,
cwd,
};
`;
/**
 * esbuild plugin that injects a minimal `process` shim into the bundle,
 * with `process.env` replaced by the given `env` snapshot serialized at
 * build time.
 */
export function nodeProcessPolyfillPlugin({ env = {} } = {}): esbuild.Plugin {
  // Both hooks intercept the same virtual module name.
  const virtualModule = /_virtual-process-polyfill_\.js/;
  return {
    name: 'node-process-polyfill',
    setup({ initialOptions, onResolve, onLoad }) {
      // Mark the virtual module as resolved as-is, free of side effects.
      onResolve({ filter: virtualModule }, args => ({
        path: args.path,
        sideEffects: false,
      }));

      // Serve the polyfill source, prefixed with the serialized env snapshot
      // that `processInjectFile` expects as `envOverride`.
      onLoad({ filter: virtualModule }, () => ({
        loader: 'js',
        contents: `const envOverride = ${JSON.stringify(
          env
        )};\n${processInjectFile}`,
      }));

      // Ensure the polyfill is injected into every output file. Mutate the
      // existing `inject` array in place when the caller provided one.
      if (!initialOptions.inject) {
        initialOptions.inject = [];
      }
      initialOptions.inject.push(
        path.resolve(__dirname, '_virtual-process-polyfill_.js')
      );
    },
  };
}

View File

@@ -17,6 +17,7 @@ import {
UrlWithParsedQuery, UrlWithParsedQuery,
} from 'url'; } from 'url';
import { toNodeHeaders } from './websandbox/utils'; import { toNodeHeaders } from './websandbox/utils';
import { nodeProcessPolyfillPlugin } from './esbuild-plugins';
const glob = util.promisify(libGlob); const glob = util.promisify(libGlob);
const SUPPORTED_EXTENSIONS = ['.js', '.ts']; const SUPPORTED_EXTENSIONS = ['.js', '.ts'];
@@ -80,6 +81,7 @@ export async function build({ workPath }: { workPath: string }) {
banner: { banner: {
js: '"use strict";', js: '"use strict";',
}, },
plugins: [nodeProcessPolyfillPlugin({ env: process.env })],
format: 'cjs', format: 'cjs',
}); });
// Create `_ENTRIES` wrapper // Create `_ENTRIES` wrapper

View File

@@ -15,6 +15,6 @@ Object {
"sortingIndex": 1, "sortingIndex": 1,
}, },
}, },
"version": 1, "version": 2,
} }
`; `;

View File

@@ -63,6 +63,32 @@ describe('build()', () => {
).toEqual('1'); ).toEqual('1');
}); });
// Verifies that env vars present at build time are inlined into the bundle:
// the var is set before `build()` runs and deleted *before* the output is
// executed, so the middleware can only observe the value if esbuild baked
// it into the emitted file. Statement order here is load-bearing.
it('should build simple middleware with env vars', async () => {
const expectedEnvVar = 'expected-env-var';
const fixture = join(__dirname, 'fixtures/env');
// Must be set before build() so the esbuild define/inject step sees it.
process.env.ENV_VAR_SHOULD_BE_DEFINED = expectedEnvVar;
await build({
workPath: fixture,
});
// env var should be inlined in the output
delete process.env.ENV_VAR_SHOULD_BE_DEFINED;
const outputFile = join(fixture, '.output/server/pages/_middleware.js');
expect(await fsp.stat(outputFile)).toBeTruthy();
// Executing the bundle registers the middleware on global._ENTRIES.
require(outputFile);
//@ts-ignore
const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
expect(typeof middleware).toStrictEqual('function');
const handledResponse = await middleware({
request: {},
});
// The response body carries the inlined value, not the (now-deleted) env var.
expect(String(handledResponse.response.body)).toEqual(expectedEnvVar);
expect(
(handledResponse.response as Response).headers.get('x-middleware-next')
).toEqual(null);
});
it('should create a middleware that runs in strict mode', async () => { it('should create a middleware that runs in strict mode', async () => {
const { middleware } = await setupFixture('use-strict'); const { middleware } = await setupFixture('use-strict');
const response = await middleware({ const response = await middleware({

View File

@@ -0,0 +1,3 @@
// Middleware fixture: echoes the build-time env var so the test can assert
// that esbuild inlined it into the bundle.
export default function (req) {
  return new Response(process.env.ENV_VAR_SHOULD_BE_DEFINED);
}

View File

@@ -1,7 +1,7 @@
{ {
"private": false, "private": false,
"name": "vercel-plugin-go", "name": "vercel-plugin-go",
"version": "1.0.0-canary.27", "version": "1.0.0-canary.30",
"main": "dist/index.js", "main": "dist/index.js",
"license": "MIT", "license": "MIT",
"files": [ "files": [
@@ -17,7 +17,7 @@
"prepublishOnly": "tsc" "prepublishOnly": "tsc"
}, },
"dependencies": { "dependencies": {
"@vercel/build-utils": "2.12.3-canary.39", "@vercel/build-utils": "2.12.3-canary.42",
"@vercel/go": "1.2.4-canary.4" "@vercel/go": "1.2.4-canary.4"
}, },
"devDependencies": { "devDependencies": {

View File

@@ -1,6 +1,6 @@
{ {
"name": "vercel-plugin-node", "name": "vercel-plugin-node",
"version": "1.12.2-canary.31", "version": "1.12.2-canary.34",
"license": "MIT", "license": "MIT",
"main": "./dist/index", "main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js", "homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -34,7 +34,7 @@
"@types/node-fetch": "2", "@types/node-fetch": "2",
"@types/test-listen": "1.1.0", "@types/test-listen": "1.1.0",
"@types/yazl": "2.4.2", "@types/yazl": "2.4.2",
"@vercel/build-utils": "2.12.3-canary.39", "@vercel/build-utils": "2.12.3-canary.42",
"@vercel/fun": "1.0.3", "@vercel/fun": "1.0.3",
"@vercel/ncc": "0.24.0", "@vercel/ncc": "0.24.0",
"@vercel/nft": "0.14.0", "@vercel/nft": "0.14.0",

View File

@@ -1,7 +1,7 @@
{ {
"private": false, "private": false,
"name": "vercel-plugin-python", "name": "vercel-plugin-python",
"version": "1.0.0-canary.28", "version": "1.0.0-canary.31",
"main": "dist/index.js", "main": "dist/index.js",
"license": "MIT", "license": "MIT",
"files": [ "files": [
@@ -17,8 +17,8 @@
"prepublishOnly": "tsc" "prepublishOnly": "tsc"
}, },
"dependencies": { "dependencies": {
"@vercel/build-utils": "2.12.3-canary.39", "@vercel/build-utils": "2.12.3-canary.42",
"@vercel/python": "2.1.2-canary.1" "@vercel/python": "2.1.2-canary.2"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "*", "@types/node": "*",

View File

@@ -1,7 +1,7 @@
{ {
"private": false, "private": false,
"name": "vercel-plugin-ruby", "name": "vercel-plugin-ruby",
"version": "1.0.0-canary.27", "version": "1.0.0-canary.30",
"main": "dist/index.js", "main": "dist/index.js",
"license": "MIT", "license": "MIT",
"files": [ "files": [
@@ -17,7 +17,7 @@
"prepublishOnly": "tsc" "prepublishOnly": "tsc"
}, },
"dependencies": { "dependencies": {
"@vercel/build-utils": "2.12.3-canary.39", "@vercel/build-utils": "2.12.3-canary.42",
"@vercel/ruby": "1.2.10-canary.0" "@vercel/ruby": "1.2.10-canary.0"
}, },
"devDependencies": { "devDependencies": {

View File

@@ -1,6 +1,6 @@
{ {
"name": "@vercel/python", "name": "@vercel/python",
"version": "2.1.2-canary.1", "version": "2.1.2-canary.2",
"main": "./dist/index.js", "main": "./dist/index.js",
"license": "MIT", "license": "MIT",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/python", "homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",

View File

@@ -1,4 +1,3 @@
import { relative, basename } from 'path';
import execa from 'execa'; import execa from 'execa';
import { Meta, debug } from '@vercel/build-utils'; import { Meta, debug } from '@vercel/build-utils';
@@ -136,17 +135,10 @@ export async function installRequirementsFile({
meta, meta,
args = [], args = [],
}: InstallRequirementsFileArg) { }: InstallRequirementsFileArg) {
const fileAtRoot = relative(workPath, filePath) === basename(filePath); // The Vercel platform already handles `requirements.txt` for frontend projects,
// but the installation logic there is different, because it seems to install all
// If the `requirements.txt` file is located in the Root Directory of the project and // of the dependencies globally, whereas, for this Runtime, we want it to happen only
// the new File System API is used (`avoidTopLevelInstall`), the Install Command // locally, so we'll run a separate installation.
// will have already installed its dependencies, so we don't need to do it again.
if (meta.avoidTopLevelInstall && fileAtRoot) {
debug(
`Skipping requirements file installation, already installed by Install Command`
);
return;
}
if ( if (
meta.isDev && meta.isDev &&