Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: @vercel/bu ... @vercel/py (37 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 4bf6295d7a | |
| | a4001ce10b | |
| | 2df3432d88 | |
| | bcfc19de12 | |
| | 04381c669b | |
| | 0c7b54edad | |
| | 6d42816395 | |
| | 6fe6d05a42 | |
| | 50a201f145 | |
| | 701a02ae9d | |
| | 39f7586621 | |
| | c4a39c8d29 | |
| | 3ac238cf08 | |
| | 8384813a0d | |
| | c4587de439 | |
| | d997dc4fbc | |
| | d15b90bd4d | |
| | 5b31297f0c | |
| | e232566cbe | |
| | 592689cad1 | |
| | 9b08e72f76 | |
| | bd0e10cfe7 | |
| | 28436ade60 | |
| | de0d2fba0b | |
| | e0900128d6 | |
| | 8d15f30579 | |
| | 960c66584c | |
| | 1c8f91031a | |
| | 68cb23c3cc | |
| | 94f6ae2595 | |
| | b92aeac84d | |
| | 00420b7a01 | |
| | a5128790d0 | |
| | ae9aa91f4f | |
| | d4cef69cc9 | |
| | 323f67c31a | |
| | 63c499a826 | |
@@ -1,6 +1,6 @@
 {
   "compilerOptions": {
-    "target": "esnext",
+    "target": "ES2020",
     "skipLibCheck": true,
     "strict": false,
     "forceConsistentCasingInFileNames": true,

@@ -22,8 +22,5 @@
     "@types/react-dom": "^17.0.9",
     "typescript": "^4.1.2"
   },
-  "engines": {
-    "node": "14.x"
-  },
   "sideEffects": false
 }
@@ -1,7 +0,0 @@
-{
-  "build": {
-    "env": {
-      "ENABLE_FILE_SYSTEM_API": "1"
-    }
-  }
-}
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "3.1.1",
+  "version": "4.1.1-canary.0",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -23,7 +23,7 @@
     "@types/cross-spawn": "6.0.0",
     "@types/end-of-stream": "^1.4.0",
     "@types/fs-extra": "9.0.13",
-    "@types/glob": "^7.1.1",
+    "@types/glob": "7.2.0",
     "@types/jest": "27.4.1",
     "@types/js-yaml": "3.12.1",
     "@types/ms": "0.7.31",
@@ -31,7 +31,7 @@
     "@types/node-fetch": "^2.1.6",
     "@types/semver": "6.0.0",
     "@types/yazl": "2.4.2",
-    "@vercel/frameworks": "1.0.0",
+    "@vercel/frameworks": "1.0.1",
     "@vercel/ncc": "0.24.0",
     "aggregate-error": "3.0.1",
     "async-retry": "1.2.3",
@@ -40,7 +40,7 @@
     "cross-spawn": "6.0.5",
     "end-of-stream": "1.4.1",
     "fs-extra": "10.0.0",
-    "glob": "7.1.3",
+    "glob": "8.0.3",
     "into-stream": "5.0.0",
     "js-yaml": "3.13.1",
     "minimatch": "3.0.4",
@@ -23,7 +23,7 @@ interface ErrorResponse {
 }
 
 interface Options {
-  tag?: 'canary' | 'latest' | string;
+  tag?: string;
   functions?: BuilderFunctions;
   ignoreBuildScript?: boolean;
   projectSettings?: ProjectSettings;
@@ -278,7 +278,7 @@ export async function detectBuilders(
     // and package.json can be served as static files
     frontendBuilder = {
       use: '@vercel/static',
-      src: '!{api/**,package.json}',
+      src: '!{api/**,package.json,middleware.[jt]s}',
       config: {
         zeroConfig: true,
       },
@@ -355,7 +355,16 @@ function maybeGetApiBuilder(
   apiMatches: Builder[],
   options: Options
 ) {
-  if (!fileName.startsWith('api/')) {
+  const middleware =
+    fileName === 'middleware.js' || fileName === 'middleware.ts';
+
+  // Root-level Middleware file is handled by `@vercel/next`, so don't
+  // schedule a separate Builder when "nextjs" framework is selected
+  if (middleware && options.projectSettings?.framework === 'nextjs') {
+    return null;
+  }
+
+  if (!(fileName.startsWith('api/') || middleware)) {
     return null;
   }
 
@@ -381,7 +390,7 @@ function maybeGetApiBuilder(
 
   const { fnPattern, func } = getFunction(fileName, options);
 
-  const use = (func && func.runtime) || (match && match.use);
+  const use = func?.runtime || match?.use;
 
   if (!use) {
     return null;
@@ -389,6 +398,10 @@ function maybeGetApiBuilder(
 
   const config: Config = { zeroConfig: true };
 
+  if (middleware) {
+    config.middleware = true;
+  }
+
   if (fnPattern && func) {
     config.functions = { [fnPattern]: func };
 
@@ -428,6 +441,7 @@ function getApiMatches() {
   const config = { zeroConfig: true };
 
   return [
+    { src: 'middleware.[jt]s', use: `@vercel/node`, config },
     { src: 'api/**/*.js', use: `@vercel/node`, config },
    { src: 'api/**/*.mjs', use: `@vercel/node`, config },
     { src: 'api/**/*.ts', use: `@vercel/node`, config },
packages/build-utils/src/fs/get-glob-fs.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
+import fs from 'fs';
+import { DetectorFilesystem } from '../detectors/filesystem';
+
+type GlobFs = typeof fs;
+
+function normalizePath(path: string) {
+  // on windows, this will return a path like
+  // D:/c/package.json
+  // since we abstract the filesystem, we need to remove windows specific info from the path
+  // and let the FS decide how to process the path
+  // D:/c/package.json => /c/package.json
+  return path.replace(/^[a-zA-Z]:/, '');
+}
+
+export function getGlobFs(_fs: DetectorFilesystem): GlobFs {
+  const readdir = (
+    path: fs.PathLike,
+    callback: (err: NodeJS.ErrnoException | null, files: string[]) => void
+  ): void => {
+    _fs
+      .readdir(normalizePath(String(path)))
+      .then(stats =>
+        callback(
+          null,
+          stats.map(stat => stat.name)
+        )
+      )
+      .catch(err => callback(err, []));
+  };
+
+  const stat = (
+    path: fs.PathLike,
+    callback: (
+      err: NodeJS.ErrnoException | null,
+      stats: fs.Stats | null
+    ) => void
+  ): void => {
+    _fs
+      .isFile(normalizePath(String(path)))
+      .then(isPathAFile => {
+        callback(null, {
+          ino: 0,
+          mode: 0,
+          nlink: 0,
+          uid: 0,
+          gid: 0,
+          rdev: 0,
+          size: 0,
+          blksize: 0,
+          blocks: 0,
+          atimeMs: 0,
+          mtimeMs: 0,
+          ctimeMs: 0,
+          birthtimeMs: 0,
+          atime: new Date(),
+          mtime: new Date(),
+          ctime: new Date(),
+          birthtime: new Date(),
+          dev: 0,
+          isBlockDevice: () => false,
+          isCharacterDevice: () => false,
+          isDirectory: () => !isPathAFile,
+          isFIFO: () => false,
+          isFile: () => isPathAFile,
+          isSocket: () => false,
+          isSymbolicLink: () => false,
+        });
+      })
+      .catch(err => callback(err, null));
+  };
+
+  return new Proxy(fs, {
+    get(_target, prop) {
+      switch (prop) {
+        case 'readdir':
+          return readdir;
+        case 'lstat':
+        case 'stat':
+          return stat;
+        default:
+          throw new Error('Not Implemented');
+      }
+    },
+  });
+}
@@ -3,6 +3,7 @@ import fs from 'fs-extra';
 import path from 'path';
 import Sema from 'async-sema';
 import spawn from 'cross-spawn';
+import { coerce, intersects, validRange } from 'semver';
 import { SpawnOptions } from 'child_process';
 import { deprecate } from 'util';
 import debug from '../debug';
@@ -219,9 +220,9 @@ export async function getNodeVersion(
   config: Config = {},
   meta: Meta = {}
 ): Promise<NodeVersion> {
+  const latest = getLatestNodeVersion();
   if (meta && meta.isDev) {
     // Use the system-installed version of `node` in PATH for `vercel dev`
-    const latest = getLatestNodeVersion();
     return { ...latest, runtime: 'nodejs' };
   }
   const { packageJson } = await scanParentDirs(destPath, true);
@@ -229,10 +230,27 @@ export async function getNodeVersion(
   let isAuto = true;
   if (packageJson && packageJson.engines && packageJson.engines.node) {
     const { node } = packageJson.engines;
-    if (nodeVersion && nodeVersion !== node && !meta.isDev) {
+    if (
+      nodeVersion &&
+      validRange(node) &&
+      !intersects(nodeVersion, node) &&
+      !meta.isDev
+    ) {
       console.warn(
         `Warning: Due to "engines": { "node": "${node}" } in your \`package.json\` file, the Node.js Version defined in your Project Settings ("${nodeVersion}") will not apply. Learn More: http://vercel.link/node-version`
       );
+    } else if (coerce(node)?.raw === node && !meta.isDev) {
+      console.warn(
+        `Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` with major.minor.patch, but only major Node.js Version can be selected. Learn More: http://vercel.link/node-version`
+      );
+    } else if (
+      validRange(node) &&
+      intersects(`${latest.major + 1}.x`, node) &&
+      !meta.isDev
+    ) {
+      console.warn(
+        `Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` that will automatically upgrade when a new major Node.js Version is released. Learn More: http://vercel.link/node-version`
+      );
     }
     nodeVersion = node;
     isAuto = false;
packages/build-utils/src/get-project-paths.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
+import { detectFramework } from './detect-framework';
+import { DetectorFilesystem } from './detectors/filesystem';
+import frameworks from '@vercel/frameworks';
+
+const MAX_DEPTH_TRAVERSE = 3;
+
+export interface GetProjectPathsOptions {
+  fs: DetectorFilesystem;
+  path?: string;
+  skipPaths?: string[];
+  depth?: number;
+}
+
+export type ProjectPath = string;
+
+export const getProjectPaths = async ({
+  fs,
+  path,
+  skipPaths,
+  depth = MAX_DEPTH_TRAVERSE,
+}: GetProjectPathsOptions): Promise<ProjectPath[]> => {
+  if (depth === 0) return [];
+
+  const allPaths: Array<ProjectPath> = [];
+  const topPath: string = path ?? './';
+
+  if (path && skipPaths?.includes(path)) {
+    return allPaths;
+  }
+  const framework = await detectFramework({
+    fs: fs.chdir(topPath),
+    frameworkList: frameworks,
+  });
+
+  if (framework !== null) allPaths.push(topPath);
+
+  if (depth > 1) {
+    const directoryContents = await fs.readdir(topPath);
+    const childDirectories = directoryContents.filter(
+      stat => stat.type === 'dir' && !skipPaths?.includes(stat.path)
+    );
+
+    const paths = (
+      await Promise.all(
+        childDirectories.map(({ path }) => {
+          return getProjectPaths({
+            fs,
+            path,
+            depth: depth - 1,
+            skipPaths,
+          });
+        })
+      )
+    ).flat();
+
+    return [...paths, ...allPaths];
+  }
+
+  return allPaths;
+};
@@ -88,6 +88,7 @@ export {
 } from './detect-builders';
 export { detectFileSystemAPI } from './detect-file-system-api';
 export { detectFramework } from './detect-framework';
+export { getProjectPaths } from './get-project-paths';
 export { DetectorFilesystem } from './detectors/filesystem';
 export { readConfigFile } from './fs/read-config-file';
 export { normalizePath } from './fs/normalize-path';
@@ -117,5 +118,14 @@ export const isStaticRuntime = (name?: string): boolean => {
 };
 
 export { workspaceManagers } from './workspaces/workspace-managers';
-export { getWorkspaces } from './workspaces/get-workspaces';
+export {
+  getWorkspaces,
+  GetWorkspaceOptions,
+  Workspace,
+  WorkspaceType,
+} from './workspaces/get-workspaces';
+export {
+  getWorkspacePackagePaths,
+  GetWorkspacePackagePathsOptions,
+} from './workspaces/get-workspace-package-paths';
 export { monorepoManagers } from './monorepos/monorepo-managers';
@@ -82,7 +82,7 @@ export interface BuildOptions {
    * is the Git Repository Root. This is only relevant for Monorepos.
    * See https://vercel.com/blog/monorepos
    */
-  repoRootPath?: string;
+  repoRootPath: string;
 
   /**
    * An arbitrary object passed by the user in the build definition defined
@@ -123,7 +123,7 @@ export interface PrepareCacheOptions {
    * is the Git Repository Root. This is only relevant for Monorepos.
    * See https://vercel.com/blog/monorepos
    */
-  repoRootPath?: string;
+  repoRootPath: string;
 
   /**
    * An arbitrary object passed by the user in the build definition defined
@@ -295,6 +295,7 @@ export interface PackageJson {
   readonly preferGlobal?: boolean;
   readonly private?: boolean;
   readonly publishConfig?: PackageJson.PublishConfig;
+  readonly packageManager?: string;
 }
 
 export interface NodeVersion {
@@ -427,7 +428,9 @@ export interface BuildResultV2Typical {
 export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;
 
 export interface BuildResultV3 {
-  output: Lambda;
+  // TODO: use proper `Route` type from `routing-utils` (perhaps move types to a common package)
+  routes?: any[];
+  output: Lambda | EdgeFunction;
 }
 
 export type BuildV2 = (options: BuildOptions) => Promise<BuildResultV2>;
@@ -0,0 +1,113 @@
+import _path from 'path';
+import yaml from 'js-yaml';
+import glob from 'glob';
+import { DetectorFilesystem } from '../detectors/filesystem';
+import { Workspace } from './get-workspaces';
+import { getGlobFs } from '../fs/get-glob-fs';
+import { normalizePath } from '../fs/normalize-path';
+
+const posixPath = _path.posix;
+
+interface GetPackagePathOptions {
+  fs: DetectorFilesystem;
+}
+
+export interface GetWorkspacePackagePathsOptions extends GetPackagePathOptions {
+  fs: DetectorFilesystem;
+  workspace: Workspace;
+}
+
+export async function getWorkspacePackagePaths({
+  fs,
+  workspace,
+}: GetWorkspacePackagePathsOptions): Promise<string[]> {
+  const { type, rootPath } = workspace;
+  const workspaceFs = fs.chdir(rootPath);
+
+  let results: string[] = [];
+
+  switch (type) {
+    case 'yarn':
+    case 'npm':
+      results = await getPackageJsonWorkspacePackagePaths({ fs: workspaceFs });
+      break;
+    case 'pnpm':
+      results = await getPnpmWorkspacePackagePaths({ fs: workspaceFs });
+      break;
+    default:
+      throw new Error(`Unknown workspace implementation: ${type}`);
+  }
+
+  return results.map(packagePath => {
+    return posixPath.join(rootPath, posixPath.dirname(packagePath));
+  });
+}
+
+type PackageJsonWithWorkspace = {
+  workspaces?:
+    | {
+        packages?: string[];
+        noHoist?: string[];
+      }
+    | string[];
+};
+
+type PnpmWorkspaces = {
+  packages?: string[];
+};
+
+async function getPackagePaths(
+  packages: string[],
+  fs: DetectorFilesystem
+): Promise<string[]> {
+  return (
+    await Promise.all(
+      packages.map(
+        packageGlob =>
+          new Promise<string[]>((resolve, reject) => {
+            glob(
+              normalizePath(posixPath.join(packageGlob, 'package.json')),
+              {
+                cwd: '/',
+                fs: getGlobFs(fs),
+              },
+              (err, matches) => {
+                if (err) reject(err);
+                else resolve(matches);
+              }
+            );
+          })
+      )
+    )
+  ).flat();
+}
+
+async function getPackageJsonWorkspacePackagePaths({
+  fs,
+}: GetPackagePathOptions): Promise<string[]> {
+  const packageJsonAsBuffer = await fs.readFile('package.json');
+  const { workspaces } = JSON.parse(
+    packageJsonAsBuffer.toString()
+  ) as PackageJsonWithWorkspace;
+
+  let packages: string[] = [];
+
+  if (Array.isArray(workspaces)) {
+    packages = workspaces;
+  } else {
+    packages = workspaces?.packages ?? [];
+  }
+
+  return getPackagePaths(packages, fs);
+}
+
+async function getPnpmWorkspacePackagePaths({
+  fs,
+}: GetPackagePathOptions): Promise<string[]> {
+  const pnpmWorkspaceAsBuffer = await fs.readFile('pnpm-workspace.yaml');
+  const { packages = [] } = yaml.load(
+    pnpmWorkspaceAsBuffer.toString()
+  ) as PnpmWorkspaces;
+
+  return getPackagePaths(packages, fs);
+}
@@ -36,7 +36,7 @@ export async function getWorkspaces({
   const childDirectories = directoryContents.filter(
     stat => stat.type === 'dir'
   );
-
+
   return (
     await Promise.all(
       childDirectories.map(childDirectory =>
packages/build-utils/test/fixtures/25-multiple-lock-files-yarn/a/package.json (new file, vendored, 15 lines)
@@ -0,0 +1,15 @@
+{
+  "name": "a",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "debug": "^4.3.2"
+  }
+}
packages/build-utils/test/fixtures/25-multiple-lock-files-yarn/b/package.json (new file, vendored, 15 lines)
@@ -0,0 +1,15 @@
+{
+  "name": "b",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "cowsay": "^1.5.0"
+  }
+}
packages/build-utils/test/fixtures/26-multiple-lock-files-pnpm/a/package.json (new file, vendored, 15 lines)
@@ -0,0 +1,15 @@
+{
+  "name": "a",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "debug": "^4.3.2"
+  }
+}
packages/build-utils/test/fixtures/26-multiple-lock-files-pnpm/b/package.json (new file, vendored, 15 lines)
@@ -0,0 +1,15 @@
+{
+  "name": "b",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "test": "echo \"Error: no test specified\" && exit 1"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "cowsay": "^1.5.0"
+  }
+}
packages/build-utils/test/fixtures/26-multiple-lock-files-pnpm/pnpm-lock.yaml (generated, vendored, 251 changed lines)
@@ -1,19 +1,260 @@
|
||||
lockfileVersion: 5.3
|
||||
lockfileVersion: 5.4
|
||||
|
||||
specifiers:
|
||||
once: ^1.4.0
|
||||
importers:
|
||||
|
||||
dependencies:
|
||||
once: 1.4.0
|
||||
.:
|
||||
specifiers:
|
||||
once: ^1.4.0
|
||||
dependencies:
|
||||
once: 1.4.0
|
||||
|
||||
a:
|
||||
specifiers:
|
||||
debug: ^4.3.2
|
||||
dependencies:
|
||||
debug: 4.3.4
|
||||
|
||||
b:
|
||||
specifiers:
|
||||
cowsay: ^1.5.0
|
||||
dependencies:
|
||||
cowsay: 1.5.0
|
||||
|
||||
packages:
|
||||
|
||||
/ansi-regex/3.0.1:
|
||||
resolution: {integrity: sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==}
|
||||
engines: {node: '>=4'}
|
||||
dev: false
|
||||
|
||||
/ansi-regex/5.0.1:
|
||||
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
|
||||
engines: {node: '>=8'}
|
||||
dev: false
|
||||
|
||||
/ansi-styles/4.3.0:
|
||||
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
color-convert: 2.0.1
|
||||
dev: false
|
||||
|
||||
/camelcase/5.3.1:
|
||||
resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==}
|
||||
engines: {node: '>=6'}
|
||||
dev: false
|
||||
|
||||
/cliui/6.0.0:
|
||||
resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==}
|
||||
dependencies:
|
||||
string-width: 4.2.3
|
||||
strip-ansi: 6.0.1
|
||||
wrap-ansi: 6.2.0
|
||||
dev: false
|
||||
|
||||
/color-convert/2.0.1:
|
||||
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
|
||||
engines: {node: '>=7.0.0'}
|
||||
dependencies:
|
||||
color-name: 1.1.4
|
||||
dev: false
|
||||
|
||||
/color-name/1.1.4:
|
||||
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
|
||||
dev: false
|
||||
|
||||
/cowsay/1.5.0:
|
||||
resolution: {integrity: sha512-8Ipzr54Z8zROr/62C8f0PdhQcDusS05gKTS87xxdji8VbWefWly0k8BwGK7+VqamOrkv3eGsCkPtvlHzrhWsCA==}
|
||||
engines: {node: '>= 4'}
|
||||
hasBin: true
|
||||
dependencies:
|
||||
get-stdin: 8.0.0
|
||||
string-width: 2.1.1
|
||||
strip-final-newline: 2.0.0
|
||||
yargs: 15.4.1
|
||||
dev: false
|
||||
|
||||
/debug/4.3.4:
|
||||
resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==}
|
||||
engines: {node: '>=6.0'}
|
||||
peerDependencies:
|
||||
supports-color: '*'
|
||||
peerDependenciesMeta:
|
||||
supports-color:
|
||||
optional: true
|
||||
dependencies:
|
||||
ms: 2.1.2
|
||||
dev: false
|
||||
|
||||
/decamelize/1.2.0:
|
||||
resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==}
|
||||
engines: {node: '>=0.10.0'}
|
||||
dev: false
|
||||
|
||||
/emoji-regex/8.0.0:
|
||||
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
|
||||
dev: false
|
||||
|
||||
/find-up/4.1.0:
|
||||
resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
locate-path: 5.0.0
|
||||
path-exists: 4.0.0
|
||||
dev: false
|
||||
|
||||
/get-caller-file/2.0.5:
|
||||
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
|
||||
engines: {node: 6.* || 8.* || >= 10.*}
|
||||
dev: false
|
||||
|
||||
/get-stdin/8.0.0:
|
||||
resolution: {integrity: sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==}
|
||||
engines: {node: '>=10'}
|
||||
dev: false
|
||||
|
||||
/is-fullwidth-code-point/2.0.0:
|
||||
resolution: {integrity: sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==}
|
||||
engines: {node: '>=4'}
|
||||
dev: false
|
||||
|
||||
/is-fullwidth-code-point/3.0.0:
|
||||
resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
|
||||
engines: {node: '>=8'}
|
||||
dev: false
|
||||
|
||||
/locate-path/5.0.0:
|
||||
resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
p-locate: 4.1.0
|
||||
dev: false
|
||||
|
||||
/ms/2.1.2:
|
||||
resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
|
||||
dev: false
|
||||
|
||||
/once/1.4.0:
|
||||
resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
|
||||
dependencies:
|
||||
wrappy: 1.0.2
|
||||
dev: false
|
||||
|
||||
/p-limit/2.3.0:
|
||||
resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==}
|
||||
engines: {node: '>=6'}
|
||||
dependencies:
|
||||
p-try: 2.2.0
|
||||
dev: false
|
||||
|
||||
/p-locate/4.1.0:
|
||||
resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
p-limit: 2.3.0
|
||||
dev: false
|
||||
|
||||
/p-try/2.2.0:
|
||||
resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
|
||||
engines: {node: '>=6'}
|
||||
dev: false
|
||||
|
||||
/path-exists/4.0.0:
|
||||
resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
|
||||
engines: {node: '>=8'}
|
||||
dev: false
|
||||
|
||||
/require-directory/2.1.1:
|
||||
resolution: {integrity: sha1-jGStX9MNqxyXbiNE/+f3kqam30I=}
|
||||
engines: {node: '>=0.10.0'}
|
||||
dev: false
|
||||
|
||||
/require-main-filename/2.0.0:
|
||||
resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==}
|
||||
dev: false
|
||||
|
||||
/set-blocking/2.0.0:
|
||||
resolution: {integrity: sha1-BF+XgtARrppoA93TgrJDkrPYkPc=}
|
||||
dev: false
|
||||
|
||||
/string-width/2.1.1:
|
||||
resolution: {integrity: sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==}
|
||||
engines: {node: '>=4'}
|
||||
dependencies:
|
||||
is-fullwidth-code-point: 2.0.0
|
||||
strip-ansi: 4.0.0
|
||||
dev: false
|
||||
|
||||
/string-width/4.2.3:
|
||||
resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
emoji-regex: 8.0.0
|
||||
is-fullwidth-code-point: 3.0.0
|
||||
strip-ansi: 6.0.1
|
||||
dev: false
|
||||
|
||||
/strip-ansi/4.0.0:
|
||||
resolution: {integrity: sha1-qEeQIusaw2iocTibY1JixQXuNo8=}
|
||||
engines: {node: '>=4'}
|
||||
dependencies:
|
||||
ansi-regex: 3.0.1
|
||||
dev: false
|
||||
|
||||
/strip-ansi/6.0.1:
|
||||
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
ansi-regex: 5.0.1
|
||||
dev: false
|
||||
|
||||
/strip-final-newline/2.0.0:
|
||||
resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==}
|
||||
engines: {node: '>=6'}
|
||||
dev: false
|
||||
|
||||
/which-module/2.0.0:
|
||||
resolution: {integrity: sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=}
|
||||
dev: false
|
||||
|
||||
/wrap-ansi/6.2.0:
|
||||
resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
ansi-styles: 4.3.0
|
||||
string-width: 4.2.3
|
||||
strip-ansi: 6.0.1
|
||||
dev: false
|
||||
|
||||
/wrappy/1.0.2:
|
||||
resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
|
||||
dev: false
|
||||
|
||||
/y18n/4.0.3:
|
||||
resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==}
|
||||
dev: false
|
||||
|
||||
/yargs-parser/18.1.3:
|
||||
resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==}
|
||||
engines: {node: '>=6'}
|
||||
dependencies:
|
||||
camelcase: 5.3.1
|
||||
decamelize: 1.2.0
|
||||
dev: false
|
||||
|
||||
/yargs/15.4.1:
|
||||
resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==}
|
||||
engines: {node: '>=8'}
|
||||
dependencies:
|
||||
cliui: 6.0.0
|
||||
decamelize: 1.2.0
|
||||
find-up: 4.1.0
|
||||
get-caller-file: 2.0.5
|
||||
require-directory: 2.1.1
|
||||
require-main-filename: 2.0.0
|
||||
set-blocking: 2.0.0
|
||||
string-width: 4.2.3
|
||||
which-module: 2.0.0
|
||||
y18n: 4.0.3
|
||||
yargs-parser: 18.1.3
|
||||
dev: false
|
||||
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-three",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "backend",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-one",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-one",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-three",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "backend",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "a",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"debug": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "b",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"cowsay": "^1.5.0"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"name": "21-npm-workspaces",
|
||||
"version": "1.0.0",
|
||||
"private": true,
|
||||
"workspaces": [
|
||||
"a",
|
||||
"b"
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-three",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "backend",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-one",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-two",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"name": "app-one",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC"
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-one",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-two",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"name": "frontend",
|
||||
"license": "MIT",
|
||||
"version": "0.1.0"
|
||||
}
|
||||
@@ -0,0 +1,15 @@
|
||||
{
|
||||
"name": "app-one",
|
||||
"version": "1.0.0",
|
||||
"description": "",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
},
|
||||
"keywords": [],
|
||||
"author": "",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"next": "^4.3.2"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"engines": {
|
||||
"node": "16.14.0"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"private": true,
|
||||
"engines": {
|
||||
"node": ">=16"
|
||||
}
|
||||
}
|
||||
@@ -78,7 +78,7 @@ describe('Test `detectBuilders`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/users.js');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
});
|
||||
@@ -89,7 +89,7 @@ describe('Test `detectBuilders`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/[endpoint].js');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
});
|
||||
@@ -144,7 +144,7 @@ describe('Test `detectBuilders`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/endpoint.js');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
});
|
||||
|
||||
@@ -347,7 +347,7 @@ describe('Test `detectBuilders`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/index.ts');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
});
|
||||
|
||||
it('functions with nextjs', async () => {
|
||||
@@ -1010,7 +1010,7 @@ describe('Test `detectBuilders` with `featHandleMiss=true`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/users.js');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
|
||||
@@ -1032,7 +1032,7 @@ describe('Test `detectBuilders` with `featHandleMiss=true`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/[endpoint].js');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
});
|
||||
@@ -1258,7 +1258,7 @@ describe('Test `detectBuilders` with `featHandleMiss=true`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/endpoint.js');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
|
||||
expect(defaultRoutes!.length).toBe(2);
|
||||
@@ -1288,7 +1288,7 @@ describe('Test `detectBuilders` with `featHandleMiss=true`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/version.js');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
|
||||
expect(defaultRoutes!.length).toBe(2);
|
||||
@@ -1567,7 +1567,7 @@ describe('Test `detectBuilders` with `featHandleMiss=true`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('api/index.ts');
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(errorRoutes!.length).toBe(1);
|
||||
expect((errorRoutes![0] as Source).status).toBe(404);
|
||||
});
|
||||
@@ -2228,6 +2228,55 @@ describe('Test `detectBuilders` with `featHandleMiss=true`', () => {
|
||||
expect(builders).toBe(null);
|
||||
expect(errors).toBe(null);
|
||||
});
|
||||
|
||||
it('no package.json + no build + root-level "middleware.js"', async () => {
|
||||
const files = ['middleware.js', 'index.html', 'web/middleware.js'];
|
||||
const { builders, errors } = await detectBuilders(files, null, {
|
||||
featHandleMiss,
|
||||
});
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('middleware.js');
|
||||
expect(builders![0].config?.middleware).toEqual(true);
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
});
|
||||
|
||||
it('no package.json + no build + root-level "middleware.ts"', async () => {
|
||||
const files = ['middleware.ts', 'index.html', 'web/middleware.js'];
|
||||
const { builders, errors } = await detectBuilders(files, null, {
|
||||
featHandleMiss,
|
||||
});
|
||||
expect(builders![0].use).toBe('@vercel/node');
|
||||
expect(builders![0].src).toBe('middleware.ts');
|
||||
expect(builders![0].config?.middleware).toEqual(true);
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
expect(builders![1].src).toBe('!{api/**,package.json,middleware.[jt]s}');
|
||||
expect(builders!.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
});
|
||||
|
||||
it('should not add middleware builder when "nextjs" framework is selected', async () => {
|
||||
const files = ['package.json', 'pages/index.ts', 'middleware.ts'];
|
||||
const projectSettings = {
|
||||
framework: 'nextjs',
|
||||
};
|
||||
const { builders } = await detectBuilders(files, null, {
|
||||
projectSettings,
|
||||
featHandleMiss,
|
||||
});
|
||||
expect(builders).toEqual([
|
||||
{
|
||||
use: '@vercel/next',
|
||||
src: 'package.json',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
framework: projectSettings.framework,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
it('Test `detectRoutes`', async () => {
|
||||
|
||||
packages/build-utils/test/unit.get-project-paths.test.ts (new file, vendored, 70 lines)
@@ -0,0 +1,70 @@
|
||||
import path from 'path';
|
||||
import { normalizePath } from '../src';
|
||||
import { getProjectPaths, ProjectPath } from '../src/get-project-paths';
|
||||
import { FixtureFilesystem } from './utils/fixture-filesystem';
|
||||
|
||||
describe.each<{
|
||||
fixturePath: string;
|
||||
resultPaths: ProjectPath[];
|
||||
skipPaths?: ProjectPath[];
|
||||
readdirCalls: number;
|
||||
}>([
|
||||
{
|
||||
fixturePath: '32-monorepo-highly-nested',
|
||||
resultPaths: [],
|
||||
readdirCalls: 2,
|
||||
},
|
||||
{
|
||||
fixturePath: '33-hybrid-monorepo',
|
||||
resultPaths: ['backend/app-three'],
|
||||
readdirCalls: 2,
|
||||
skipPaths: ['frontend'],
|
||||
},
|
||||
{
|
||||
fixturePath: '34-monorepo-no-workspaces',
|
||||
resultPaths: ['backend/app-three', 'frontend/app-one', 'frontend/app-two'],
|
||||
readdirCalls: 3,
|
||||
},
|
||||
{
|
||||
fixturePath: '35-no-monorepo',
|
||||
resultPaths: [],
|
||||
readdirCalls: 1,
|
||||
},
|
||||
{
|
||||
fixturePath: '36-monorepo-some-nested',
|
||||
resultPaths: ['frontend/app-two'],
|
||||
readdirCalls: 2,
|
||||
},
|
||||
{
|
||||
fixturePath: '37-project-depth-one-level',
|
||||
resultPaths: ['./'],
|
||||
readdirCalls: 1,
|
||||
},
|
||||
])(
|
||||
'`getProjectPaths()`',
|
||||
({ resultPaths, readdirCalls, fixturePath, skipPaths }) => {
|
||||
const testName =
|
||||
resultPaths.length > 0
|
||||
? `should detect ${resultPaths.join()} project${
|
||||
resultPaths.length > 1 ? 's' : ''
|
||||
} for ${fixturePath}`
|
||||
: `should not detect any path for ${fixturePath}`;
|
||||
|
||||
it(testName, async () => {
|
||||
const fixture = path.join(
|
||||
__dirname,
|
||||
'non-deployed-fixtures',
|
||||
fixturePath
|
||||
);
|
||||
const fs = new FixtureFilesystem(fixture);
|
||||
const mockReaddir = jest.fn().mockImplementation(fs.readdir);
|
||||
const mockHasPath = jest.fn().mockImplementation(fs.hasPath);
|
||||
fs.readdir = mockReaddir;
|
||||
fs.hasPath = mockHasPath;
|
||||
const actualPaths = await getProjectPaths({ fs, skipPaths });
|
||||
const normalizedPaths = actualPaths.map(path => normalizePath(path));
|
||||
expect(normalizedPaths).toEqual(resultPaths);
|
||||
expect(fs.readdir).toHaveBeenCalledTimes(readdirCalls);
|
||||
});
|
||||
}
|
||||
);
|
||||
packages/build-utils/test/unit.get-workspaces-package-paths.test.ts (new file, vendored, 38 lines)
@@ -0,0 +1,38 @@
|
||||
import path from 'path';
|
||||
import { getWorkspaces } from '../src/workspaces/get-workspaces';
|
||||
import { getWorkspacePackagePaths } from '../src/workspaces/get-workspace-package-paths';
|
||||
import { FixtureFilesystem } from './utils/fixture-filesystem';
|
||||
|
||||
describe.each<[string, string[]]>([
|
||||
['21-npm-workspaces', ['/a', '/b']],
|
||||
['23-pnpm-workspaces', ['/c', '/d']],
|
||||
['27-yarn-workspaces', ['/a', '/b']],
|
||||
['25-multiple-lock-files-yarn', ['/a', '/b']],
|
||||
['26-multiple-lock-files-pnpm', ['/a', '/b']],
|
||||
[
|
||||
'29-nested-workspaces',
|
||||
['/backend/c', '/backend/d', '/frontend/a', '/frontend/b'],
|
||||
],
|
||||
['22-pnpm', []],
|
||||
])('`getWorkspacesPackagePaths()`', (fixturePath, packagePaths) => {
|
||||
const testName =
|
||||
packagePaths.length > 0
|
||||
? `should detect ${packagePaths.join()} package${
|
||||
packagePaths.length > 1 ? 's' : ''
|
||||
} for ${fixturePath}`
|
||||
: `should not detect any workspace for ${fixturePath}`;
|
||||
|
||||
it(testName, async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', fixturePath);
|
||||
const fs = new FixtureFilesystem(fixture);
|
||||
|
||||
const workspaces = await getWorkspaces({ fs });
|
||||
const actualPackagePaths = (
|
||||
await Promise.all(
|
||||
workspaces.map(workspace => getWorkspacePackagePaths({ fs, workspace }))
|
||||
)
|
||||
).flat();
|
||||
|
||||
expect(actualPackagePaths).toEqual(packagePaths);
|
||||
});
|
||||
});
|
||||
packages/build-utils/test/unit.test.ts (vendored, 48 changed lines)
@@ -277,7 +277,45 @@ it('should prefer package.json engines over project setting from config and warn
|
||||
]);
|
||||
});
|
||||
|
||||
it('should warn when package.json engines is exact version', async () => {
|
||||
expect(
|
||||
await getNodeVersion(
|
||||
path.join(__dirname, 'pkg-engine-node-exact'),
|
||||
undefined,
|
||||
{},
|
||||
{}
|
||||
)
|
||||
).toHaveProperty('range', '16.x');
|
||||
expect(warningMessages).toStrictEqual([
|
||||
'Warning: Detected "engines": { "node": "16.14.0" } in your `package.json` with major.minor.patch, but only major Node.js Version can be selected. Learn More: http://vercel.link/node-version',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should warn when package.json engines is greater than', async () => {
|
||||
expect(
|
||||
await getNodeVersion(
|
||||
path.join(__dirname, 'pkg-engine-node-greaterthan'),
|
||||
undefined,
|
||||
{},
|
||||
{}
|
||||
)
|
||||
).toHaveProperty('range', '16.x');
|
||||
expect(warningMessages).toStrictEqual([
|
||||
'Warning: Detected "engines": { "node": ">=16" } in your `package.json` that will automatically upgrade when a new major Node.js Version is released. Learn More: http://vercel.link/node-version',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should not warn when package.json engines matches project setting from config', async () => {
|
||||
expect(
|
||||
await getNodeVersion(
|
||||
path.join(__dirname, 'pkg-engine-node'),
|
||||
undefined,
|
||||
{ nodeVersion: '14' },
|
||||
{}
|
||||
)
|
||||
).toHaveProperty('range', '14.x');
|
||||
expect(warningMessages).toStrictEqual([]);
|
||||
|
||||
expect(
|
||||
await getNodeVersion(
|
||||
path.join(__dirname, 'pkg-engine-node'),
|
||||
@@ -287,6 +325,16 @@ it('should not warn when package.json engines matches project setting from confi
|
||||
)
|
||||
).toHaveProperty('range', '14.x');
|
||||
expect(warningMessages).toStrictEqual([]);
|
||||
|
||||
expect(
|
||||
await getNodeVersion(
|
||||
path.join(__dirname, 'pkg-engine-node'),
|
||||
undefined,
|
||||
{ nodeVersion: '<15' },
|
||||
{}
|
||||
)
|
||||
).toHaveProperty('range', '14.x');
|
||||
expect(warningMessages).toStrictEqual([]);
|
||||
});
|
||||
|
||||
it('should get latest node version', async () => {
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"compilerOptions": {
|
||||
"declaration": true,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
@@ -13,7 +13,7 @@
|
||||
"outDir": "./dist",
|
||||
"types": ["node", "jest"],
|
||||
"strict": true,
|
||||
"target": "es2019"
|
||||
"target": "ES2020"
|
||||
},
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "24.2.5",
|
||||
"version": "25.1.1-canary.0",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -30,7 +30,6 @@
|
||||
"scripts/preinstall.js"
|
||||
],
|
||||
"ava": {
|
||||
"compileEnhancements": false,
|
||||
"extensions": [
|
||||
"ts"
|
||||
],
|
||||
@@ -40,19 +39,19 @@
|
||||
]
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
"node": ">= 14"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "3.1.1",
|
||||
"@vercel/go": "1.4.4",
|
||||
"@vercel/next": "2.9.0",
|
||||
"@vercel/node": "1.15.4",
|
||||
"@vercel/python": "2.3.4",
|
||||
"@vercel/redwood": "0.8.4",
|
||||
"@vercel/remix": "0.0.2",
|
||||
"@vercel/ruby": "1.3.7",
|
||||
"@vercel/static-build": "0.26.0",
|
||||
"update-notifier": "4.1.0"
|
||||
"@vercel/build-utils": "4.1.1-canary.0",
|
||||
"@vercel/go": "2.0.2-canary.0",
|
||||
"@vercel/next": "3.0.2-canary.0",
|
||||
"@vercel/node": "2.1.1-canary.0",
|
||||
"@vercel/python": "3.0.2-canary.0",
|
||||
"@vercel/redwood": "1.0.2-canary.0",
|
||||
"@vercel/remix": "1.0.2-canary.0",
|
||||
"@vercel/ruby": "1.3.10-canary.0",
|
||||
"@vercel/static-build": "1.0.2-canary.0",
|
||||
"update-notifier": "5.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@alex_neo/jest-expect-message": "1.0.5",
|
||||
@@ -95,8 +94,8 @@
|
||||
"@types/which": "1.3.2",
|
||||
"@types/write-json-file": "2.2.1",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel/client": "11.0.4",
|
||||
"@vercel/frameworks": "1.0.0",
|
||||
"@vercel/client": "12.0.2-canary.0",
|
||||
"@vercel/frameworks": "1.0.1",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"@zeit/fun": "0.11.2",
|
||||
"@zeit/source-map-support": "0.6.2",
|
||||
|
||||
@@ -4,6 +4,7 @@ import dotenv from 'dotenv';
|
||||
import { join, relative } from 'path';
|
||||
import {
|
||||
detectBuilders,
|
||||
normalizePath,
|
||||
Files,
|
||||
FileFsRef,
|
||||
PackageJson,
|
||||
@@ -45,6 +46,7 @@ import {
|
||||
writeBuildResult,
|
||||
} from '../util/build/write-build-result';
|
||||
import { importBuilders, BuilderWithPkg } from '../util/build/import-builders';
|
||||
import { initCorepack, cleanupCorepack } from '../util/build/corepack';
|
||||
|
||||
type BuildResult = BuildResultV2 | BuildResultV3;
|
||||
|
||||
@@ -99,6 +101,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
const argv = getArgs(client.argv.slice(2), {
|
||||
'--cwd': String,
|
||||
'--prod': Boolean,
|
||||
'--yes': Boolean,
|
||||
});
|
||||
|
||||
if (argv['--help']) {
|
||||
@@ -112,42 +115,53 @@ export default async function main(client: Client): Promise<number> {
|
||||
}
|
||||
const cwd = process.cwd();
|
||||
|
||||
// Build `target` influences which environment variables will be used
|
||||
const target = argv['--prod'] ? 'production' : 'preview';
|
||||
const yes = Boolean(argv['--yes']);
|
||||
|
||||
// TODO: read project settings from the API, fall back to local `project.json` if that fails
|
||||
|
||||
// Read project settings, and pull them from Vercel if necessary
|
||||
let project = await readProjectSettings(join(cwd, VERCEL_DIR));
|
||||
const isTTY = process.stdin.isTTY;
|
||||
while (!project?.settings) {
|
||||
if (!isTTY) {
|
||||
client.output.print(
|
||||
`No Project Settings found locally. Run ${cli.getCommandName(
|
||||
'pull --yes'
|
||||
)} to retreive them.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
let confirmed = yes;
|
||||
if (!confirmed) {
|
||||
if (!isTTY) {
|
||||
client.output.print(
|
||||
`No Project Settings found locally. Run ${cli.getCommandName(
|
||||
'pull --yes'
|
||||
)} to retreive them.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const confirmed = await confirm(
|
||||
`No Project Settings found locally. Run ${cli.getCommandName(
|
||||
'pull'
|
||||
)} for retrieving them?`,
|
||||
true
|
||||
);
|
||||
confirmed = await confirm(
|
||||
`No Project Settings found locally. Run ${cli.getCommandName(
|
||||
'pull'
|
||||
)} for retrieving them?`,
|
||||
true
|
||||
);
|
||||
}
|
||||
if (!confirmed) {
|
||||
client.output.print(`Aborted. No Project Settings retrieved.\n`);
|
||||
return 0;
|
||||
}
|
||||
client.argv = [];
|
||||
const { argv: originalArgv } = client;
|
||||
client.argv = [
|
||||
...originalArgv.slice(0, 2),
|
||||
'pull',
|
||||
`--environment`,
|
||||
target,
|
||||
];
|
||||
const result = await pull(client);
|
||||
if (result !== 0) {
|
||||
return result;
|
||||
}
|
||||
client.argv = originalArgv;
|
||||
project = await readProjectSettings(join(cwd, VERCEL_DIR));
|
||||
}
|
||||
|
||||
// Build `target` influences which environment variables will be used
|
||||
const target = argv['--prod'] ? 'production' : 'preview';
|
||||
|
||||
// TODO: load env vars from the API, fall back to local files if that fails
|
||||
|
||||
const envPath = await checkExists([
|
||||
@@ -177,7 +191,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
|
||||
// Get a list of source files
|
||||
const files = (await getFiles(workPath, client)).map(f =>
|
||||
relative(workPath, f)
|
||||
normalizePath(relative(workPath, f))
|
||||
);
|
||||
|
||||
const routesResult = getTransformedRoutes({ nowConfig: vercelConfig || {} });
|
||||
@@ -265,19 +279,20 @@ export default async function main(client: Client): Promise<number> {
|
||||
}
|
||||
|
||||
// Delete output directory from potential previous build
|
||||
await fs.remove(OUTPUT_DIR);
|
||||
const outputDir = join(cwd, OUTPUT_DIR);
|
||||
await fs.remove(outputDir);
|
||||
|
||||
const buildStamp = stamp();
|
||||
|
||||
// Create fresh new output directory
|
||||
await fs.mkdirp(OUTPUT_DIR);
|
||||
await fs.mkdirp(outputDir);
|
||||
|
||||
const ops: Promise<Error | void>[] = [];
|
||||
|
||||
// Write the `detectedBuilders` result to output dir
|
||||
ops.push(
|
||||
fs.writeJSON(
|
||||
join(OUTPUT_DIR, 'builds.json'),
|
||||
join(outputDir, 'builds.json'),
|
||||
{
|
||||
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
|
||||
target,
|
||||
@@ -312,6 +327,10 @@ export default async function main(client: Client): Promise<number> {
|
||||
// TODO: parallelize builds
|
||||
const buildResults: Map<Builder, BuildResult> = new Map();
|
||||
const overrides: PathOverride[] = [];
|
||||
const repoRootPath = cwd;
|
||||
const rootPackageJsonPath = repoRootPath || workPath;
|
||||
const corepackShimDir = await initCorepack({ cwd, rootPackageJsonPath });
|
||||
|
||||
for (const build of builds) {
|
||||
if (typeof build.src !== 'string') continue;
|
||||
|
||||
@@ -331,7 +350,6 @@ export default async function main(client: Client): Promise<number> {
|
||||
framework: project.settings.framework,
|
||||
nodeVersion: project.settings.nodeVersion,
|
||||
};
|
||||
const repoRootPath = cwd === workPath ? undefined : cwd;
|
||||
const buildOptions: BuildOptions = {
|
||||
files: filesMap,
|
||||
entrypoint: build.src,
|
||||
@@ -352,6 +370,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
// Start flushing the file outputs to the filesystem asynchronously
|
||||
ops.push(
|
||||
writeBuildResult(
|
||||
outputDir,
|
||||
buildResult,
|
||||
build,
|
||||
builder,
|
||||
@@ -366,6 +385,10 @@ export default async function main(client: Client): Promise<number> {
|
||||
);
|
||||
}
|
||||
|
||||
if (corepackShimDir) {
|
||||
cleanupCorepack(corepackShimDir);
|
||||
}
|
||||
|
||||
// Wait for filesystem operations to complete
|
||||
// TODO render progress bar?
|
||||
let hadError = false;
|
||||
@@ -379,7 +402,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
if (hadError) return 1;
|
||||
|
||||
// Merge existing `config.json` file into the one that will be produced
|
||||
const configPath = join(OUTPUT_DIR, 'config.json');
|
||||
const configPath = join(outputDir, 'config.json');
|
||||
// TODO: properly type
|
||||
const existingConfig = await readJSONFile<any>(configPath);
|
||||
if (existingConfig instanceof CantParseJSONFile) {
|
||||
@@ -437,7 +460,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
wildcard: mergedWildcard,
|
||||
overrides: mergedOverrides,
|
||||
};
|
||||
await fs.writeJSON(join(OUTPUT_DIR, 'config.json'), config, { spaces: 2 });
|
||||
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });
|
||||
|
||||
output.print(
|
||||
`${prependEmoji(
|
||||
|
||||
@@ -1,4 +1,5 @@
import { resolve, join } from 'path';
import fs from 'fs-extra';

import DevServer from '../../util/dev/server';
import parseListen from '../../util/dev/parse-listen';
@@ -12,6 +13,7 @@ import setupAndLink from '../../util/link/setup-and-link';
import getSystemEnvValues from '../../util/env/get-system-env-values';
import { getCommandName } from '../../util/pkg-name';
import param from '../../util/output/param';
import { OUTPUT_DIR } from '../../util/build/write-build-result';

type Options = {
'--listen': string;
@@ -104,6 +106,15 @@ export default async function dev(
devCommand = process.env.VERCEL_DEV_COMMAND;
}

// If there is no Development Command, we must delete the
// v3 Build Output because it will incorrectly be detected by
// @vercel/static-build in BuildOutputV3.getBuildOutputDirectory()
if (!devCommand) {
output.log(`Removing ${OUTPUT_DIR}`);
const outputDir = join(cwd, OUTPUT_DIR);
await fs.remove(outputDir);
}

const devServer = new DevServer(cwd, {
output,
devCommand,
packages/cli/src/commands/env/index.ts (7 changes, vendored)
@@ -42,6 +42,13 @@ const help = () => {
|
||||
|
||||
${chalk.dim('Examples:')}
|
||||
|
||||
${chalk.gray(
|
||||
'–'
|
||||
)} Pull all Development Environment Variables down from the cloud
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} env pull <file>`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} env pull .env.development.local`)}
|
||||
|
||||
${chalk.gray('–')} Add a new variable to multiple Environments
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} env add <name>`)}
|
||||
|
||||
@@ -25,7 +25,7 @@ import {
|
||||
|
||||
const help = () => {
|
||||
return console.log(`
|
||||
${chalk.bold(`${logo} ${getPkgName()} pull`)} [path]
|
||||
${chalk.bold(`${logo} ${getPkgName()} pull`)} [project-path]
|
||||
|
||||
${chalk.dim('Options:')}
|
||||
|
||||
@@ -42,25 +42,29 @@ const help = () => {
|
||||
|
||||
${chalk.dim('Examples:')}
|
||||
|
||||
${chalk.gray('–')} Pull the latest Project Settings from the cloud
|
||||
${chalk.gray(
|
||||
'–'
|
||||
)} Pull the latest Environment Variables and Project Settings from the cloud
|
||||
and stores them in \`.vercel/.env.\${target}.local\` and \`.vercel/project.json\` respectively.
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} pull`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} pull ./path-to-project`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} pull --env .env.local`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} pull ./path-to-project --env .env.local`)}
|
||||
|
||||
${chalk.gray('–')} Pull specific environment's Project Settings from the cloud
|
||||
${chalk.gray('–')} Pull for a specific environment
|
||||
|
||||
${chalk.cyan(
|
||||
`$ ${getPkgName()} pull --environment=${getEnvTargetPlaceholder()}`
|
||||
)}
|
||||
|
||||
${chalk.gray(
|
||||
'If you want to download environment variables to a specific file, use `vercel env pull` instead.'
|
||||
)}
|
||||
`);
|
||||
};
|
||||
|
||||
function processArgs(client: Client) {
|
||||
return getArgs(client.argv.slice(2), {
|
||||
'--yes': Boolean,
|
||||
'--env': String, // deprecated
|
||||
'--environment': String,
|
||||
'--debug': Boolean,
|
||||
'-d': '--debug',
|
||||
|
||||
@@ -58,6 +58,7 @@ const isCanary = pkg.version.includes('canary');
|
||||
const notifier = updateNotifier({
|
||||
pkg,
|
||||
distTag: isCanary ? 'canary' : 'latest',
|
||||
updateCheckInterval: 1000 * 60 * 60 * 24 * 7, // 1 week
|
||||
});
|
||||
|
||||
const VERCEL_DIR = getGlobalPathConfig();
|
||||
|
||||
packages/cli/src/util/build/corepack.ts (82 lines, new file)
@@ -0,0 +1,82 @@
import { delimiter, join } from 'path';
import { PackageJson, spawnAsync } from '@vercel/build-utils';
import fs from 'fs-extra';
import { CantParseJSONFile } from '../errors-ts';
import { VERCEL_DIR } from '../projects/link';
import readJSONFile from '../read-json-file';

export async function initCorepack({
cwd,
rootPackageJsonPath,
}: {
cwd: string;
rootPackageJsonPath: string;
}): Promise<string | null> {
if (process.env.ENABLE_EXPERIMENTAL_COREPACK !== '1') {
// Since corepack is experimental, we need to exit early
// unless the user explicitly enables it with the env var.
return null;
}
const pkg = await readJSONFile<PackageJson>(
join(rootPackageJsonPath, 'package.json')
);
if (pkg instanceof CantParseJSONFile) {
console.warn(
'Warning: Could not enable corepack because package.json is invalid JSON'
);
} else if (!pkg?.packageManager) {
console.warn(
'Warning: Could not enable corepack because package.json is missing "packageManager" property'
);
} else {
console.log(
`Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "${pkg.packageManager}" in package.json`
);
const corepackRootDir = join(cwd, VERCEL_DIR, 'cache', 'corepack');
const corepackHomeDir = join(corepackRootDir, 'home');
const corepackShimDir = join(corepackRootDir, 'shim');
await fs.mkdirp(corepackHomeDir);
await fs.mkdirp(corepackShimDir);
process.env.COREPACK_HOME = corepackHomeDir;
process.env.PATH = `${corepackShimDir}${delimiter}${process.env.PATH}`;
process.env.DEBUG = process.env.DEBUG
? `corepack,${process.env.DEBUG}`
: 'corepack';
const pkgManagerName = pkg.packageManager.split('@')[0];
// We must explicitly call `corepack enable npm` since `corepack enable`
// doesn't work with npm. See https://github.com/nodejs/corepack/pull/24
// Also, `corepack enable` is too broad and will change the version of
// yarn & pnpm even though those versions are not specified by the user.
// See https://github.com/nodejs/corepack#known-good-releases
// Finally, we use `--install-directory` so we can cache the result to
// reuse for subsequent builds. See `@vercel/vc-build` for `prepareCache`.
await spawnAsync(
'corepack',
['enable', pkgManagerName, '--install-directory', corepackShimDir],
{
prettyCommand: `corepack enable ${pkgManagerName}`,
}
);
return corepackShimDir;
}
return null;
}

export function cleanupCorepack(corepackShimDir: string) {
if (process.env.COREPACK_HOME) {
delete process.env.COREPACK_HOME;
}
if (process.env.PATH) {
process.env.PATH = process.env.PATH.replace(
`${corepackShimDir}${delimiter}`,
''
);
}
if (process.env.DEBUG) {
if (process.env.DEBUG === 'corepack') {
delete process.env.DEBUG;
} else {
process.env.DEBUG = process.env.DEBUG.replace('corepack,', '');
}
}
}
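For orientation, a minimal sketch of how `vc build` drives these helpers (mirroring the build.ts hunk earlier in this diff; the `packageManager` value shown is just an example taken from the corepack test fixtures below):

// package.json of the project being built must opt in, e.g.:
//   { "private": true, "packageManager": "pnpm@7.1.0", ... }
process.env.ENABLE_EXPERIMENTAL_COREPACK = '1';
const corepackShimDir = await initCorepack({
  cwd,
  rootPackageJsonPath: cwd, // directory containing the package.json above
});
// ...builders now resolve `pnpm` via .vercel/cache/corepack/shim on PATH...
if (corepackShimDir) {
  cleanupCorepack(corepackShimDir); // restore COREPACK_HOME, PATH and DEBUG
}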
@@ -106,9 +106,10 @@ export async function resolveBuilders(
// If `pkgPath` wasn't found in `.vercel/builders` then try as a CLI local
// dependency. `require.resolve()` will throw if the Builder is not a CLI
// dep, in which case we'll install it into `.vercel/builders`.
pkgPath = require.resolve(`${name}/package.json`, {
// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
pkgPath = eval('require').resolve(`${name}/package.json`, {
paths: [__dirname],
});
}) as string;
builderPkg = await readJSON(pkgPath);
}

@@ -148,7 +149,9 @@ export async function resolveBuilders(
// TODO: handle `parsed.type === 'tag'` ("latest" vs. anything else?)

const path = join(dirname(pkgPath), builderPkg.main || 'index.js');
const builder = require(path);

// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
const builder = eval('require')(path);

builders.set(spec, {
builder,
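The two `eval('require')` NOTEs above refer to the same bundling pitfall; a minimal, self-contained illustration of the pattern (the builder name here is hypothetical):

// When this file is bundled (e.g. with ncc/webpack), a literal `require(...)`
// is rewritten to `__webpack_require__`, which only knows about modules that
// were bundled in. Hiding the call behind `eval` keeps Node's real `require`,
// so packages installed at runtime into `.vercel/builders` can still be loaded.
const dynamicRequire: NodeRequire = eval('require');
const pkgJsonPath = dynamicRequire.resolve('some-builder/package.json', {
  paths: [__dirname],
});
// illustrative only: load the builder entrypoint next to its package.json
const builderModule = dynamicRequire(pkgJsonPath.replace(/package\.json$/, 'index.js'));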
@@ -23,6 +23,7 @@ import { VERCEL_DIR } from '../projects/link';
export const OUTPUT_DIR = join(VERCEL_DIR, 'output');

export async function writeBuildResult(
outputDir: string,
buildResult: BuildResultV2 | BuildResultV3,
build: Builder,
builder: BuilderV2 | BuilderV3,
@@ -31,9 +32,13 @@
) {
const { version } = builder;
if (version === 2) {
return writeBuildResultV2(buildResult as BuildResultV2, cleanUrls);
return writeBuildResultV2(
outputDir,
buildResult as BuildResultV2,
cleanUrls
);
} else if (version === 3) {
return writeBuildResultV3(buildResult as BuildResultV3, build);
return writeBuildResultV3(outputDir, buildResult as BuildResultV3, build);
}
throw new Error(
`Unsupported Builder version \`${version}\` from "${builderPkg.name}"`
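Taken together, the writers in this file produce the `.vercel/output` tree that `vc build` leaves on disk. A rough sketch of the layout, assembled only from the paths used in this diff (the concrete entries depend on the project):

.vercel/output/
  builds.json                              // written by `vc build` itself, not part of the Build Output API
  config.json                              // merged routes, overrides and wildcard config
  static/<path>                            // from writeStaticFile()
  functions/<path>.func/                   // from writeLambda() / writeEdgeFunction()
  functions/<path>.prerender-config.json   // for Prerender outputs
  functions/<path>.prerender-fallback.<ext>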
@@ -67,11 +72,12 @@ export interface PathOverride {
|
||||
* the filesystem.
|
||||
*/
|
||||
async function writeBuildResultV2(
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV2,
|
||||
cleanUrls?: boolean
|
||||
) {
|
||||
if ('buildOutputPath' in buildResult) {
|
||||
await mergeBuilderOutput(buildResult);
|
||||
await mergeBuilderOutput(outputDir, buildResult);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -79,16 +85,16 @@ async function writeBuildResultV2(
|
||||
const overrides: Record<string, PathOverride> = {};
|
||||
for (const [path, output] of Object.entries(buildResult.output)) {
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(output, path, lambdas);
|
||||
await writeLambda(outputDir, output, path, lambdas);
|
||||
} else if (isPrerender(output)) {
|
||||
await writeLambda(output.lambda, path, lambdas);
|
||||
await writeLambda(outputDir, output.lambda, path, lambdas);
|
||||
|
||||
// Write the fallback file alongside the Lambda directory
|
||||
let fallback = output.fallback;
|
||||
if (fallback) {
|
||||
const ext = getFileExtension(fallback);
|
||||
const fallbackName = `${path}.prerender-fallback${ext}`;
|
||||
const fallbackPath = join(OUTPUT_DIR, 'functions', fallbackName);
|
||||
const fallbackPath = join(outputDir, 'functions', fallbackName);
|
||||
const stream = fallback.toStream();
|
||||
await pipe(
|
||||
stream,
|
||||
@@ -101,7 +107,7 @@ async function writeBuildResultV2(
|
||||
}
|
||||
|
||||
const prerenderConfigPath = join(
|
||||
OUTPUT_DIR,
|
||||
outputDir,
|
||||
'functions',
|
||||
`${path}.prerender-config.json`
|
||||
);
|
||||
@@ -112,9 +118,9 @@ async function writeBuildResultV2(
|
||||
};
|
||||
await fs.writeJSON(prerenderConfigPath, prerenderConfig, { spaces: 2 });
|
||||
} else if (isFile(output)) {
|
||||
await writeStaticFile(output, path, overrides, cleanUrls);
|
||||
await writeStaticFile(outputDir, output, path, overrides, cleanUrls);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(output, path);
|
||||
await writeEdgeFunction(outputDir, output, path);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unsupported output type: "${(output as any).type}" for ${path}`
|
||||
@@ -128,15 +134,24 @@ async function writeBuildResultV2(
|
||||
* Writes the output from the `build()` return value of a v3 Builder to
|
||||
* the filesystem.
|
||||
*/
|
||||
async function writeBuildResultV3(buildResult: BuildResultV3, build: Builder) {
|
||||
async function writeBuildResultV3(
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV3,
|
||||
build: Builder
|
||||
) {
|
||||
const { output } = buildResult;
|
||||
const src = build.src;
|
||||
if (typeof src !== 'string') {
|
||||
throw new Error(`Expected "build.src" to be a string`);
|
||||
}
|
||||
const ext = extname(src);
|
||||
const path = build.config?.zeroConfig
|
||||
? src.substring(0, src.length - ext.length)
|
||||
: src;
|
||||
if (isLambda(output)) {
|
||||
const src = build.src!;
|
||||
const ext = extname(src);
|
||||
const path = build.config?.zeroConfig
|
||||
? src.substring(0, src.length - ext.length)
|
||||
: src;
|
||||
await writeLambda(output, path);
|
||||
await writeLambda(outputDir, output, path);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(outputDir, output, path);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unsupported output type: "${(output as any).type}" for ${build.src}`
|
||||
@@ -154,6 +169,7 @@ async function writeBuildResultV3(buildResult: BuildResultV3, build: Builder) {
|
||||
* @param overrides Record of override configuration when a File is renamed or has other metadata
|
||||
*/
|
||||
async function writeStaticFile(
|
||||
outputDir: string,
|
||||
file: File,
|
||||
path: string,
|
||||
overrides: Record<string, PathOverride>,
|
||||
@@ -191,7 +207,7 @@ async function writeStaticFile(
|
||||
overrides[fsPath] = override;
|
||||
}
|
||||
|
||||
const dest = join(OUTPUT_DIR, 'static', fsPath);
|
||||
const dest = join(outputDir, 'static', fsPath);
|
||||
await fs.mkdirp(dirname(dest));
|
||||
|
||||
// TODO: handle (or skip) symlinks?
|
||||
@@ -205,8 +221,12 @@ async function writeStaticFile(
|
||||
* @param edgeFunction The `EdgeFunction` instance
|
||||
* @param path The URL path where the `EdgeFunction` can be accessed from
|
||||
*/
|
||||
async function writeEdgeFunction(edgeFunction: EdgeFunction, path: string) {
|
||||
const dest = join(OUTPUT_DIR, 'functions', `${path}.func`);
|
||||
async function writeEdgeFunction(
|
||||
outputDir: string,
|
||||
edgeFunction: EdgeFunction,
|
||||
path: string
|
||||
) {
|
||||
const dest = join(outputDir, 'functions', `${path}.func`);
|
||||
|
||||
await fs.mkdirp(dest);
|
||||
const ops: Promise<any>[] = [];
|
||||
@@ -235,11 +255,12 @@ async function writeEdgeFunction(edgeFunction: EdgeFunction, path: string) {
|
||||
* @param lambdas (optional) Map of `Lambda` instances that have previously been written
|
||||
*/
|
||||
async function writeLambda(
|
||||
outputDir: string,
|
||||
lambda: Lambda,
|
||||
path: string,
|
||||
lambdas?: Map<Lambda, string>
|
||||
) {
|
||||
const dest = join(OUTPUT_DIR, 'functions', `${path}.func`);
|
||||
const dest = join(outputDir, 'functions', `${path}.func`);
|
||||
|
||||
// If the `lambda` has already been written to the filesystem at a different
|
||||
// location then create a symlink to the previous location instead of copying
|
||||
@@ -248,7 +269,7 @@ async function writeLambda(
|
||||
if (existingLambdaPath) {
|
||||
const destDir = dirname(dest);
|
||||
const targetDest = join(
|
||||
OUTPUT_DIR,
|
||||
outputDir,
|
||||
'functions',
|
||||
`${existingLambdaPath}.func`
|
||||
);
|
||||
@@ -312,14 +333,17 @@ async function writeLambda(
|
||||
* `.vercel/output` directory that was specified by the Builder into the
|
||||
* `vc build` output directory.
|
||||
*/
|
||||
async function mergeBuilderOutput(buildResult: BuildResultBuildOutput) {
|
||||
const absOutputDir = resolve(OUTPUT_DIR);
|
||||
async function mergeBuilderOutput(
|
||||
outputDir: string,
|
||||
buildResult: BuildResultBuildOutput
|
||||
) {
|
||||
const absOutputDir = resolve(outputDir);
|
||||
if (absOutputDir === buildResult.buildOutputPath) {
|
||||
// `.vercel/output` dir is already in the correct location,
|
||||
// so no need to do anything
|
||||
return;
|
||||
}
|
||||
await fs.copy(buildResult.buildOutputPath, OUTPUT_DIR);
|
||||
await fs.copy(buildResult.buildOutputPath, outputDir);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@@ -142,6 +142,7 @@ export async function executeBuild(
|
||||
files,
|
||||
entrypoint,
|
||||
workPath,
|
||||
repoRootPath: workPath,
|
||||
config,
|
||||
meta: {
|
||||
isDev: true,
|
||||
|
||||
@@ -1735,6 +1735,7 @@ export default class DevServer {
|
||||
entrypoint: match.entrypoint,
|
||||
workPath,
|
||||
config: match.config || {},
|
||||
repoRootPath: this.cwd,
|
||||
meta: {
|
||||
isDev: true,
|
||||
requestPath,
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { resolve } from 'path';
|
||||
import _glob, { IOptions as GlobOptions } from 'glob';
|
||||
import fs from 'fs-extra';
|
||||
import { getVercelIgnore } from '@vercel/client';
|
||||
import uniqueStrings from './unique-strings';
|
||||
@@ -21,14 +20,6 @@ function flatten(
|
||||
return res;
|
||||
}
|
||||
|
||||
async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
_glob(pattern, options, (err, files) => {
|
||||
err ? reject(err) : resolve(files);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform relative paths into absolutes,
|
||||
* and maintain absolutes as such.
|
||||
@@ -65,15 +56,16 @@ interface StaticFilesOptions {
|
||||
export async function staticFiles(
|
||||
path: string,
|
||||
{ output, src }: StaticFilesOptions
|
||||
) {
|
||||
): Promise<string[]> {
|
||||
const { debug, time } = output;
|
||||
let files: string[] = [];
|
||||
|
||||
// The package.json `files` whitelist still
|
||||
// honors ignores: https://docs.npmjs.com/files/package.json#files
|
||||
const source = src || '.';
|
||||
// Convert all filenames into absolute paths
|
||||
const search = await glob(source, { cwd: path, absolute: true, dot: true });
|
||||
|
||||
// Ensure that `path` is an absolute path
|
||||
const search = resolve(path, source);
|
||||
|
||||
// Compile list of ignored patterns and files
|
||||
const { ig } = await getVercelIgnore(path);
|
||||
@@ -104,7 +96,7 @@ export async function staticFiles(
|
||||
// Locate files
|
||||
files = await time(
|
||||
`Locating files ${path}`,
|
||||
explode(search, {
|
||||
explode([search], {
|
||||
accepts,
|
||||
output,
|
||||
})
|
||||
@@ -164,7 +156,7 @@ async function explode(
|
||||
const all = await fs.readdir(file);
|
||||
/* eslint-disable no-use-before-define */
|
||||
const recursive = many(all.map(subdir => asAbsolute(subdir, file)));
|
||||
return (recursive as any) as Promise<string | null>;
|
||||
return recursive as any as Promise<string | null>;
|
||||
/* eslint-enable no-use-before-define */
|
||||
}
|
||||
if (!s.isFile()) {
|
||||
|
||||
@@ -1,12 +1,12 @@
/**
* A fast implementation of an algorithm that takes an array and returns a copy of the array without duplicates.
* We used to use `array-unique` ( https://github.com/jonschlinkert/array-unique/blob/5d1fbe560da8125e28e4ad6fbfa9daaf9f2ec120/index.js )
* We used to use `array-unique` (https://github.com/jonschlinkert/array-unique/blob/5d1fbe560da8125e28e4ad6fbfa9daaf9f2ec120/index.js)
* but from running benchmarks, found the implementation to be too slow. This implementation has shown to be up to ~10x faster for large
* projects
* @param {Array} arr Input array that potentially has duplicates
* @returns {Array} An array of the unique values in `arr`
*/
export default (arr: string[]) => {
export default (arr: string[]): string[] => {
const len = arr.length;
const res: string[] = [];
const o: { [key: string]: string | number } = {};
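The hunk ends before the loop body. For readers skimming the diff, a minimal sketch of the object-keyed de-duplication the comment describes (the actual file may differ in its details):

export default (arr: string[]): string[] => {
  const len = arr.length;
  const res: string[] = [];
  const seen: { [key: string]: true } = {};
  for (let i = 0; i < len; i++) {
    const value = arr[i];
    if (seen[value] === undefined) {
      seen[value] = true; // object lookup beats Array#includes on large inputs
      res.push(value);
    }
  }
  return res;
};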
2
packages/cli/test/fixtures/unit/commands/build/.gitignore
vendored
Normal file
2
packages/cli/test/fixtures/unit/commands/build/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
!/*/.vercel
|
||||
/*/.vercel/output
|
||||
1
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/.gitignore
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
!node_modules
|
||||
15
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/node_modules/edge-function/builder.js
generated
vendored
Normal file
15
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/node_modules/edge-function/builder.js
generated
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
const { EdgeFunction } = require('@vercel/build-utils');
|
||||
|
||||
exports.version = 3;
|
||||
|
||||
exports.build = async ({ entrypoint, files }) => {
|
||||
const output = new EdgeFunction({
|
||||
name: entrypoint,
|
||||
deploymentTarget: 'v8-worker',
|
||||
entrypoint,
|
||||
files: {
|
||||
[entrypoint]: files[entrypoint]
|
||||
},
|
||||
});
|
||||
return { output };
|
||||
};
|
||||
6
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/node_modules/edge-function/package.json
generated
vendored
Normal file
6
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/node_modules/edge-function/package.json
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"name": "edge-function",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"main": "builder.js"
|
||||
}
|
||||
7
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/project.json
vendored
Normal file
7
packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/project.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null
|
||||
}
|
||||
}
|
||||
5
packages/cli/test/fixtures/unit/commands/build/edge-function/api/edge.js
vendored
Normal file
5
packages/cli/test/fixtures/unit/commands/build/edge-function/api/edge.js
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
export const config = {
|
||||
runtime: 'experimental-edge',
|
||||
};
|
||||
|
||||
export default req => new Response('from edge');
|
||||
7
packages/cli/test/fixtures/unit/commands/build/edge-function/vercel.json
vendored
Normal file
7
packages/cli/test/fixtures/unit/commands/build/edge-function/vercel.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"functions": {
|
||||
"api/*.js": {
|
||||
"runtime": "edge-function@0.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
7
packages/cli/test/fixtures/unit/commands/build/middleware/.vercel/project.json
vendored
Normal file
7
packages/cli/test/fixtures/unit/commands/build/middleware/.vercel/project.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null
|
||||
}
|
||||
}
|
||||
1
packages/cli/test/fixtures/unit/commands/build/middleware/index.html
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/middleware/index.html
vendored
Normal file
@@ -0,0 +1 @@
|
||||
<h1>Vercel</h1>
|
||||
1
packages/cli/test/fixtures/unit/commands/build/middleware/middleware.js
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/middleware/middleware.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export default req => new Response('middleware');
|
||||
7
packages/cli/test/fixtures/unit/commands/build/node/.vercel/project.json
vendored
Normal file
7
packages/cli/test/fixtures/unit/commands/build/node/.vercel/project.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null
|
||||
}
|
||||
}
|
||||
1
packages/cli/test/fixtures/unit/commands/build/node/api/es6.js
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/node/api/es6.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export default (req, res) => res.end('Vercel');
|
||||
1
packages/cli/test/fixtures/unit/commands/build/node/api/index.js
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/node/api/index.js
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = (req, res) => res.end('Vercel');
|
||||
1
packages/cli/test/fixtures/unit/commands/build/node/api/mjs.mjs
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/node/api/mjs.mjs
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export default (req, res) => res.end('Vercel');
|
||||
3
packages/cli/test/fixtures/unit/commands/build/node/api/typescript.ts
vendored
Normal file
3
packages/cli/test/fixtures/unit/commands/build/node/api/typescript.ts
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
import { IncomingMessage, ServerResponse } from 'http';
|
||||
|
||||
export default (req: IncomingMessage, res: ServerResponse) => res.end('Vercel');
|
||||
4
packages/cli/test/fixtures/unit/commands/build/static-pull/.vercel/project.json
vendored
Normal file
4
packages/cli/test/fixtures/unit/commands/build/static-pull/.vercel/project.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"projectId": "vercel-pull-next",
|
||||
"orgId": "team_dummy"
|
||||
}
|
||||
1
packages/cli/test/fixtures/unit/commands/build/static-pull/index.html
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/static-pull/index.html
vendored
Normal file
@@ -0,0 +1 @@
|
||||
<h1>Vercel</h1>
|
||||
7
packages/cli/test/fixtures/unit/commands/build/static/.vercel/project.json
vendored
Normal file
7
packages/cli/test/fixtures/unit/commands/build/static/.vercel/project.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null
|
||||
}
|
||||
}
|
||||
1
packages/cli/test/fixtures/unit/commands/build/static/index.html
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/static/index.html
vendored
Normal file
@@ -0,0 +1 @@
|
||||
<h1>Vercel</h1>
|
||||
@@ -0,0 +1 @@
|
||||
!node_modules
|
||||
14
packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/builders/node_modules/txt-builder/index.js
generated
vendored
Normal file
14
packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/builders/node_modules/txt-builder/index.js
generated
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
const { Lambda } = require('@vercel/build-utils');
|
||||
|
||||
exports.version = 3;
|
||||
|
||||
exports.build = async ({ entrypoint, files }) => {
|
||||
const output = new Lambda({
|
||||
files: {
|
||||
[entrypoint]: files[entrypoint]
|
||||
},
|
||||
runtime: 'provided',
|
||||
handler: entrypoint
|
||||
});
|
||||
return { output };
|
||||
};
|
||||
6
packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/builders/node_modules/txt-builder/package.json
generated
vendored
Normal file
6
packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/builders/node_modules/txt-builder/package.json
generated
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"name": "txt-builder",
|
||||
"private": true,
|
||||
"version": "0.0.0",
|
||||
"main": "index.js"
|
||||
}
|
||||
7
packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/project.json
vendored
Normal file
7
packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/project.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null
|
||||
}
|
||||
}
|
||||
1
packages/cli/test/fixtures/unit/commands/build/third-party-builder/api/foo.txt
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/third-party-builder/api/foo.txt
vendored
Normal file
@@ -0,0 +1 @@
|
||||
Text file
|
||||
7
packages/cli/test/fixtures/unit/commands/build/third-party-builder/vercel.json
vendored
Normal file
7
packages/cli/test/fixtures/unit/commands/build/third-party-builder/vercel.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"functions": {
|
||||
"api/*.txt": {
|
||||
"runtime": "txt-builder@0.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -19,7 +19,7 @@ const getRevertAliasConfigFile = () => {
|
||||
],
|
||||
});
|
||||
};
|
||||
module.exports = async function prepare(session, binaryPath) {
|
||||
module.exports = async function prepare(session, binaryPath, tmpFixturesDir) {
|
||||
const spec = {
|
||||
'static-single-file': {
|
||||
'first.png': getImageFile(session, { size: 30 }),
|
||||
@@ -440,16 +440,58 @@ module.exports = async function prepare(session, binaryPath) {
|
||||
},
|
||||
}),
|
||||
},
|
||||
'vc-build-corepack-npm': {
|
||||
'.vercel/project.json': JSON.stringify({
|
||||
orgId: '.',
|
||||
projectId: '.',
|
||||
settings: {
|
||||
framework: null,
|
||||
},
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
private: true,
|
||||
packageManager: 'npm@8.1.0',
|
||||
scripts: {
|
||||
build: 'mkdir -p public && npm --version > public/index.txt',
|
||||
},
|
||||
}),
|
||||
},
|
||||
'vc-build-corepack-pnpm': {
|
||||
'.vercel/project.json': JSON.stringify({
|
||||
orgId: '.',
|
||||
projectId: '.',
|
||||
settings: {
|
||||
framework: null,
|
||||
},
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
private: true,
|
||||
packageManager: 'pnpm@7.1.0',
|
||||
scripts: {
|
||||
build: 'mkdir -p public && pnpm --version > public/index.txt',
|
||||
},
|
||||
}),
|
||||
},
|
||||
'vc-build-corepack-yarn': {
|
||||
'.vercel/project.json': JSON.stringify({
|
||||
orgId: '.',
|
||||
projectId: '.',
|
||||
settings: {
|
||||
framework: null,
|
||||
},
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
private: true,
|
||||
packageManager: 'yarn@2.4.3',
|
||||
scripts: {
|
||||
build: 'mkdir -p public && yarn --version > public/index.txt',
|
||||
},
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
for (const [typeName, needed] of Object.entries(spec)) {
|
||||
const directory = join(
|
||||
__dirname,
|
||||
'..',
|
||||
'fixtures',
|
||||
'integration',
|
||||
typeName
|
||||
);
|
||||
const directory = join(tmpFixturesDir, typeName);
|
||||
|
||||
await mkdirp(directory);
|
||||
|
||||
|
||||
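A condensed view of what the new corepack fixtures amount to on disk once `prepare()` has written them into `tmpFixturesDir` (contents are exactly the JSON strings defined in the spec above):

tmp-fixtures/vc-build-corepack-npm/.vercel/project.json
tmp-fixtures/vc-build-corepack-npm/package.json    // "packageManager": "npm@8.1.0"
tmp-fixtures/vc-build-corepack-pnpm/package.json   // "packageManager": "pnpm@7.1.0"
tmp-fixtures/vc-build-corepack-yarn/package.json   // "packageManager": "yarn@2.4.3"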
packages/cli/test/integration.js (120 changes, vendored)
@@ -4,7 +4,7 @@ import { URL, parse as parseUrl } from 'url';
|
||||
import test from 'ava';
|
||||
import semVer from 'semver';
|
||||
import { Readable } from 'stream';
|
||||
import { homedir } from 'os';
|
||||
import { homedir, tmpdir } from 'os';
|
||||
import _execa from 'execa';
|
||||
import XDGAppPaths from 'xdg-app-paths';
|
||||
import fetch from 'node-fetch';
|
||||
@@ -31,7 +31,7 @@ function execa(file, args, options) {
|
||||
}
|
||||
|
||||
function fixture(name) {
|
||||
const directory = path.join(__dirname, 'fixtures', 'integration', name);
|
||||
const directory = path.join(tmpFixturesDir, name);
|
||||
const config = path.join(directory, 'project.json');
|
||||
|
||||
// We need to remove it, otherwise we can't re-use fixtures
|
||||
@@ -146,6 +146,7 @@ let email;
|
||||
let contextName;
|
||||
|
||||
let tmpDir;
|
||||
let tmpFixturesDir = path.join(tmpdir(), 'tmp-fixtures');
|
||||
|
||||
let globalDir = XDGAppPaths('com.vercel.cli').dataDirs()[0];
|
||||
|
||||
@@ -327,7 +328,7 @@ async function setupProject(process, projectName, overrides) {
|
||||
test.before(async () => {
|
||||
try {
|
||||
await createUser();
|
||||
await prepareFixtures(contextName, binaryPath);
|
||||
await prepareFixtures(contextName, binaryPath, tmpFixturesDir);
|
||||
} catch (err) {
|
||||
console.log('Failed `test.before`');
|
||||
console.log(err);
|
||||
@@ -335,6 +336,8 @@ test.before(async () => {
|
||||
});
|
||||
|
||||
test.after.always(async () => {
|
||||
delete process.env.ENABLE_EXPERIMENTAL_COREPACK;
|
||||
|
||||
if (loginApiServer) {
|
||||
// Stop mock server
|
||||
loginApiServer.close();
|
||||
@@ -349,6 +352,11 @@ test.after.always(async () => {
|
||||
// Remove config directory entirely
|
||||
tmpDir.removeCallback();
|
||||
}
|
||||
|
||||
if (tmpFixturesDir) {
|
||||
console.log('removing tmpFixturesDir', tmpFixturesDir);
|
||||
fs.removeSync(tmpFixturesDir);
|
||||
}
|
||||
});
|
||||
|
||||
test('default command should prompt login with empty auth.json', async t => {
|
||||
@@ -390,6 +398,99 @@ test('login', async t => {
|
||||
t.is(auth.token, token);
|
||||
});
|
||||
|
||||
test('[vc build] should build project with corepack and select npm@8.1.0', async t => {
|
||||
process.env.ENABLE_EXPERIMENTAL_COREPACK = '1';
|
||||
const directory = fixture('vc-build-corepack-npm');
|
||||
const before = await _execa('npm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
const output = await execute(['build'], { cwd: directory });
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
t.regex(output.stderr, /Build Completed/gm);
|
||||
const after = await _execa('npm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
// Ensure global npm didn't change
|
||||
t.is(before.stdout, after.stdout);
|
||||
// Ensure version is correct
|
||||
t.is(
|
||||
await fs.readFile(
|
||||
path.join(directory, '.vercel/output/static/index.txt'),
|
||||
'utf8'
|
||||
),
|
||||
'8.1.0\n'
|
||||
);
|
||||
// Ensure corepack will be cached
|
||||
const contents = fs.readdirSync(
|
||||
path.join(directory, '.vercel/cache/corepack')
|
||||
);
|
||||
t.deepEqual(contents, ['home', 'shim']);
|
||||
});
|
||||
|
||||
test('[vc build] should build project with corepack and select pnpm@7.1.0', async t => {
|
||||
process.env.ENABLE_EXPERIMENTAL_COREPACK = '1';
|
||||
const directory = fixture('vc-build-corepack-pnpm');
|
||||
const before = await _execa('pnpm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
const output = await execute(['build'], { cwd: directory });
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
t.regex(output.stderr, /Build Completed/gm);
|
||||
const after = await _execa('pnpm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
// Ensure global pnpm didn't change
|
||||
t.is(before.stdout, after.stdout);
|
||||
// Ensure version is correct
|
||||
t.is(
|
||||
await fs.readFile(
|
||||
path.join(directory, '.vercel/output/static/index.txt'),
|
||||
'utf8'
|
||||
),
|
||||
'7.1.0\n'
|
||||
);
|
||||
// Ensure corepack will be cached
|
||||
const contents = fs.readdirSync(
|
||||
path.join(directory, '.vercel/cache/corepack')
|
||||
);
|
||||
t.deepEqual(contents, ['home', 'shim']);
|
||||
});
|
||||
|
||||
test('[vc build] should build project with corepack and select yarn@2.4.3', async t => {
|
||||
process.env.ENABLE_EXPERIMENTAL_COREPACK = '1';
|
||||
const directory = fixture('vc-build-corepack-yarn');
|
||||
const before = await _execa('yarn', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
const output = await execute(['build'], { cwd: directory });
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
t.regex(output.stderr, /Build Completed/gm);
|
||||
const after = await _execa('yarn', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
// Ensure global yarn didn't change
|
||||
t.is(before.stdout, after.stdout);
|
||||
// Ensure version is correct
|
||||
t.is(
|
||||
await fs.readFile(
|
||||
path.join(directory, '.vercel/output/static/index.txt'),
|
||||
'utf8'
|
||||
),
|
||||
'2.4.3\n'
|
||||
);
|
||||
// Ensure corepack will be cached
|
||||
const contents = fs.readdirSync(
|
||||
path.join(directory, '.vercel/cache/corepack')
|
||||
);
|
||||
t.deepEqual(contents, ['home', 'shim']);
|
||||
});
|
||||
|
||||
test('default command should deploy directory', async t => {
|
||||
const projectDir = fixture('deploy-default-with-sub-directory');
|
||||
const target = 'output';
|
||||
@@ -1507,7 +1608,7 @@ test('try to purchase a domain', async t => {
|
||||
|
||||
const { stderr, stdout, exitCode } = await execa(
|
||||
binaryPath,
|
||||
['domains', 'buy', `${session}-test.org`, ...defaultArgs],
|
||||
['domains', 'buy', `${session}-test.com`, ...defaultArgs],
|
||||
{
|
||||
reject: false,
|
||||
input: stream,
|
||||
@@ -1522,10 +1623,9 @@ test('try to purchase a domain', async t => {
|
||||
console.log(exitCode);
|
||||
|
||||
t.is(exitCode, 1);
|
||||
t.true(
|
||||
stderr.includes(
|
||||
`Error! Could not purchase domain. Please add a payment method using \`vercel billing add\`.`
|
||||
)
|
||||
t.regex(
|
||||
stderr,
|
||||
/Error! Could not purchase domain\. Please add a payment method using/
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1537,7 +1637,7 @@ test('try to transfer-in a domain with "--code" option', async t => {
|
||||
'transfer-in',
|
||||
'--code',
|
||||
'xyz',
|
||||
`${session}-test.org`,
|
||||
`${session}-test.com`,
|
||||
...defaultArgs,
|
||||
],
|
||||
{
|
||||
@@ -1551,7 +1651,7 @@ test('try to transfer-in a domain with "--code" option', async t => {
|
||||
|
||||
t.true(
|
||||
stderr.includes(
|
||||
`Error! The domain "${session}-test.org" is not transferable.`
|
||||
`Error! The domain "${session}-test.com" is not transferable.`
|
||||
)
|
||||
);
|
||||
t.is(exitCode, 1);
|
||||
|
||||
packages/cli/test/unit/commands/build.test.ts (374 lines, new file)
@@ -0,0 +1,374 @@
|
||||
import ms from 'ms';
|
||||
import fs from 'fs-extra';
|
||||
import { join } from 'path';
|
||||
import build from '../../../src/commands/build';
|
||||
import { client } from '../../mocks/client';
|
||||
import { defaultProject, useProject } from '../../mocks/project';
|
||||
import { useTeams } from '../../mocks/team';
|
||||
import { useUser } from '../../mocks/user';
|
||||
|
||||
jest.setTimeout(ms('1 minute'));
|
||||
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/build', name);
|
||||
|
||||
describe('build', () => {
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
it('should build with `@vercel/static`', async () => {
|
||||
const cwd = fixture('static');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/static" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
src: '**',
|
||||
use: '@vercel/static',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory contains static files
|
||||
const files = await fs.readdir(join(output, 'static'));
|
||||
expect(files.sort()).toEqual(['index.html']);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should build with `@vercel/node`', async () => {
|
||||
const cwd = fixture('node');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/node" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/es6.js',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/index.js',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/mjs.mjs',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/typescript.ts',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory is empty
|
||||
const hasStaticFiles = await fs.pathExists(join(output, 'static'));
|
||||
expect(
|
||||
hasStaticFiles,
|
||||
'Expected ".vercel/output/static" to not exist'
|
||||
).toEqual(false);
|
||||
|
||||
// "functions/api" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions/api'));
|
||||
expect(functions.sort()).toEqual([
|
||||
'es6.func',
|
||||
'index.func',
|
||||
'mjs.func',
|
||||
'typescript.func',
|
||||
]);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should build with 3rd party Builder', async () => {
|
||||
const cwd = fixture('third-party-builder');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/node" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: 'txt-builder',
|
||||
apiVersion: 3,
|
||||
use: 'txt-builder@0.0.0',
|
||||
src: 'api/foo.txt',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
functions: {
|
||||
'api/*.txt': {
|
||||
runtime: 'txt-builder@0.0.0',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
use: '@vercel/static',
|
||||
src: '!{api/**,package.json,middleware.[jt]s}',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory is empty
|
||||
const hasStaticFiles = await fs.pathExists(join(output, 'static'));
|
||||
expect(
|
||||
hasStaticFiles,
|
||||
'Expected ".vercel/output/static" to not exist'
|
||||
).toEqual(false);
|
||||
|
||||
// "functions/api" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions/api'));
|
||||
expect(functions.sort()).toEqual(['foo.func']);
|
||||
|
||||
const vcConfig = await fs.readJSON(
|
||||
join(output, 'functions/api/foo.func/.vc-config.json')
|
||||
);
|
||||
expect(vcConfig).toMatchObject({
|
||||
handler: 'api/foo.txt',
|
||||
runtime: 'provided',
|
||||
environment: {},
|
||||
});
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should serialize `EdgeFunction` output in version 3 Builder', async () => {
|
||||
const cwd = fixture('edge-function');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
client.setArgv('build', '--prod');
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/node" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'production',
|
||||
builds: [
|
||||
{
|
||||
require: 'edge-function',
|
||||
apiVersion: 3,
|
||||
use: 'edge-function@0.0.0',
|
||||
src: 'api/edge.js',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
functions: {
|
||||
'api/*.js': {
|
||||
runtime: 'edge-function@0.0.0',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
use: '@vercel/static',
|
||||
src: '!{api/**,package.json,middleware.[jt]s}',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory is empty
|
||||
const hasStaticFiles = await fs.pathExists(join(output, 'static'));
|
||||
expect(
|
||||
hasStaticFiles,
|
||||
'Expected ".vercel/output/static" to not exist'
|
||||
).toEqual(false);
|
||||
|
||||
// "functions/api" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions/api'));
|
||||
expect(functions.sort()).toEqual(['edge.func']);
|
||||
|
||||
const vcConfig = await fs.readJSON(
|
||||
join(output, 'functions/api/edge.func/.vc-config.json')
|
||||
);
|
||||
expect(vcConfig).toMatchObject({
|
||||
runtime: 'edge',
|
||||
name: 'api/edge.js',
|
||||
deploymentTarget: 'v8-worker',
|
||||
entrypoint: 'api/edge.js',
|
||||
});
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should pull "preview" env vars by default', async () => {
|
||||
const cwd = fixture('static-pull');
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'vercel-pull-next',
|
||||
name: 'vercel-pull-next',
|
||||
});
|
||||
const envFilePath = join(cwd, '.vercel', '.env.preview.local');
|
||||
const projectJsonPath = join(cwd, '.vercel', 'project.json');
|
||||
const originalProjectJson = await fs.readJSON(
|
||||
join(cwd, '.vercel/project.json')
|
||||
);
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
client.setArgv('build', '--yes');
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const previewEnv = await fs.readFile(envFilePath, 'utf8');
|
||||
const envFileHasPreviewEnv = previewEnv.includes(
|
||||
'REDIS_CONNECTION_STRING'
|
||||
);
|
||||
expect(envFileHasPreviewEnv).toBeTruthy();
|
||||
} finally {
|
||||
await fs.remove(envFilePath);
|
||||
await fs.writeJSON(projectJsonPath, originalProjectJson, { spaces: 2 });
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should pull "production" env vars with `--prod`', async () => {
|
||||
const cwd = fixture('static-pull');
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'vercel-pull-next',
|
||||
name: 'vercel-pull-next',
|
||||
});
|
||||
const envFilePath = join(cwd, '.vercel', '.env.production.local');
|
||||
const projectJsonPath = join(cwd, '.vercel', 'project.json');
|
||||
const originalProjectJson = await fs.readJSON(
|
||||
join(cwd, '.vercel/project.json')
|
||||
);
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
client.setArgv('build', '--yes', '--prod');
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const prodEnv = await fs.readFile(envFilePath, 'utf8');
|
||||
const envFileHasProductionEnv1 = prodEnv.includes(
|
||||
'REDIS_CONNECTION_STRING'
|
||||
);
|
||||
expect(envFileHasProductionEnv1).toBeTruthy();
|
||||
const envFileHasProductionEnv2 = prodEnv.includes(
|
||||
'SQL_CONNECTION_STRING'
|
||||
);
|
||||
expect(envFileHasProductionEnv2).toBeTruthy();
|
||||
} finally {
|
||||
await fs.remove(envFilePath);
|
||||
await fs.writeJSON(projectJsonPath, originalProjectJson, { spaces: 2 });
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should build root-level `middleware.js` and exclude from static files', async () => {
|
||||
const cwd = fixture('middleware');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/static" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'middleware.js',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
middleware: true,
|
||||
},
|
||||
},
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
use: '@vercel/static',
|
||||
src: '!{api/**,package.json,middleware.[jt]s}',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// `config.json` includes the "middlewarePath" route
|
||||
const config = await fs.readJSON(join(output, 'config.json'));
|
||||
expect(config).toMatchObject({
|
||||
version: 3,
|
||||
routes: [
|
||||
{ src: '/.*', middlewarePath: 'middleware', continue: true },
|
||||
{ handle: 'filesystem' },
|
||||
{ src: '^/api(/.*)?$', status: 404 },
|
||||
{ handle: 'error' },
|
||||
{ status: 404, src: '^(?!/api).*$', dest: '/404.html' },
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory contains `index.html`, but *not* `middleware.js`
|
||||
const staticFiles = await fs.readdir(join(output, 'static'));
|
||||
expect(staticFiles.sort()).toEqual(['index.html']);
|
||||
|
||||
// "functions" directory contains `middleware.func`
|
||||
const functions = await fs.readdir(join(output, 'functions'));
|
||||
expect(functions.sort()).toEqual(['middleware.func']);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -10,7 +10,7 @@ import {
|
||||
import vercelNextPkg from '@vercel/next/package.json';
|
||||
import vercelNodePkg from '@vercel/node/package.json';
|
||||
|
||||
jest.setTimeout(ms('20 seconds'));
|
||||
jest.setTimeout(ms('30 seconds'));
|
||||
|
||||
describe('importBuilders()', () => {
|
||||
it('should import built-in Builders', async () => {
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"moduleResolution": "node",
|
||||
"module": "commonjs",
|
||||
"target": "es2019",
|
||||
"target": "ES2020",
|
||||
"esModuleInterop": true,
|
||||
"allowJs": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"resolveJsonModule": true,
|
||||
"sourceMap": true,
|
||||
"outDir": "./dist",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "11.0.4",
|
||||
"version": "12.0.2-canary.0",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -20,7 +20,7 @@
|
||||
"test-unit": "yarn test tests/unit.*test.*"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
"node": ">= 14"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/async-retry": "1.4.1",
|
||||
@@ -42,7 +42,7 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "3.1.1",
|
||||
"@vercel/build-utils": "4.1.1-canary.0",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "3.0.0",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"compilerOptions": {
|
||||
"declaration": true,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"outDir": "dist",
|
||||
@@ -12,7 +12,7 @@
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"strict": true,
|
||||
"target": "es2019"
|
||||
"target": "ES2020"
|
||||
},
|
||||
"include": ["./src"]
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "1.0.0",
|
||||
"version": "1.0.1",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/js-yaml": "3.12.1",
|
||||
"@types/node": "12.0.4",
|
||||
"@types/node-fetch": "2.5.8",
|
||||
"@vercel/routing-utils": "1.13.3",
|
||||
"@vercel/routing-utils": "1.13.4",
|
||||
"ajv": "6.12.2",
|
||||
"typescript": "4.3.4"
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"compilerOptions": {
|
||||
"declaration": true,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
@@ -13,7 +13,7 @@
|
||||
"outDir": "./dist",
|
||||
"types": ["node", "jest"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
"target": "ES2020"
|
||||
},
|
||||
"include": ["src/*.ts"],
|
||||
"exclude": ["node_modules"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/go",
|
||||
"version": "1.4.4",
|
||||
"version": "2.0.2-canary.0",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
|
||||
@@ -25,7 +25,7 @@
|
||||
"@types/fs-extra": "^5.0.5",
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "^4.0.0",
|
||||
"@vercel/build-utils": "3.1.1",
|
||||
"@vercel/build-utils": "4.1.1-canary.0",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"async-retry": "1.3.1",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
@@ -13,6 +13,6 @@
|
||||
"noImplicitThis": false,
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "es2018"
|
||||
"target": "ES2020"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "2.9.0",
|
||||
"version": "3.0.2-canary.0",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -45,9 +45,9 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "3.1.1",
|
||||
"@vercel/build-utils": "4.1.1-canary.0",
|
||||
"@vercel/nft": "0.19.1",
|
||||
"@vercel/routing-utils": "1.13.3",
|
||||
"@vercel/routing-utils": "1.13.4",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
"cheerio": "1.0.0-rc.10",
|
||||
|
||||
@@ -2,23 +2,6 @@ const KIB = 1024;
const MIB = 1024 * KIB;

/**
* The limit after compression. It has to be kibibytes instead of kilobytes.
* See https://github.com/cloudflare/wrangler/blob/8907b12add3d70ee21ac597b69cd66f6807571f4/src/wranglerjs/output.rs#L44
* The maximum size of a *compressed* edge function.
*/
const EDGE_FUNCTION_SCRIPT_SIZE_LIMIT = MIB;

/**
* This safety buffer must cover the size of our whole runtime layer compressed
* plus some extra space to allow it to grow in the future. At the time of
* writing this comment the compressed size is ~7KiB so 20KiB should
* be more than enough.
*/
const EDGE_FUNCTION_SCRIPT_SIZE_BUFFER = 20 * KIB;

/**
* The max size we allow for compressed user code is the compressed script
* limit minus the compressed safety buffer. We must check this limit after
* compressing the user code.
*/
export const EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT =
EDGE_FUNCTION_SCRIPT_SIZE_LIMIT - EDGE_FUNCTION_SCRIPT_SIZE_BUFFER;
export const EDGE_FUNCTION_SIZE_LIMIT = MIB;
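In other words, the previous budget for compressed user code was EDGE_FUNCTION_SCRIPT_SIZE_LIMIT − EDGE_FUNCTION_SCRIPT_SIZE_BUFFER = 1 MiB − 20 KiB = 1,048,576 − 20,480 = 1,028,096 bytes, while the new EDGE_FUNCTION_SIZE_LIMIT used by `validateScript()` below allows the full 1,048,576 bytes (1 MiB) of gzipped output.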
@@ -3,7 +3,7 @@ import { readFile } from 'fs-extra';
|
||||
import { ConcatSource, Source } from 'webpack-sources';
|
||||
import { fileToSource, raw, sourcemapped } from '../sourcemapped';
|
||||
import { join } from 'path';
|
||||
import { EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT } from './constants';
|
||||
import { EDGE_FUNCTION_SIZE_LIMIT } from './constants';
|
||||
import zlib from 'zlib';
|
||||
import { promisify } from 'util';
|
||||
import bytes from 'pretty-bytes';
|
||||
@@ -74,11 +74,11 @@ function getWasmImportStatements(wasm: { name: string }[] = []) {
|
||||
|
||||
async function validateScript(content: string) {
|
||||
const gzipped = await gzip(content);
|
||||
if (gzipped.length > EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT) {
|
||||
if (gzipped.length > EDGE_FUNCTION_SIZE_LIMIT) {
|
||||
throw new Error(
|
||||
`Exceeds maximum edge function script size: ${bytes(
|
||||
gzipped.length
|
||||
)} / ${bytes(EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT)}`
|
||||
)} / ${bytes(EDGE_FUNCTION_SIZE_LIMIT)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,6 +78,7 @@ import {
|
||||
updateRouteSrc,
|
||||
validateEntrypoint,
|
||||
} from './utils';
|
||||
import assert from 'assert';
|
||||
|
||||
export const version = 2;
|
||||
export const htmlContentType = 'text/html; charset=utf-8';
|
||||
@@ -131,10 +132,11 @@ function getRealNextVersion(entryPath: string): string | false {
|
||||
// First try to resolve the `next` dependency and get the real version from its
|
||||
// package.json. This allows the builder to be used with frameworks like Blitz that
|
||||
// bundle Next but where Next isn't in the project root's package.json
|
||||
const nextVersion: string = require(resolveFrom(
|
||||
entryPath,
|
||||
'next/package.json'
|
||||
)).version;
|
||||
|
||||
// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
|
||||
const nextVersion: string = eval('require')(
|
||||
resolveFrom(entryPath, 'next/package.json')
|
||||
).version;
|
||||
console.log(`Detected Next.js version: ${nextVersion}`);
|
||||
return nextVersion;
|
||||
} catch (_ignored) {
|
||||
@@ -1292,6 +1294,7 @@ export const build: BuildV2 = async ({
|
||||
entryPath,
|
||||
baseDir,
|
||||
dataRoutes,
|
||||
buildId,
|
||||
escapedBuildId,
|
||||
outputDirectory,
|
||||
trailingSlashRedirects,
|
||||
@@ -2596,8 +2599,17 @@ async function getServerlessPages(params: {
|
||||
|
||||
// Edge Functions do not consider as Serverless Functions
|
||||
for (const edgeFunctionFile of Object.keys(
|
||||
middlewareManifest?.middleware ?? {}
|
||||
middlewareManifest?.functions ?? {}
|
||||
)) {
|
||||
// `getStaticProps` are expecting `Prerender` output which is a Serverless function
|
||||
    // and not an Edge Function. Therefore we only remove API endpoints for now, as they
    // don't have `getStaticProps`.
    //
    // Context: https://github.com/vercel/vercel/pull/7905#discussion_r890213165
    assert(
      edgeFunctionFile.startsWith('/api/'),
      `Only API endpoints are currently supported for Edge endpoints.`
    );
    delete pages[edgeFunctionFile.slice(1) + '.js'];
  }

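
To make the intent of this hunk explicit, a rough standalone sketch under assumed minimal shapes (dropEdgeFunctionPages and MiddlewareManifestLike are illustrative, not the builder's API):

type MiddlewareManifestLike = {
  functions?: Record<string, unknown>;
};

// Remove entries built as Edge Functions from the map of serverless pages;
// a manifest key such as '/api/edge' corresponds to the 'api/edge.js' page entry.
function dropEdgeFunctionPages(
  pages: Record<string, unknown>,
  middlewareManifest?: MiddlewareManifestLike
): void {
  for (const edgeFunctionFile of Object.keys(middlewareManifest?.functions ?? {})) {
    if (!edgeFunctionFile.startsWith('/api/')) {
      throw new Error('Only API endpoints are currently supported for Edge endpoints.');
    }
    delete pages[edgeFunctionFile.slice(1) + '.js'];
  }
}
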
@@ -54,6 +54,8 @@ import prettyBytes from 'pretty-bytes';

// related PR: https://github.com/vercel/next.js/pull/30046
const CORRECT_NOT_FOUND_ROUTES_VERSION = 'v12.0.1';
const CORRECT_MIDDLEWARE_ORDER_VERSION = 'v12.1.7-canary.29';
const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';

export async function serverBuild({
  dynamicPages,
@@ -64,6 +66,7 @@ export async function serverBuild({
  workPath,
  entryPath,
  nodeVersion,
  buildId,
  escapedBuildId,
  dynamicPrefix,
  entryDirectory,
@@ -100,6 +103,7 @@ export async function serverBuild({
  privateOutputs: { files: Files; routes: Route[] };
  entryPath: string;
  dynamicPrefix: string;
  buildId: string;
  escapedBuildId: string;
  wildcardConfig: BuildResult['wildcard'];
  nodeVersion: NodeVersion;
@@ -131,6 +135,10 @@ export async function serverBuild({
    nextVersion,
    CORRECT_NOT_FOUND_ROUTES_VERSION
  );
  const isCorrectMiddlewareOrder = semver.gte(
    nextVersion,
    CORRECT_MIDDLEWARE_ORDER_VERSION
  );
  let hasStatic500 = !!staticPages[path.join(entryDirectory, '500')];

  if (lambdaPageKeys.length === 0) {
@@ -788,8 +796,13 @@ export async function serverBuild({
    entryPath,
    outputDirectory,
    routesManifest,
    isCorrectMiddlewareOrder,
  });

  const isNextDataServerResolving =
    middleware.staticRoutes.length > 0 &&
    semver.gte(nextVersion, NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION);

  const dynamicRoutes = await getDynamicRoutes(
    entryPath,
    entryDirectory,
@@ -870,6 +883,53 @@ export async function serverBuild({
    }
  }

  const normalizeNextDataRoute = isNextDataServerResolving
    ? [
        // strip _next/data prefix for resolving
        {
          src: `^${path.join(
            '/',
            entryDirectory,
            '/_next/data/',
            escapedBuildId,
            '/(.*).json'
          )}`,
          dest: `${path.join('/', entryDirectory, '/$1')}`,
          continue: true,
          override: true,
          has: [
            {
              type: 'header',
              key: 'x-nextjs-data',
            },
          ],
        },
      ]
    : [];

  const denormalizeNextDataRoute = isNextDataServerResolving
    ? [
        {
          src: '/(.*)',
          has: [
            {
              type: 'header',
              key: 'x-nextjs-data',
            },
          ],
          dest: `${path.join(
            '/',
            entryDirectory,
            '/_next/data/',
            buildId,
            '/$1.json'
          )}`,
          continue: true,
          override: true,
        },
      ]
    : [];

  return {
    wildcard: wildcardConfig,
    images:
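
To illustrate the effect of the normalizeNextDataRoute / denormalizeNextDataRoute pair above, a rough sketch using plain string helpers and a hardcoded buildId; the real routes operate at the routing layer via src/dest/has rather than on strings:

// Assumption: 'abc123' stands in for the deployment's Next.js buildId.
const BUILD_ID = 'abc123';

// normalize: for requests carrying the x-nextjs-data header, strip the
// /_next/data/<buildId>/ prefix and the .json suffix so the path can be
// resolved like a regular page URL.
function normalizeDataUrl(pathname: string): string {
  const match = pathname.match(
    new RegExp(`^/_next/data/${BUILD_ID}/(.*)\\.json$`)
  );
  return match ? `/${match[1]}` : pathname;
}

// denormalize: once resolution has happened, rebuild the _next/data URL so the
// correct JSON data route is ultimately served.
function denormalizeDataUrl(pathname: string): string {
  return `/_next/data/${BUILD_ID}${pathname}.json`;
}

// Example: normalizeDataUrl('/_next/data/abc123/blog/post.json') => '/blog/post'
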
@@ -894,6 +954,15 @@ export async function serverBuild({
      ...staticDirectoryFiles,
      ...privateOutputs.files,
      ...middleware.edgeFunctions,
      ...(isNextDataServerResolving
        ? {
            __next_data_catchall: new FileBlob({
              contentType: 'application/json',
              mode: 0o644,
              data: '{}',
            }),
          }
        : {}),
    },
    routes: [
      /*
@@ -913,6 +982,9 @@ export async function serverBuild({

      ...privateOutputs.routes,

      // normalize _next/data URL before processing redirects
      ...normalizeNextDataRoute,

      ...(i18n
        ? [
            // Handle auto-adding current default locale to path based on
@@ -1025,6 +1097,10 @@ export async function serverBuild({

      ...redirects,

      // middleware comes directly after redirects but before
      // beforeFiles rewrites as middleware is not a "file" route
      ...(isCorrectMiddlewareOrder ? middleware.staticRoutes : []),

      ...beforeFilesRewrites,

      // Make sure to 404 for the /404 path itself
@@ -1067,7 +1143,13 @@ export async function serverBuild({
        },
      ]),

      ...middleware.staticRoutes,
      // we need to undo _next/data normalize before checking filesystem
      ...denormalizeNextDataRoute,

      // while middleware was in beta the order came right before
      // handle: 'filesystem' we maintain this for older versions
      // to prevent a local/deploy mismatch
      ...(!isCorrectMiddlewareOrder ? middleware.staticRoutes : []),

      // Next.js page lambdas, `static/` folder, reserved assets, and `public/`
      // folder
@@ -1085,13 +1167,20 @@ export async function serverBuild({
          ]
        : []),

      // No-op _next/data rewrite to trigger handle: 'rewrites' and then 404
      // if no match to prevent rewriting _next/data unexpectedly
      {
        src: path.join('/', entryDirectory, '_next/data/(.*)'),
        dest: path.join('/', entryDirectory, '_next/data/$1'),
        check: true,
      },
      // normalize _next/data URL before processing rewrites
      ...normalizeNextDataRoute,

      ...(!isNextDataServerResolving
        ? [
            // No-op _next/data rewrite to trigger handle: 'rewrites' and then 404
            // if no match to prevent rewriting _next/data unexpectedly
            {
              src: path.join('/', entryDirectory, '_next/data/(.*)'),
              dest: path.join('/', entryDirectory, '_next/data/$1'),
              check: true,
            },
          ]
        : []),

      // These need to come before handle: miss or else they are grouped
      // with that routing section
@@ -1150,21 +1239,45 @@ export async function serverBuild({
      // if there are no rewrites
      { handle: 'rewrite' },

      // re-build /_next/data URL after resolving
      ...denormalizeNextDataRoute,

      // /_next/data routes for getServerProps/getStaticProps pages
      ...dataRoutes,

      // ensure we 404 for non-existent _next/data routes before
      // trying page dynamic routes
      {
        src: path.join('/', entryDirectory, '_next/data/(.*)'),
        dest: path.join('/', entryDirectory, '404'),
        status: 404,
      },
      ...(!isNextDataServerResolving
        ? [
            // ensure we 404 for non-existent _next/data routes before
            // trying page dynamic routes
            {
              src: path.join('/', entryDirectory, '_next/data/(.*)'),
              dest: path.join('/', entryDirectory, '404'),
              status: 404,
            },
          ]
        : []),

      // Dynamic routes (must come after dataRoutes as dataRoutes are more
      // specific)
      ...dynamicRoutes,

      ...(isNextDataServerResolving
        ? [
            // add a catch-all data route so we don't 404 when getting
            // middleware effects
            {
              src: `^${path.join(
                '/',
                entryDirectory,
                '/_next/data/',
                escapedBuildId,
                '/(.*).json'
              )}`,
              dest: '__next_data_catchall',
            },
          ]
        : []),

      // routes to call after a file has been matched
      { handle: 'hit' },
      // Before we handle static files we need to set proper caching headers

@@ -244,9 +244,7 @@ export async function getRoutesManifest(
    });
  }

  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const routesManifest: RoutesManifest = require(pathRoutesManifest);

  const routesManifest: RoutesManifest = await fs.readJSON(pathRoutesManifest);
  // remove temporary array based routeKeys from v1/v2 of routes
  // manifest since it can result in invalid routes
  for (const route of routesManifest.dataRoutes || []) {
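
For reference, a minimal sketch of the pattern these manifest hunks adopt. loadRoutesManifest and RoutesManifestLike are illustrative names, but fs-extra's readJSON is the helper used in the diff; unlike require(), it is not rewritten by bundlers and re-reads the file instead of returning a cached module:

import fs from 'fs-extra';

// Assumed minimal shape of the routes manifest for this illustration.
interface RoutesManifestLike {
  version: number;
  dataRoutes?: Array<{ page: string; routeKeys?: unknown }>;
}

// Read and parse the manifest from disk on every call instead of require()-ing it.
async function loadRoutesManifest(manifestPath: string): Promise<RoutesManifestLike> {
  return fs.readJSON(manifestPath);
}
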
@@ -368,10 +366,10 @@ export async function getDynamicRoutes(
  let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;

  try {
    ({ getRouteRegex, getSortedRoutes } = require(resolveFrom(
      entryPath,
      'next-server/dist/lib/router/utils'
    )));
    // NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
    ({ getRouteRegex, getSortedRoutes } = eval('require')(
      resolveFrom(entryPath, 'next-server/dist/lib/router/utils')
    ));
    if (typeof getRouteRegex !== 'function') {
      getRouteRegex = undefined;
    }
@@ -379,10 +377,10 @@ export async function getDynamicRoutes(

  if (!getRouteRegex || !getSortedRoutes) {
    try {
      ({ getRouteRegex, getSortedRoutes } = require(resolveFrom(
        entryPath,
        'next/dist/next-server/lib/router/utils'
      )));
      // NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
      ({ getRouteRegex, getSortedRoutes } = eval('require')(
        resolveFrom(entryPath, 'next/dist/next-server/lib/router/utils')
      ));
      if (typeof getRouteRegex !== 'function') {
        getRouteRegex = undefined;
      }
@@ -536,9 +534,7 @@ export async function getImagesManifest(
    return undefined;
  }

  // eslint-disable-next-line @typescript-eslint/no-var-requires
  const imagesManifest: NextImagesManifest = require(pathImagesManifest);
  return imagesManifest;
  return fs.readJson(pathImagesManifest);
}

type FileMap = { [page: string]: FileFsRef };
@@ -2145,10 +2141,12 @@ export async function getMiddlewareBundle({
  entryPath,
  outputDirectory,
  routesManifest,
  isCorrectMiddlewareOrder,
}: {
  entryPath: string;
  outputDirectory: string;
  routesManifest: RoutesManifest;
  isCorrectMiddlewareOrder: boolean;
}) {
  const middlewareManifest = await getMiddlewareManifest(
    entryPath,
@@ -2270,16 +2268,17 @@ export async function getMiddlewareBundle({
        const route: Route = {
          continue: true,
          src: worker.routeSrc,
          ...(worker.type === 'middleware'
            ? {
                middlewarePath: shortPath,
                override: true,
              }
            : {
                dest: shortPath,
              }),
        };

        if (worker.type === 'function') {
          route.dest = shortPath;
        } else {
          route.middlewarePath = shortPath;
          if (isCorrectMiddlewareOrder) {
            route.override = true;
          }
        }

        if (routesManifest.version > 3 && isDynamicRoute(worker.page)) {
          source.dynamicRouteMap.set(worker.page, route);
        } else {
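
For readability, a condensed sketch of the new route-construction branch above, with assumed minimal types (RouteLike and WorkerLike are illustrative, not the builder's real interfaces):

interface RouteLike {
  src: string;
  continue?: boolean;
  dest?: string;
  middlewarePath?: string;
  override?: boolean;
}

type WorkerLike = { type: 'function' | 'middleware'; routeSrc: string };

// Edge API functions are routed via `dest`; root middleware is attached via
// `middlewarePath`, and `override` is only set on Next.js versions with the
// corrected middleware ordering.
function buildWorkerRoute(
  worker: WorkerLike,
  shortPath: string,
  isCorrectMiddlewareOrder: boolean
): RouteLike {
  const route: RouteLike = { continue: true, src: worker.routeSrc };
  if (worker.type === 'function') {
    route.dest = shortPath;
  } else {
    route.middlewarePath = shortPath;
    if (isCorrectMiddlewareOrder) {
      route.override = true;
    }
  }
  return route;
}
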
@@ -2321,8 +2320,7 @@ export async function getMiddlewareManifest(
    return;
  }

  // eslint-disable-next-line @typescript-eslint/no-var-requires
  return require(middlewareManifestPath);
  return fs.readJSON(middlewareManifestPath);
}

/**

@@ -4,6 +4,8 @@ const cheerio = require('cheerio');
const { check, deployAndTest } = require('../../utils');
const fetch = require('../../../../../test/lib/deployment/fetch-retry');

const ABSOLUTE_URL_PATTERN = /^https?:\/\//i;

async function checkForChange(url, initialValue, hardError) {
  return check(
    async () => {
@@ -32,6 +34,13 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
  it('should deploy and pass probe checks', async () => {
    const info = await deployAndTest(__dirname);
    Object.assign(ctx, info);

    if (!ABSOLUTE_URL_PATTERN.test(ctx.deploymentUrl)) {
      const details = JSON.stringify(ctx);
      throw new Error(
        `Deployment did not result in an absolute deploymentUrl: ${details}`
      );
    }
  });

  it('should revalidate content properly from /', async () => {