Mirror of https://github.com/LukeHagar/vercel.git, synced 2025-12-11 12:57:46 +00:00

Compare commits (28 commits): vercel@27. ... @vercel/py
| Author | SHA1 | Date |
|---|---|---|
|  | 3d3774ee7e |  |
|  | 50f8eec7cb |  |
|  | 45374e2f90 |  |
|  | fd9142b6f3 |  |
|  | 8cf67b549b |  |
|  | 5dc6f48e44 |  |
|  | 66c8544e8f |  |
|  | 0140db38fa |  |
|  | e5421c27e8 |  |
|  | 5afc527233 |  |
|  | de9518b010 |  |
|  | c322d1dbba |  |
|  | 18c19ead76 |  |
|  | 9d80c27382 |  |
|  | bef1aec766 |  |
|  | 4f4a42813f |  |
|  | 181a492d91 |  |
|  | 1be7a80bb8 |  |
|  | 0428d4744e |  |
|  | 2a929a4bb9 |  |
|  | accd308dc5 |  |
|  | e2d4efab08 |  |
|  | 7e0dd6f808 |  |
|  | 8971e02e49 |  |
|  | 10c91c8579 |  |
|  | bfdbe58675 |  |
|  | 7bdaf107b7 |  |
|  | 8de100f0e1 |  |
@@ -1,4 +1,5 @@
# https://prettier.io/docs/en/ignore.html

# ignore this file with an intentional syntax error
# ignore these files with an intentional syntax error
packages/cli/test/dev/fixtures/edge-function-error/api/edge-error-syntax.js
packages/cli/test/fixtures/unit/commands/build/node-error/api/typescript.ts
@@ -11,7 +11,7 @@
"dependencies": {
"@sentry/node": "5.11.1",
"got": "10.2.1",
"node-fetch": "2.6.1",
"node-fetch": "2.6.7",
"parse-github-url": "1.0.2",
"tar-fs": "2.0.0",
"unzip-stream": "0.3.0"
@@ -26,12 +26,12 @@
"jest": "28.0.2",
"json5": "2.1.1",
"lint-staged": "9.2.5",
"node-fetch": "2.6.1",
"node-fetch": "2.6.7",
"npm-package-arg": "6.1.0",
"prettier": "2.6.2",
"ts-eager": "2.0.2",
"ts-jest": "28.0.5",
"turbo": "1.3.1"
"turbo": "1.3.2-canary.1"
},
"scripts": {
"lerna": "lerna",
@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "5.0.2",
"version": "5.0.4",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",
@@ -44,7 +44,7 @@
"js-yaml": "3.13.1",
"minimatch": "3.0.4",
"multistream": "2.1.1",
"node-fetch": "2.6.1",
"node-fetch": "2.6.7",
"semver": "6.1.1",
"typescript": "4.3.4",
"yazl": "2.5.1"
@@ -33,9 +33,6 @@ function getHint(isAuto = false) {
: `Please set "engines": { "node": "${range}" } in your \`package.json\` file to use Node.js ${major}.`;
}

const upstreamProvider =
'This change is the result of a decision made by an upstream infrastructure provider (AWS).';

export function getLatestNodeVersion() {
return allOptions[0];
}
@@ -75,7 +72,7 @@ export async function getSupportedNodeVersion(
throw new NowBuildError({
code: 'BUILD_UTILS_NODE_VERSION_DISCONTINUED',
link: 'http://vercel.link/node-version',
message: `${intro} ${getHint(isAuto)} ${upstreamProvider}`,
message: `${intro} ${getHint(isAuto)}`,
});
}
@@ -86,9 +83,9 @@ export async function getSupportedNodeVersion(
console.warn(
`Error: Node.js version ${
selection.range
} is deprecated. Deployments created on or after ${d} will fail to build. ${getHint(
} has reached End-of-Life. Deployments created on or after ${d} will fail to build. ${getHint(
isAuto
)} ${upstreamProvider}`
)}`
);
}
8 packages/build-utils/test/unit.test.ts vendored
@@ -387,10 +387,10 @@ it('should warn for deprecated versions, soon to be discontinued', async () => {
|
||||
12
|
||||
);
|
||||
expect(warningMessages).toStrictEqual([
|
||||
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
|
||||
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
|
||||
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
|
||||
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
|
||||
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
|
||||
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
|
||||
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
|
||||
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
|
||||
]);
|
||||
|
||||
global.Date.now = realDateNow;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "27.1.1",
|
||||
"version": "27.2.0",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -42,16 +42,16 @@
|
||||
"node": ">= 14"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "5.0.2",
|
||||
"@vercel/go": "2.0.6",
|
||||
"@vercel/hydrogen": "0.0.3",
|
||||
"@vercel/next": "3.1.5",
|
||||
"@vercel/node": "2.4.3",
|
||||
"@vercel/python": "3.0.6",
|
||||
"@vercel/redwood": "1.0.7",
|
||||
"@vercel/remix": "1.0.8",
|
||||
"@vercel/ruby": "1.3.14",
|
||||
"@vercel/static-build": "1.0.6",
|
||||
"@vercel/build-utils": "5.0.4",
|
||||
"@vercel/go": "2.0.8",
|
||||
"@vercel/hydrogen": "0.0.5",
|
||||
"@vercel/next": "3.1.8",
|
||||
"@vercel/node": "2.4.5",
|
||||
"@vercel/python": "3.1.0",
|
||||
"@vercel/redwood": "1.0.9",
|
||||
"@vercel/remix": "1.0.10",
|
||||
"@vercel/ruby": "1.3.16",
|
||||
"@vercel/static-build": "1.0.9",
|
||||
"update-notifier": "5.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -96,11 +96,11 @@
|
||||
"@types/which": "1.3.2",
|
||||
"@types/write-json-file": "2.2.1",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel/client": "12.1.1",
|
||||
"@vercel/frameworks": "1.1.0",
|
||||
"@vercel/fs-detectors": "2.0.0",
|
||||
"@vercel/client": "12.1.3",
|
||||
"@vercel/frameworks": "1.1.1",
|
||||
"@vercel/fs-detectors": "2.0.1",
|
||||
"@vercel/fun": "1.0.4",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"@zeit/fun": "0.11.2",
|
||||
"@zeit/source-map-support": "0.6.2",
|
||||
"ajv": "6.12.2",
|
||||
"alpha-sort": "2.0.1",
|
||||
@@ -147,7 +147,7 @@
|
||||
"minimatch": "3.0.4",
|
||||
"mri": "1.1.5",
|
||||
"ms": "2.1.2",
|
||||
"node-fetch": "2.6.1",
|
||||
"node-fetch": "2.6.7",
|
||||
"npm-package-arg": "6.1.0",
|
||||
"open": "8.4.0",
|
||||
"ora": "3.4.0",
|
||||
|
||||
@@ -56,11 +56,11 @@ async function main() {
|
||||
args.push('src/index.ts');
|
||||
await execa('yarn', args, { stdio: 'inherit', cwd: dirRoot });
|
||||
|
||||
// `ncc` has some issues with `@zeit/fun`'s runtime files:
|
||||
// `ncc` has some issues with `@vercel/fun`'s runtime files:
|
||||
// - Executable bits on the `bootstrap` files appear to be lost:
|
||||
// https://github.com/zeit/ncc/pull/182
|
||||
// https://github.com/vercel/ncc/pull/182
|
||||
// - The `bootstrap.js` asset does not get copied into the output dir:
|
||||
// https://github.com/zeit/ncc/issues/278
|
||||
// https://github.com/vercel/ncc/issues/278
|
||||
//
|
||||
// Aside from those issues, all the same files from the `runtimes` directory
|
||||
// should be copied into the output runtimes dir, specifically the `index.js`
|
||||
@@ -70,7 +70,7 @@ async function main() {
|
||||
// with `fun`'s cache invalidation mechanism and they need to be shasum'd.
|
||||
const runtimes = join(
|
||||
dirRoot,
|
||||
'../../node_modules/@zeit/fun/dist/src/runtimes'
|
||||
'../../node_modules/@vercel/fun/dist/src/runtimes'
|
||||
);
|
||||
await cpy('**/*', join(distRoot, 'runtimes'), {
|
||||
parents: true,
|
||||
|
||||
@@ -25,7 +25,7 @@ import {
|
||||
MergeRoutesProps,
|
||||
Route,
|
||||
} from '@vercel/routing-utils';
|
||||
import { VercelConfig } from '@vercel/client';
|
||||
import type { VercelConfig } from '@vercel/client';
|
||||
|
||||
import pull from './pull';
|
||||
import { staticFiles as getFiles } from '../util/get-files';
|
||||
@@ -36,7 +36,10 @@ import * as cli from '../util/pkg-name';
|
||||
import cliPkg from '../util/pkg';
|
||||
import readJSONFile from '../util/read-json-file';
|
||||
import { CantParseJSONFile } from '../util/errors-ts';
|
||||
import { readProjectSettings } from '../util/projects/project-settings';
|
||||
import {
|
||||
ProjectLinkAndSettings,
|
||||
readProjectSettings,
|
||||
} from '../util/projects/project-settings';
|
||||
import { VERCEL_DIR } from '../util/projects/link';
|
||||
import confirm from '../util/input/confirm';
|
||||
import { emoji, prependEmoji } from '../util/emoji';
|
||||
@@ -46,12 +49,31 @@ import {
|
||||
PathOverride,
|
||||
writeBuildResult,
|
||||
} from '../util/build/write-build-result';
|
||||
import { importBuilders, BuilderWithPkg } from '../util/build/import-builders';
|
||||
import { importBuilders } from '../util/build/import-builders';
|
||||
import { initCorepack, cleanupCorepack } from '../util/build/corepack';
|
||||
import { sortBuilders } from '../util/build/sort-builders';
|
||||
import { toEnumerableError } from '../util/error';
|
||||
|
||||
type BuildResult = BuildResultV2 | BuildResultV3;
|
||||
|
||||
interface SerializedBuilder extends Builder {
|
||||
error?: any;
|
||||
require?: string;
|
||||
requirePath?: string;
|
||||
apiVersion: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Contents of the `builds.json` file.
|
||||
*/
|
||||
interface BuildsManifest {
|
||||
'//': string;
|
||||
target: string;
|
||||
argv: string[];
|
||||
error?: any;
|
||||
builds?: SerializedBuilder[];
|
||||
}
|
||||
|
||||
const help = () => {
|
||||
return console.log(`
|
||||
${chalk.bold(`${cli.logo} ${cli.name} build`)}
|
||||
@@ -168,7 +190,6 @@ export default async function main(client: Client): Promise<number> {
|
||||
}
|
||||
|
||||
// TODO: load env vars from the API, fall back to local files if that fails
|
||||
|
||||
const envPath = await checkExists([
|
||||
join(cwd, VERCEL_DIR, `.env.${target}.local`),
|
||||
join(cwd, `.env`),
|
||||
@@ -182,6 +203,48 @@ export default async function main(client: Client): Promise<number> {
|
||||
process.env.VERCEL = '1';
|
||||
process.env.NOW_BUILDER = '1';
|
||||
|
||||
// Delete output directory from potential previous build
|
||||
const outputDir = argv['--output']
|
||||
? resolve(argv['--output'])
|
||||
: join(cwd, OUTPUT_DIR);
|
||||
await fs.remove(outputDir);
|
||||
|
||||
const buildsJson: BuildsManifest = {
|
||||
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
|
||||
target,
|
||||
argv: process.argv,
|
||||
};
|
||||
|
||||
try {
|
||||
return await doBuild(client, project, buildsJson, cwd, outputDir);
|
||||
} catch (err: any) {
|
||||
output.prettyError(err);
|
||||
|
||||
// Write error to `builds.json` file
|
||||
buildsJson.error = toEnumerableError(err);
|
||||
const buildsJsonPath = join(outputDir, 'builds.json');
|
||||
const configJsonPath = join(outputDir, 'config.json');
|
||||
await fs.outputJSON(buildsJsonPath, buildsJson, {
|
||||
spaces: 2,
|
||||
});
|
||||
await fs.writeJSON(configJsonPath, { version: 3 }, { spaces: 2 });
|
||||
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute the Project's builders. If this function throws an error,
|
||||
* then it will be serialized into the `builds.json` manifest file.
|
||||
*/
|
||||
async function doBuild(
|
||||
client: Client,
|
||||
project: ProjectLinkAndSettings,
|
||||
buildsJson: BuildsManifest,
|
||||
cwd: string,
|
||||
outputDir: string
|
||||
): Promise<number> {
|
||||
const { output } = client;
|
||||
const workPath = join(cwd, project.settings.rootDirectory || '.');
|
||||
|
||||
// Load `package.json` and `vercel.json` files
|
||||
@@ -199,19 +262,18 @@ export default async function main(client: Client): Promise<number> {
|
||||
normalizePath(relative(workPath, f))
|
||||
);
|
||||
|
||||
const routesResult = getTransformedRoutes({ nowConfig: vercelConfig || {} });
|
||||
const routesResult = getTransformedRoutes(vercelConfig || {});
|
||||
if (routesResult.error) {
|
||||
output.prettyError(routesResult.error);
|
||||
return 1;
|
||||
throw routesResult.error;
|
||||
}
|
||||
|
||||
if (vercelConfig?.builds && vercelConfig.functions) {
|
||||
output.prettyError({
|
||||
throw new NowBuildError({
|
||||
code: 'bad_request',
|
||||
message:
|
||||
'The `functions` property cannot be used in conjunction with the `builds` property. Please remove one of them.',
|
||||
link: 'https://vercel.link/functions-and-builds',
|
||||
});
|
||||
return 1;
|
||||
}
|
||||
|
||||
let builds = vercelConfig?.builds || [];
|
||||
@@ -229,12 +291,12 @@ export default async function main(client: Client): Promise<number> {
|
||||
const detectedBuilders = await detectBuilders(files, pkg, {
|
||||
...vercelConfig,
|
||||
projectSettings: project.settings,
|
||||
ignoreBuildScript: true,
|
||||
featHandleMiss: true,
|
||||
});
|
||||
|
||||
if (detectedBuilders.errors && detectedBuilders.errors.length > 0) {
|
||||
output.prettyError(detectedBuilders.errors[0]);
|
||||
return 1;
|
||||
throw detectedBuilders.errors[0];
|
||||
}
|
||||
|
||||
for (const w of detectedBuilders.warnings) {
|
||||
@@ -267,13 +329,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
|
||||
const builderSpecs = new Set(builds.map(b => b.use));
|
||||
|
||||
let buildersWithPkgs: Map<string, BuilderWithPkg>;
|
||||
try {
|
||||
buildersWithPkgs = await importBuilders(builderSpecs, cwd, output);
|
||||
} catch (err: any) {
|
||||
output.prettyError(err);
|
||||
return 1;
|
||||
}
|
||||
const buildersWithPkgs = await importBuilders(builderSpecs, cwd, output);
|
||||
|
||||
// Populate Files -> FileFsRef mapping
|
||||
const filesMap: Files = {};
|
||||
@@ -283,12 +339,6 @@ export default async function main(client: Client): Promise<number> {
|
||||
filesMap[path] = new FileFsRef({ mode, fsPath });
|
||||
}
|
||||
|
||||
// Delete output directory from potential previous build
|
||||
const outputDir = argv['--output']
|
||||
? resolve(argv['--output'])
|
||||
: join(cwd, OUTPUT_DIR);
|
||||
await fs.remove(outputDir);
|
||||
|
||||
const buildStamp = stamp();
|
||||
|
||||
// Create fresh new output directory
|
||||
@@ -297,32 +347,31 @@ export default async function main(client: Client): Promise<number> {
|
||||
const ops: Promise<Error | void>[] = [];
|
||||
|
||||
// Write the `detectedBuilders` result to output dir
|
||||
ops.push(
|
||||
fs.writeJSON(
|
||||
join(outputDir, 'builds.json'),
|
||||
{
|
||||
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
|
||||
target,
|
||||
argv: process.argv,
|
||||
builds: builds.map(build => {
|
||||
const builderWithPkg = buildersWithPkgs.get(build.use);
|
||||
if (!builderWithPkg) {
|
||||
throw new Error(`Failed to load Builder "${build.use}"`);
|
||||
}
|
||||
const { builder, pkg: builderPkg } = builderWithPkg;
|
||||
return {
|
||||
require: builderPkg.name,
|
||||
requirePath: builderWithPkg.path,
|
||||
apiVersion: builder.version,
|
||||
...build,
|
||||
};
|
||||
}),
|
||||
},
|
||||
{
|
||||
spaces: 2,
|
||||
const buildsJsonBuilds = new Map<Builder, SerializedBuilder>(
|
||||
builds.map(build => {
|
||||
const builderWithPkg = buildersWithPkgs.get(build.use);
|
||||
if (!builderWithPkg) {
|
||||
throw new Error(`Failed to load Builder "${build.use}"`);
|
||||
}
|
||||
)
|
||||
const { builder, pkg: builderPkg } = builderWithPkg;
|
||||
return [
|
||||
build,
|
||||
{
|
||||
require: builderPkg.name,
|
||||
requirePath: builderWithPkg.path,
|
||||
apiVersion: builder.version,
|
||||
...build,
|
||||
},
|
||||
];
|
||||
})
|
||||
);
|
||||
buildsJson.builds = Array.from(buildsJsonBuilds.values());
|
||||
const buildsJsonPath = join(outputDir, 'builds.json');
|
||||
const writeBuildsJsonPromise = fs.writeJSON(buildsJsonPath, buildsJson, {
|
||||
spaces: 2,
|
||||
});
|
||||
|
||||
ops.push(writeBuildsJsonPromise);
|
||||
|
||||
// The `meta` config property is re-used for each Builder
|
||||
// invocation so that Builders can share state between
|
||||
@@ -347,51 +396,73 @@ export default async function main(client: Client): Promise<number> {
|
||||
if (!builderWithPkg) {
|
||||
throw new Error(`Failed to load Builder "${build.use}"`);
|
||||
}
|
||||
const { builder, pkg: builderPkg } = builderWithPkg;
|
||||
|
||||
const buildConfig: Config = {
|
||||
outputDirectory: project.settings.outputDirectory ?? undefined,
|
||||
...build.config,
|
||||
projectSettings: project.settings,
|
||||
installCommand: project.settings.installCommand ?? undefined,
|
||||
devCommand: project.settings.devCommand ?? undefined,
|
||||
buildCommand: project.settings.buildCommand ?? undefined,
|
||||
framework: project.settings.framework,
|
||||
nodeVersion: project.settings.nodeVersion,
|
||||
};
|
||||
const buildOptions: BuildOptions = {
|
||||
files: filesMap,
|
||||
entrypoint: build.src,
|
||||
workPath,
|
||||
repoRootPath,
|
||||
config: buildConfig,
|
||||
meta,
|
||||
};
|
||||
output.debug(
|
||||
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
|
||||
);
|
||||
const buildResult = await builder.build(buildOptions);
|
||||
try {
|
||||
const { builder, pkg: builderPkg } = builderWithPkg;
|
||||
|
||||
// Store the build result to generate the final `config.json` after
|
||||
// all builds have completed
|
||||
buildResults.set(build, buildResult);
|
||||
const buildConfig: Config = {
|
||||
outputDirectory: project.settings.outputDirectory ?? undefined,
|
||||
...build.config,
|
||||
projectSettings: project.settings,
|
||||
installCommand: project.settings.installCommand ?? undefined,
|
||||
devCommand: project.settings.devCommand ?? undefined,
|
||||
buildCommand: project.settings.buildCommand ?? undefined,
|
||||
framework: project.settings.framework,
|
||||
nodeVersion: project.settings.nodeVersion,
|
||||
};
|
||||
const buildOptions: BuildOptions = {
|
||||
files: filesMap,
|
||||
entrypoint: build.src,
|
||||
workPath,
|
||||
repoRootPath,
|
||||
config: buildConfig,
|
||||
meta,
|
||||
};
|
||||
output.debug(
|
||||
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
|
||||
);
|
||||
const buildResult = await builder.build(buildOptions);
|
||||
|
||||
// Start flushing the file outputs to the filesystem asynchronously
|
||||
ops.push(
|
||||
writeBuildResult(
|
||||
outputDir,
|
||||
buildResult,
|
||||
build,
|
||||
builder,
|
||||
builderPkg,
|
||||
vercelConfig?.cleanUrls
|
||||
).then(
|
||||
override => {
|
||||
if (override) overrides.push(override);
|
||||
},
|
||||
err => err
|
||||
)
|
||||
);
|
||||
// Store the build result to generate the final `config.json` after
|
||||
// all builds have completed
|
||||
buildResults.set(build, buildResult);
|
||||
|
||||
// Start flushing the file outputs to the filesystem asynchronously
|
||||
ops.push(
|
||||
writeBuildResult(
|
||||
outputDir,
|
||||
buildResult,
|
||||
build,
|
||||
builder,
|
||||
builderPkg,
|
||||
vercelConfig?.cleanUrls
|
||||
).then(
|
||||
override => {
|
||||
if (override) overrides.push(override);
|
||||
},
|
||||
err => err
|
||||
)
|
||||
);
|
||||
} catch (err: any) {
|
||||
const writeConfigJsonPromise = fs.writeJSON(
|
||||
join(outputDir, 'config.json'),
|
||||
{ version: 3 },
|
||||
{ spaces: 2 }
|
||||
);
|
||||
|
||||
await Promise.all([writeBuildsJsonPromise, writeConfigJsonPromise]);
|
||||
|
||||
const buildJsonBuild = buildsJsonBuilds.get(build);
|
||||
if (buildJsonBuild) {
|
||||
buildJsonBuild.error = toEnumerableError(err);
|
||||
|
||||
await fs.writeJSON(buildsJsonPath, buildsJson, {
|
||||
spaces: 2,
|
||||
});
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (corepackShimDir) {
|
||||
@@ -400,15 +471,12 @@ export default async function main(client: Client): Promise<number> {
|
||||
|
||||
// Wait for filesystem operations to complete
|
||||
// TODO render progress bar?
|
||||
let hadError = false;
|
||||
const errors = await Promise.all(ops);
|
||||
for (const error of errors) {
|
||||
if (error) {
|
||||
hadError = true;
|
||||
output.prettyError(error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
if (hadError) return 1;
|
||||
|
||||
// Merge existing `config.json` file into the one that will be produced
|
||||
const configPath = join(outputDir, 'config.json');
|
||||
|
||||
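For orientation, a sketch of the `builds.json` manifest that the error path above writes under `.vercel/output/`; the shape follows the `BuildsManifest` interface from this hunk, and every value below is an illustrative placeholder rather than real command output.

```ts
// Hypothetical contents of `.vercel/output/builds.json` after `doBuild()` throws.
// Field names come from the BuildsManifest interface above; values are made up.
const exampleBuildsJson = {
  '//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
  target: 'preview',
  argv: ['node', 'vercel', 'build'],
  error: { name: 'Error', message: '<serialized via toEnumerableError()>' },
};
// A matching `config.json` containing `{ "version": 3 }` is written alongside it,
// per the catch block above.
```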
@@ -95,6 +95,7 @@ export default async (client: Client) => {
|
||||
// deprecated
|
||||
'--name': String,
|
||||
'-n': '--name',
|
||||
'--no-clipboard': Boolean,
|
||||
'--target': String,
|
||||
});
|
||||
} catch (error) {
|
||||
@@ -183,6 +184,17 @@ export default async (client: Client) => {
|
||||
);
|
||||
}
|
||||
|
||||
if (argv['--no-clipboard']) {
|
||||
output.print(
|
||||
`${prependEmoji(
|
||||
`The ${param(
|
||||
'--no-clipboard'
|
||||
)} option was ignored because it is the default behavior. Please remove it.`,
|
||||
emoji('warning')
|
||||
)}\n`
|
||||
);
|
||||
}
|
||||
|
||||
// build `target`
|
||||
const target = parseTarget(output, argv['--target'], argv['--prod']);
|
||||
if (typeof target === 'number') {
|
||||
@@ -416,7 +428,7 @@ export default async (client: Client) => {
|
||||
parseMeta(argv['--meta'])
|
||||
);
|
||||
|
||||
const gitMetadata = await createGitMeta(path, output);
|
||||
const gitMetadata = await createGitMeta(path, output, project);
|
||||
|
||||
// Merge dotenv config, `env` from vercel.json, and `--env` / `-e` arguments
|
||||
const deploymentEnv = Object.assign(
|
||||
|
||||
21 packages/cli/src/commands/env/index.ts vendored
@@ -1,7 +1,9 @@
|
||||
import chalk from 'chalk';
|
||||
import { ProjectEnvTarget } from '../../types';
|
||||
import Client from '../../util/client';
|
||||
import { getEnvTargetPlaceholder } from '../../util/env/env-target';
|
||||
import {
|
||||
getEnvTargetPlaceholder,
|
||||
isValidEnvTarget,
|
||||
} from '../../util/env/env-target';
|
||||
import getArgs from '../../util/get-args';
|
||||
import getInvalidSubcommand from '../../util/get-invalid-subcommand';
|
||||
import getSubcommand from '../../util/get-subcommand';
|
||||
@@ -29,6 +31,7 @@ const help = () => {
|
||||
${chalk.dim('Options:')}
|
||||
|
||||
-h, --help Output usage information
|
||||
--environment Set the Environment (development, preview, production) when pulling Environment Variables
|
||||
-A ${chalk.bold.underline('FILE')}, --local-config=${chalk.bold.underline(
|
||||
'FILE'
|
||||
)} Path to the local ${'`vercel.json`'} file
|
||||
@@ -111,6 +114,7 @@ export default async function main(client: Client) {
|
||||
argv = getArgs(client.argv.slice(2), {
|
||||
'--yes': Boolean,
|
||||
'-y': '--yes',
|
||||
'--environment': String,
|
||||
});
|
||||
} catch (error) {
|
||||
handleError(error);
|
||||
@@ -126,6 +130,17 @@ export default async function main(client: Client) {
|
||||
const subArgs = argv._.slice(1);
|
||||
const { subcommand, args } = getSubcommand(subArgs, COMMAND_CONFIG);
|
||||
const { output, config } = client;
|
||||
|
||||
const target = argv['--environment']?.toLowerCase() || 'development';
|
||||
if (!isValidEnvTarget(target)) {
|
||||
output.error(
|
||||
`Invalid environment \`${chalk.cyan(
|
||||
target
|
||||
)}\`. Valid options: ${getEnvTargetPlaceholder()}`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const link = await getLinkedProject(client, cwd);
|
||||
if (link.status === 'error') {
|
||||
return link.exitCode;
|
||||
@@ -150,7 +165,7 @@ export default async function main(client: Client) {
|
||||
return pull(
|
||||
client,
|
||||
project,
|
||||
ProjectEnvTarget.Development,
|
||||
target,
|
||||
argv,
|
||||
args,
|
||||
output,
|
||||
|
||||
@@ -2,8 +2,9 @@ import chalk from 'chalk';
|
||||
import { join } from 'path';
|
||||
import { Org, Project } from '../../types';
|
||||
import Client from '../../util/client';
|
||||
import { parseGitConfig, pluckRemoteUrl } from '../../util/create-git-meta';
|
||||
import { parseGitConfig, pluckRemoteUrls } from '../../util/create-git-meta';
|
||||
import confirm from '../../util/input/confirm';
|
||||
import list, { ListChoice } from '../../util/input/list';
|
||||
import { Output } from '../../util/output';
|
||||
import link from '../../util/output/link';
|
||||
import { getCommandName } from '../../util/pkg-name';
|
||||
@@ -64,20 +65,37 @@ export default async function connect(
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
const remoteUrl = pluckRemoteUrl(gitConfig);
|
||||
if (!remoteUrl) {
|
||||
const remoteUrls = pluckRemoteUrls(gitConfig);
|
||||
if (!remoteUrls) {
|
||||
output.error(
|
||||
`No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run ${chalk.cyan(
|
||||
`No remote URLs found in your Git config. Make sure you've configured a remote repo in your local Git config. Run ${chalk.cyan(
|
||||
'`git remote --help`'
|
||||
)} for more details.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
output.log(`Identified Git remote "origin": ${link(remoteUrl)}`);
|
||||
|
||||
let remoteUrl: string;
|
||||
|
||||
if (Object.keys(remoteUrls).length > 1) {
|
||||
output.log(`Found multiple remote URLs.`);
|
||||
remoteUrl = await selectRemoteUrl(client, remoteUrls);
|
||||
} else {
|
||||
// If only one is found, get it — usually "origin"
|
||||
remoteUrl = Object.values(remoteUrls)[0];
|
||||
}
|
||||
|
||||
if (remoteUrl === '') {
|
||||
output.log('Aborted.');
|
||||
return 0;
|
||||
}
|
||||
|
||||
output.log(`Connecting Git remote: ${link(remoteUrl)}`);
|
||||
|
||||
const parsedUrl = parseRepoUrl(remoteUrl);
|
||||
if (!parsedUrl) {
|
||||
output.error(
|
||||
`Failed to parse Git repo data from the following remote URL in your Git config: ${link(
|
||||
`Failed to parse Git repo data from the following remote URL: ${link(
|
||||
remoteUrl
|
||||
)}`
|
||||
);
|
||||
@@ -166,3 +184,22 @@ async function confirmRepoConnect(
|
||||
}
|
||||
return shouldReplaceProject;
|
||||
}
|
||||
|
||||
async function selectRemoteUrl(
|
||||
client: Client,
|
||||
remoteUrls: { [key: string]: string }
|
||||
): Promise<string> {
|
||||
let choices: ListChoice[] = [];
|
||||
for (const [urlKey, urlValue] of Object.entries(remoteUrls)) {
|
||||
choices.push({
|
||||
name: `${urlValue} ${chalk.gray(`(${urlKey})`)}`,
|
||||
value: urlValue,
|
||||
short: urlKey,
|
||||
});
|
||||
}
|
||||
|
||||
return await list(client, {
|
||||
message: 'Which remote do you want to connect?',
|
||||
choices,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -750,9 +750,7 @@ const main = async () => {
|
||||
|
||||
// Otherwise it is an unexpected error and we should show the trace
|
||||
// and an unexpected error message
|
||||
output.error(
|
||||
`An unexpected error occurred in ${subcommand}: ${err.stack}`
|
||||
);
|
||||
output.error(`An unexpected error occurred in ${subcommand}: ${err}`);
|
||||
}
|
||||
|
||||
return 1;
|
||||
|
||||
@@ -1,6 +1,14 @@
|
||||
import fs from 'fs-extra';
|
||||
import mimeTypes from 'mime-types';
|
||||
import { basename, dirname, extname, join, relative, resolve } from 'path';
|
||||
import {
|
||||
basename,
|
||||
dirname,
|
||||
extname,
|
||||
join,
|
||||
relative,
|
||||
resolve,
|
||||
posix,
|
||||
} from 'path';
|
||||
import {
|
||||
Builder,
|
||||
BuildResultV2,
|
||||
@@ -20,6 +28,7 @@ import pipe from 'promisepipe';
|
||||
import { unzip } from './unzip';
|
||||
import { VERCEL_DIR } from '../projects/link';
|
||||
|
||||
const { normalize } = posix;
|
||||
export const OUTPUT_DIR = join(VERCEL_DIR, 'output');
|
||||
|
||||
export async function writeBuildResult(
|
||||
@@ -67,6 +76,13 @@ export interface PathOverride {
|
||||
path?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove duplicate slashes as well as leading/trailing slashes.
|
||||
*/
|
||||
function stripDuplicateSlashes(path: string): string {
|
||||
return normalize(path).replace(/(^\/|\/$)/g, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the output from the `build()` return value of a v2 Builder to
|
||||
* the filesystem.
|
||||
@@ -84,16 +100,17 @@ async function writeBuildResultV2(
|
||||
const lambdas = new Map<Lambda, string>();
|
||||
const overrides: Record<string, PathOverride> = {};
|
||||
for (const [path, output] of Object.entries(buildResult.output)) {
|
||||
const normalizedPath = stripDuplicateSlashes(path);
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(outputDir, output, path, lambdas);
|
||||
await writeLambda(outputDir, output, normalizedPath, lambdas);
|
||||
} else if (isPrerender(output)) {
|
||||
await writeLambda(outputDir, output.lambda, path, lambdas);
|
||||
await writeLambda(outputDir, output.lambda, normalizedPath, lambdas);
|
||||
|
||||
// Write the fallback file alongside the Lambda directory
|
||||
let fallback = output.fallback;
|
||||
if (fallback) {
|
||||
const ext = getFileExtension(fallback);
|
||||
const fallbackName = `${path}.prerender-fallback${ext}`;
|
||||
const fallbackName = `${normalizedPath}.prerender-fallback${ext}`;
|
||||
const fallbackPath = join(outputDir, 'functions', fallbackName);
|
||||
const stream = fallback.toStream();
|
||||
await pipe(
|
||||
@@ -109,7 +126,7 @@ async function writeBuildResultV2(
|
||||
const prerenderConfigPath = join(
|
||||
outputDir,
|
||||
'functions',
|
||||
`${path}.prerender-config.json`
|
||||
`${normalizedPath}.prerender-config.json`
|
||||
);
|
||||
const prerenderConfig = {
|
||||
...output,
|
||||
@@ -118,12 +135,20 @@ async function writeBuildResultV2(
|
||||
};
|
||||
await fs.writeJSON(prerenderConfigPath, prerenderConfig, { spaces: 2 });
|
||||
} else if (isFile(output)) {
|
||||
await writeStaticFile(outputDir, output, path, overrides, cleanUrls);
|
||||
await writeStaticFile(
|
||||
outputDir,
|
||||
output,
|
||||
normalizedPath,
|
||||
overrides,
|
||||
cleanUrls
|
||||
);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(outputDir, output, path);
|
||||
await writeEdgeFunction(outputDir, output, normalizedPath);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unsupported output type: "${(output as any).type}" for ${path}`
|
||||
`Unsupported output type: "${
|
||||
(output as any).type
|
||||
}" for ${normalizedPath}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -145,9 +170,9 @@ async function writeBuildResultV3(
|
||||
throw new Error(`Expected "build.src" to be a string`);
|
||||
}
|
||||
const ext = extname(src);
|
||||
const path = build.config?.zeroConfig
|
||||
? src.substring(0, src.length - ext.length)
|
||||
: src;
|
||||
const path = stripDuplicateSlashes(
|
||||
build.config?.zeroConfig ? src.substring(0, src.length - ext.length) : src
|
||||
);
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(outputDir, output, path);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
|
||||
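A quick illustration of the effect of the new `stripDuplicateSlashes()` helper added above (the helper body is copied from the hunk; the sample inputs are made up):

```ts
import { posix } from 'path';
const { normalize } = posix;

// Same body as the helper introduced in write-build-result.ts above.
function stripDuplicateSlashes(path: string): string {
  return normalize(path).replace(/(^\/|\/$)/g, '');
}

stripDuplicateSlashes('withTrailingSlash/'); // => 'withTrailingSlash'
stripDuplicateSlashes('/api//users/');       // => 'api/users'
```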
@@ -3,76 +3,42 @@ import { join } from 'path';
|
||||
import ini from 'ini';
|
||||
import git from 'git-last-commit';
|
||||
import { exec } from 'child_process';
|
||||
import { GitMetadata } from '../types';
|
||||
import { GitMetadata, Project } from '../types';
|
||||
import { Output } from './output';
|
||||
|
||||
export function isDirty(directory: string, output: Output): Promise<boolean> {
|
||||
return new Promise(resolve => {
|
||||
exec('git status -s', { cwd: directory }, function (err, stdout, stderr) {
|
||||
let debugMessage = `Failed to determine if Git repo has been modified:`;
|
||||
if (err || stderr) {
|
||||
if (err) debugMessage += `\n${err}`;
|
||||
if (stderr) debugMessage += `\n${stderr.trim()}`;
|
||||
output.debug(debugMessage);
|
||||
return resolve(false);
|
||||
}
|
||||
resolve(stdout.trim().length > 0);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function getLastCommit(directory: string): Promise<git.Commit> {
|
||||
return new Promise((resolve, reject) => {
|
||||
git.getLastCommit(
|
||||
(err, commit) => {
|
||||
if (err) return reject(err);
|
||||
resolve(commit);
|
||||
},
|
||||
{ dst: directory }
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export async function parseGitConfig(configPath: string, output: Output) {
|
||||
try {
|
||||
return ini.parse(await fs.readFile(configPath, 'utf-8'));
|
||||
} catch (error) {
|
||||
output.debug(`Error while parsing repo data: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function pluckRemoteUrl(gitConfig: {
|
||||
[key: string]: any;
|
||||
}): string | undefined {
|
||||
// Assuming "origin" is the remote url that the user would want to use
|
||||
return gitConfig['remote "origin"']?.url;
|
||||
}
|
||||
|
||||
export async function getRemoteUrl(
|
||||
configPath: string,
|
||||
output: Output
|
||||
): Promise<string | null> {
|
||||
let gitConfig = await parseGitConfig(configPath, output);
|
||||
if (!gitConfig) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const originUrl = pluckRemoteUrl(gitConfig);
|
||||
if (originUrl) {
|
||||
return originUrl;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export async function createGitMeta(
|
||||
directory: string,
|
||||
output: Output
|
||||
output: Output,
|
||||
project?: Project | null
|
||||
): Promise<GitMetadata | undefined> {
|
||||
const remoteUrl = await getRemoteUrl(join(directory, '.git/config'), output);
|
||||
// If a Git repository is already connected via `vc git`, use that remote url
|
||||
let remoteUrl;
|
||||
if (project?.link) {
|
||||
// in the form of org/repo
|
||||
const { repo } = project.link;
|
||||
|
||||
const remoteUrls = await getRemoteUrls(
|
||||
join(directory, '.git/config'),
|
||||
output
|
||||
);
|
||||
if (remoteUrls) {
|
||||
for (const urlValue of Object.values(remoteUrls)) {
|
||||
if (urlValue.includes(repo)) {
|
||||
remoteUrl = urlValue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we couldn't get a remote url from the connected repo, default to the origin url
|
||||
if (!remoteUrl) {
|
||||
remoteUrl = await getOriginUrl(join(directory, '.git/config'), output);
|
||||
}
|
||||
// If we can't get the repo URL, then don't return any metadata
|
||||
if (!remoteUrl) {
|
||||
return;
|
||||
}
|
||||
|
||||
const [commit, dirty] = await Promise.all([
|
||||
getLastCommit(directory).catch(err => {
|
||||
output.debug(
|
||||
@@ -96,3 +62,97 @@ export async function createGitMeta(
|
||||
dirty,
|
||||
};
|
||||
}
|
||||
|
||||
function getLastCommit(directory: string): Promise<git.Commit> {
|
||||
return new Promise((resolve, reject) => {
|
||||
git.getLastCommit(
|
||||
(err, commit) => {
|
||||
if (err) return reject(err);
|
||||
resolve(commit);
|
||||
},
|
||||
{ dst: directory }
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export function isDirty(directory: string, output: Output): Promise<boolean> {
|
||||
return new Promise(resolve => {
|
||||
exec('git status -s', { cwd: directory }, function (err, stdout, stderr) {
|
||||
let debugMessage = `Failed to determine if Git repo has been modified:`;
|
||||
if (err || stderr) {
|
||||
if (err) debugMessage += `\n${err}`;
|
||||
if (stderr) debugMessage += `\n${stderr.trim()}`;
|
||||
output.debug(debugMessage);
|
||||
return resolve(false);
|
||||
}
|
||||
resolve(stdout.trim().length > 0);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function parseGitConfig(configPath: string, output: Output) {
|
||||
try {
|
||||
return ini.parse(await fs.readFile(configPath, 'utf-8'));
|
||||
} catch (error) {
|
||||
output.debug(`Error while parsing repo data: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function pluckRemoteUrls(gitConfig: {
|
||||
[key: string]: any;
|
||||
}): { [key: string]: string } | undefined {
|
||||
let remoteUrls: { [key: string]: string } = {};
|
||||
|
||||
for (const key of Object.keys(gitConfig)) {
|
||||
if (key.includes('remote')) {
|
||||
// ex. remote "origin" — matches origin
|
||||
const remoteName = key.match(/(?<=").*(?=")/g)?.[0];
|
||||
const remoteUrl = gitConfig[key]?.url;
|
||||
if (remoteName && remoteUrl) {
|
||||
remoteUrls[remoteName] = remoteUrl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (Object.keys(remoteUrls).length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
return remoteUrls;
|
||||
}
|
||||
|
||||
export async function getRemoteUrls(
|
||||
configPath: string,
|
||||
output: Output
|
||||
): Promise<{ [key: string]: string } | undefined> {
|
||||
const config = await parseGitConfig(configPath, output);
|
||||
if (!config) {
|
||||
return;
|
||||
}
|
||||
|
||||
const remoteUrls = pluckRemoteUrls(config);
|
||||
return remoteUrls;
|
||||
}
|
||||
|
||||
export function pluckOriginUrl(gitConfig: {
|
||||
[key: string]: any;
|
||||
}): string | undefined {
|
||||
// Assuming "origin" is the remote url that the user would want to use
|
||||
return gitConfig['remote "origin"']?.url;
|
||||
}
|
||||
|
||||
export async function getOriginUrl(
|
||||
configPath: string,
|
||||
output: Output
|
||||
): Promise<string | null> {
|
||||
let gitConfig = await parseGitConfig(configPath, output);
|
||||
if (!gitConfig) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const originUrl = pluckOriginUrl(gitConfig);
|
||||
if (originUrl) {
|
||||
return originUrl;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
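To make the multi-remote handling above concrete, a hypothetical input/output pair for the new `pluckRemoteUrls()`; the object literal stands in for what `ini.parse()` would return for a `.git/config` with two remotes (compare the fixture configs later in this diff).

```ts
// Assumes: import { pluckRemoteUrls } from './create-git-meta';
// (the exact import path depends on the caller, e.g. '../../util/create-git-meta' above)
const gitConfig = {
  'remote "origin"': { url: 'https://github.com/user/repo.git' },
  'remote "secondary"': { url: 'https://github.com/user/repo2.git' },
};

pluckRemoteUrls(gitConfig);
// => { origin: 'https://github.com/user/repo.git',
//      secondary: 'https://github.com/user/repo2.git' }
```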
@@ -11,14 +11,11 @@ import cliPkg from '../pkg';
|
||||
|
||||
import cmd from '../output/cmd';
|
||||
import { Output } from '../output';
|
||||
import { getDistTag } from '../get-dist-tag';
|
||||
import { NoBuilderCacheError } from '../errors-ts';
|
||||
|
||||
import * as staticBuilder from './static-builder';
|
||||
import { BuilderWithPackage } from './types';
|
||||
|
||||
type CliPackageJson = typeof cliPkg;
|
||||
|
||||
const require_: typeof require = eval('require');
|
||||
|
||||
const registryTypes = new Set(['version', 'tag', 'range']);
|
||||
@@ -37,8 +34,6 @@ const localBuilders: { [key: string]: BuilderWithPackage } = {
|
||||
'@vercel/static': createStaticBuilder('vercel'),
|
||||
};
|
||||
|
||||
const distTag = getDistTag(cliPkg.version);
|
||||
|
||||
export const cacheDirPromise = prepareCacheDir();
|
||||
export const builderDirPromise = prepareBuilderDir();
|
||||
|
||||
@@ -102,9 +97,8 @@ function parseVersionSafe(rawSpec: string) {
|
||||
|
||||
export function filterPackage(
|
||||
builderSpec: string,
|
||||
distTag: string,
|
||||
buildersPkg: PackageJson,
|
||||
cliPkg: Partial<CliPackageJson>
|
||||
cliPkg: Partial<PackageJson>
|
||||
) {
|
||||
if (builderSpec in localBuilders) return false;
|
||||
const parsed = npa(builderSpec);
|
||||
@@ -126,31 +120,6 @@ export function filterPackage(
|
||||
return false;
|
||||
}
|
||||
|
||||
// Skip install of already installed Runtime with tag compatible match
|
||||
if (
|
||||
parsed.name &&
|
||||
parsed.type === 'tag' &&
|
||||
parsed.fetchSpec === distTag &&
|
||||
buildersPkg.dependencies
|
||||
) {
|
||||
const parsedInstalled = npa(
|
||||
`${parsed.name}@${buildersPkg.dependencies[parsed.name]}`
|
||||
);
|
||||
if (parsedInstalled.type !== 'version') {
|
||||
return true;
|
||||
}
|
||||
const semverInstalled = semver.parse(parsedInstalled.rawSpec);
|
||||
if (!semverInstalled) {
|
||||
return true;
|
||||
}
|
||||
if (semverInstalled.prerelease.length > 0) {
|
||||
return semverInstalled.prerelease[0] !== distTag;
|
||||
}
|
||||
if (distTag === 'latest') {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -183,7 +152,7 @@ export async function installBuilders(
|
||||
|
||||
// Filter out any packages that come packaged with Vercel CLI
|
||||
const packagesToInstall = packages.filter(p =>
|
||||
filterPackage(p, distTag, buildersPkgBefore, cliPkg)
|
||||
filterPackage(p, buildersPkgBefore, cliPkg)
|
||||
);
|
||||
|
||||
if (packagesToInstall.length === 0) {
|
||||
@@ -392,20 +361,13 @@ export function isBundledBuilder(
|
||||
return false;
|
||||
}
|
||||
|
||||
const bundledVersion = dependencies[parsed.name];
|
||||
if (bundledVersion) {
|
||||
if (parsed.type === 'tag') {
|
||||
if (parsed.fetchSpec === 'canary') {
|
||||
return bundledVersion.includes('canary');
|
||||
} else if (parsed.fetchSpec === 'latest') {
|
||||
return !bundledVersion.includes('canary');
|
||||
}
|
||||
} else if (parsed.type === 'version') {
|
||||
return parsed.fetchSpec === bundledVersion;
|
||||
}
|
||||
}
|
||||
const inCliDependencyList = !!dependencies[parsed.name];
|
||||
const inScope = parsed.scope === '@vercel';
|
||||
const isVersionedReference = ['tag', 'version', 'range'].includes(
|
||||
parsed.type
|
||||
);
|
||||
|
||||
return false;
|
||||
return inCliDependencyList && inScope && isVersionedReference;
|
||||
}
|
||||
|
||||
function getPackageName(
|
||||
|
||||
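The rewritten check at the end of the hunk above boils down to three predicates. A condensed, self-contained restatement (not the actual export) using `npm-package-arg` as the original does:

```ts
import npa from 'npm-package-arg';

// Condensed restatement of the new isBundledBuilder() logic from the hunk above.
function isBundledBuilderSketch(
  spec: string,
  dependencies: Record<string, string>
): boolean {
  const parsed = npa(spec);
  if (!parsed.name) return false;
  const inCliDependencyList = !!dependencies[parsed.name];
  const inScope = parsed.scope === '@vercel';
  const isVersionedReference = ['tag', 'version', 'range'].includes(parsed.type);
  return inCliDependencyList && inScope && isVersionedReference;
}

isBundledBuilderSketch('@vercel/node@latest', { '@vercel/node': '2.4.5' }); // => true
isBundledBuilderSketch('my-custom-builder', { '@vercel/node': '2.4.5' });   // => false
```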
@@ -4,7 +4,7 @@ import ms from 'ms';
import bytes from 'bytes';
import { delimiter, dirname, join } from 'path';
import { fork, ChildProcess } from 'child_process';
import { createFunction } from '@zeit/fun';
import { createFunction } from '@vercel/fun';
import {
Builder,
BuildOptions,
@@ -558,9 +558,8 @@ export default class DevServer {
]);

await this.validateVercelConfig(vercelConfig);
const { error: routeError, routes: maybeRoutes } = getTransformedRoutes({
nowConfig: vercelConfig,
});
const { error: routeError, routes: maybeRoutes } =
getTransformedRoutes(vercelConfig);
if (routeError) {
this.output.prettyError(routeError);
await this.exit();
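Both the `vercel dev` change above and the `vercel build` change earlier drop the `{ nowConfig }` wrapper when calling `@vercel/routing-utils`. A minimal sketch of the new call shape, with placeholder config values:

```ts
import { getTransformedRoutes } from '@vercel/routing-utils';

// New signature: the vercel.json config object is passed directly.
const { routes, error } = getTransformedRoutes({
  cleanUrls: true,
  trailingSlash: false,
});
if (error) {
  throw error;
}
```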
@@ -1,6 +1,6 @@
import http from 'http';
import { ChildProcess } from 'child_process';
import { Lambda as FunLambda } from '@zeit/fun';
import { Lambda as FunLambda } from '@vercel/fun';
import {
Builder as BuildConfig,
BuildOptions,
@@ -87,3 +87,18 @@ export async function responseErrorMessage(

return `${message} (${res.status})`;
}

/**
* Returns a new Object with enumberable properties that match
* the provided `err` instance, for use with `JSON.stringify()`.
*/
export function toEnumerableError<E extends Partial<Error>>(err: E) {
const enumerable: {
[K in keyof E]?: E[K];
} = {};
enumerable.name = err.name;
for (const key of Object.getOwnPropertyNames(err) as (keyof E)[]) {
enumerable[key] = err[key];
}
return enumerable;
}
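Why `toEnumerableError()` is needed: an `Error`'s own properties such as `message` and `stack` are non-enumerable, so they are dropped by `JSON.stringify()`. A small illustration (not part of the diff):

```ts
// toEnumerableError() as defined in the hunk above.
const err = new Error('boom');

JSON.stringify(err);
// => '{}' — nothing enumerable to serialize

JSON.stringify(toEnumerableError(err));
// => includes "name", "message" and "stack", which is what lets
//    `vercel build` persist failures into builds.json
```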
@@ -14,7 +14,7 @@ interface ListSeparator {
separator: string;
}

type ListChoice = ListEntry | ListSeparator | typeof inquirer.Separator;
export type ListChoice = ListEntry | ListSeparator | typeof inquirer.Separator;

interface ListOptions {
message: string;
@@ -0,0 +1,7 @@
export const config = {
runtime: 'experimental-edge',
};

export default async function edge(request, event) {
// nothing returned
}
@@ -17,6 +17,7 @@ export default async function edge(request, event) {
decamelized: decamelize('someCamelCaseThing'),
uppercase: upper('someThing'),
optionalChaining: request?.doesnotexist ?? 'fallback',
ENV_VAR_IN_EDGE: process.env.ENV_VAR_IN_EDGE,
})
);
}
@@ -0,0 +1,3 @@
export default function serverless(request, response) {
return response.send('hello from a serverless function');
}
@@ -0,0 +1,7 @@
export const config = {
runtime: 'experimental-edge',
};

export default async function edge(request, event) {
// no response
}
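For contrast with the `edge-error`/`edge-no-response` fixtures above, a minimal edge function that does return a response might look like this (illustrative only, not one of the fixtures):

```ts
export const config = {
  runtime: 'experimental-edge',
};

export default async function edge(request: Request) {
  // Returning a Response avoids the "did not return a response" failure
  // exercised by the tests below.
  return new Response('hello from an edge function');
}
```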
@@ -16,7 +16,11 @@ const {
|
||||
|
||||
test('[vercel dev] should support edge functions', async () => {
|
||||
const dir = fixture('edge-function');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
const { dev, port, readyResolver } = await testFixture(dir, {
|
||||
env: {
|
||||
ENV_VAR_IN_EDGE: '1',
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
@@ -42,6 +46,7 @@ test('[vercel dev] should support edge functions', async () => {
|
||||
decamelized: 'some_camel_case_thing',
|
||||
uppercase: 'SOMETHING',
|
||||
optionalChaining: 'fallback',
|
||||
ENV_VAR_IN_EDGE: '1',
|
||||
});
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
@@ -56,6 +61,31 @@ test(
|
||||
})
|
||||
);
|
||||
|
||||
test('[vercel dev] throws an error when an edge function has no response', async () => {
|
||||
const dir = fixture('edge-function-error');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
let res = await fetch(`http://localhost:${port}/api/edge-no-response`);
|
||||
validateResponseHeaders(res);
|
||||
|
||||
const { stdout, stderr } = await dev.kill('SIGTERM');
|
||||
|
||||
expect(await res.status).toBe(500);
|
||||
expect(await res.text()).toMatch('FUNCTION_INVOCATION_FAILED');
|
||||
expect(stdout).toMatch(
|
||||
/Unhandled rejection: Edge Function "api\/edge-no-response.js" did not return a response./g
|
||||
);
|
||||
expect(stderr).toMatch(
|
||||
/Failed to complete request to \/api\/edge-no-response: Error: socket hang up/g
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should support edge functions returning intentional 500 responses', async () => {
|
||||
const dir = fixture('edge-function');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
@@ -442,6 +442,17 @@ test(
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware that has no response',
|
||||
testFixtureStdio('middleware-no-response', async (testPath: any) => {
|
||||
await testPath(
|
||||
500,
|
||||
'/api/hello',
|
||||
'A server error has occurred\n\nEDGE_FUNCTION_INVOCATION_FAILED'
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware that does basic rewrite',
|
||||
testFixtureStdio('middleware-rewrite', async (testPath: any) => {
|
||||
|
||||
@@ -0,0 +1,7 @@
{
"orgId": ".",
"projectId": ".",
"settings": {
"framework": null
}
}
1 packages/cli/test/fixtures/unit/commands/build/error-vercel-json-validation/index.html vendored Normal file
@@ -0,0 +1 @@
|
||||
<h1>Vercel</h1>
|
||||
5 packages/cli/test/fixtures/unit/commands/build/error-vercel-json-validation/vercel.json vendored Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"functions": {
|
||||
"invalid.js": {}
|
||||
}
|
||||
}
|
||||
7 packages/cli/test/fixtures/unit/commands/build/node-error/.vercel/project.json vendored Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null
|
||||
}
|
||||
}
|
||||
1 packages/cli/test/fixtures/unit/commands/build/node-error/api/es6.js vendored Normal file
@@ -0,0 +1 @@
|
||||
export default (req, res) => res.end('Vercel');
|
||||
1 packages/cli/test/fixtures/unit/commands/build/node-error/api/index.js vendored Normal file
@@ -0,0 +1 @@
|
||||
module.exports = (req, res) => res.end('Vercel');
|
||||
1 packages/cli/test/fixtures/unit/commands/build/node-error/api/mjs.mjs vendored Normal file
@@ -0,0 +1 @@
|
||||
export default (req, res) => res.end('Vercel');
|
||||
4 packages/cli/test/fixtures/unit/commands/build/node-error/api/typescript.ts vendored Normal file
@@ -0,0 +1,4 @@
|
||||
import { IncomingMessage, ServerResponse } from 'http';
|
||||
|
||||
// Intentional syntax error to make the build fail
|
||||
export default (req: IncomingMessage, res: ServerResponse => res.end('Vercel');
|
||||
9 packages/cli/test/fixtures/unit/commands/build/static-with-pkg/.vercel/project.json vendored Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null,
|
||||
"buildCommand": null,
|
||||
"outputDirectory": "out"
|
||||
}
|
||||
}
|
||||
1 packages/cli/test/fixtures/unit/commands/build/static-with-pkg/index.html vendored Normal file
@@ -0,0 +1 @@
|
||||
<h1>Vercel</h1>
|
||||
5 packages/cli/test/fixtures/unit/commands/build/static-with-pkg/package.json vendored Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"scripts": {
|
||||
"test": "echo \"Error: no test specified\" && exit 1"
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,17 @@
|
||||
const { FileBlob } = require('@vercel/build-utils');
|
||||
const { FileBlob, Lambda } = require('@vercel/build-utils');
|
||||
|
||||
exports.build = async () => {
|
||||
const file = new FileBlob({
|
||||
data: Buffer.from('file contents')
|
||||
});
|
||||
const output = { file };
|
||||
const lambda = new Lambda({
|
||||
files: {},
|
||||
runtime: 'provided',
|
||||
handler: 'example.js'
|
||||
})
|
||||
const output = {
|
||||
file,
|
||||
'withTrailingSlash/': lambda
|
||||
};
|
||||
return { output };
|
||||
};
|
||||
|
||||
1 packages/cli/test/fixtures/unit/commands/git/connect/multiple-remotes/.gitignore generated vendored Normal file
@@ -0,0 +1 @@
|
||||
!.vercel
|
||||
4 packages/cli/test/fixtures/unit/commands/git/connect/multiple-remotes/.vercel/project.json generated vendored Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "multiple-remotes"
|
||||
}
|
||||
1 packages/cli/test/fixtures/unit/commands/git/connect/multiple-remotes/git/HEAD generated vendored Normal file
@@ -0,0 +1 @@
|
||||
ref: refs/heads/master
|
||||
13 packages/cli/test/fixtures/unit/commands/git/connect/multiple-remotes/git/config generated vendored Normal file
@@ -0,0 +1,13 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/user/repo.git
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
[remote "secondary"]
|
||||
url = https://github.com/user/repo2.git
|
||||
fetch = +refs/heads/*:refs/remotes/secondary/*
|
||||
1 packages/cli/test/fixtures/unit/commands/git/connect/multiple-remotes/git/description generated vendored Normal file
@@ -0,0 +1 @@
|
||||
Unnamed repository; edit this file 'description' to name the repository.
|
||||
1 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/.gitignore vendored Normal file
@@ -0,0 +1 @@
|
||||
!.vercel
|
||||
4 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/.vercel/project.json vendored Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "connected-repo"
|
||||
}
|
||||
1 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/COMMIT_EDITMSG generated vendored Normal file
@@ -0,0 +1 @@
|
||||
add hi
|
||||
1 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/HEAD generated vendored Normal file
@@ -0,0 +1 @@
|
||||
ref: refs/heads/master
|
||||
13 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/config generated vendored Normal file
@@ -0,0 +1,13 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/user/repo
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
[remote "secondary"]
|
||||
url = https://github.com/user/repo2
|
||||
fetch = +refs/heads/*:refs/remotes/secondary/*
|
||||
1 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/description generated vendored Normal file
@@ -0,0 +1 @@
|
||||
Unnamed repository; edit this file 'description' to name the repository.
|
||||
BIN packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/objects/45/b983be36b73c0788dc9cbcb76cbb80fc7bb057 generated vendored Normal file
Binary file not shown.
BIN packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/objects/80/50816205303e5957b2909083c50677930d5b29 generated vendored Normal file
Binary file not shown.
BIN packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/objects/b2/e4d98c09ed53967b0a8f67c293c04ca4173438 generated vendored Normal file
Binary file not shown.
1 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/git/refs/heads/master generated vendored Normal file
@@ -0,0 +1 @@
|
||||
8050816205303e5957b2909083c50677930d5b29
|
||||
1 packages/cli/test/fixtures/unit/create-git-meta/connected-repo/index.txt vendored Normal file
@@ -0,0 +1 @@
|
||||
hi
|
||||
1 packages/cli/test/fixtures/unit/create-git-meta/multiple-remotes/git/HEAD generated vendored Normal file
@@ -0,0 +1 @@
|
||||
ref: refs/heads/master
|
||||
13 packages/cli/test/fixtures/unit/create-git-meta/multiple-remotes/git/config generated vendored Normal file
@@ -0,0 +1,13 @@
[core]
	repositoryformatversion = 0
	filemode = true
	bare = false
	logallrefupdates = true
	ignorecase = true
	precomposeunicode = true
[remote "origin"]
	url = https://github.com/user/repo
	fetch = +refs/heads/*:refs/remotes/origin/*
[remote "secondary"]
	url = https://github.com/user/repo2
	fetch = +refs/heads/*:refs/remotes/secondary/*
1  packages/cli/test/fixtures/unit/create-git-meta/multiple-remotes/git/description  generated  vendored  Normal file
@@ -0,0 +1 @@
Unnamed repository; edit this file 'description' to name the repository.
@@ -589,8 +589,6 @@ describe('build', () => {
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
client.stderr.pipe(process.stderr);
|
||||
client.setArgv('build');
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
@@ -614,6 +612,97 @@ describe('build', () => {
|
||||
expect(await fs.readFile(join(output, 'static/file'), 'utf8')).toEqual(
|
||||
'file contents'
|
||||
);
|
||||
|
||||
// "functions" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions'));
|
||||
expect(functions.sort()).toEqual(['withTrailingSlash.func']);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should store `detectBuilders()` error in `builds.json`', async () => {
|
||||
const cwd = fixture('error-vercel-json-validation');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
|
||||
// `builds.json` contains top-level "error" property
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds.builds).toBeUndefined();
|
||||
|
||||
expect(builds.error.code).toEqual('invalid_function');
|
||||
expect(builds.error.message).toEqual(
|
||||
'Function must contain at least one property.'
|
||||
);
|
||||
|
||||
// `config.json` contains `version`
|
||||
const configJson = await fs.readJSON(join(output, 'config.json'));
|
||||
expect(configJson.version).toBe(3);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should store Builder error in `builds.json`', async () => {
|
||||
const cwd = fixture('node-error');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
|
||||
// `builds.json` contains "error" build
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds.builds).toHaveLength(4);
|
||||
|
||||
const errorBuilds = builds.builds.filter((b: any) => 'error' in b);
|
||||
expect(errorBuilds).toHaveLength(1);
|
||||
|
||||
expect(errorBuilds[0].error.name).toEqual('Error');
|
||||
expect(errorBuilds[0].error.message).toMatch(`TS1005`);
|
||||
expect(errorBuilds[0].error.message).toMatch(`',' expected.`);
|
||||
expect(errorBuilds[0].error.hideStackTrace).toEqual(true);
|
||||
expect(errorBuilds[0].error.code).toEqual('NODE_TYPESCRIPT_ERROR');
|
||||
|
||||
// `config.json` contains `version`
|
||||
const configJson = await fs.readJSON(join(output, 'config.json'));
|
||||
expect(configJson.version).toBe(3);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should allow for missing "build" script', async () => {
|
||||
const cwd = fixture('static-with-pkg');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/static" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
src: '**',
|
||||
use: '@vercel/static',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory contains static files
|
||||
const files = await fs.readdir(join(output, 'static'));
|
||||
expect(files.sort()).toEqual(['index.html', 'package.json']);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
|
||||
@@ -57,6 +57,69 @@ describe('env', () => {
|
||||
expect(devFileHasDevEnv).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should use given environment', async () => {
|
||||
const cwd = setupFixture('vercel-env-pull');
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'vercel-env-pull',
|
||||
name: 'vercel-env-pull',
|
||||
});
|
||||
|
||||
client.setArgv(
|
||||
'env',
|
||||
'pull',
|
||||
'.env.production',
|
||||
'--environment',
|
||||
'production',
|
||||
'--cwd',
|
||||
cwd
|
||||
);
|
||||
const exitCodePromise = env(client);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Downloading "production" Environment Variables for Project vercel-env-pull`
|
||||
);
|
||||
await expect(client.stderr).toOutput('Created .env.production file');
|
||||
await expect(exitCodePromise).resolves.toEqual(0);
|
||||
|
||||
const rawProdEnv = await fs.readFile(path.join(cwd, '.env.production'));
|
||||
|
||||
// check for development env value
|
||||
const envFileHasEnv = rawProdEnv
|
||||
.toString()
|
||||
.includes('REDIS_CONNECTION_STRING');
|
||||
expect(envFileHasEnv).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should throw an error when it does not recognize given environment', async () => {
|
||||
const cwd = setupFixture('vercel-env-pull');
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'vercel-env-pull',
|
||||
name: 'vercel-env-pull',
|
||||
});
|
||||
|
||||
client.setArgv(
|
||||
'env',
|
||||
'pull',
|
||||
'.env.production',
|
||||
'--environment',
|
||||
'something-invalid',
|
||||
'--cwd',
|
||||
cwd
|
||||
);
|
||||
|
||||
const exitCodePromise = env(client);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Invalid environment \`something-invalid\`. Valid options: <production | preview | development>`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should expose production system env variables', async () => {
|
||||
const cwd = setupFixture('vercel-env-pull');
|
||||
useUser();
|
||||
|
||||
@@ -25,7 +25,7 @@ describe('git', () => {
|
||||
id: 'unlinked',
|
||||
name: 'unlinked',
|
||||
});
|
||||
client.setArgv('projects', 'connect');
|
||||
client.setArgv('git', 'connect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Set up');
|
||||
@@ -40,7 +40,7 @@ describe('git', () => {
|
||||
client.stdin.write('y\n');
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo.git`
|
||||
`Connecting Git remote: https://github.com/user/repo.git`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
@@ -76,7 +76,7 @@ describe('git', () => {
|
||||
id: 'no-git-config',
|
||||
name: 'no-git-config',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
client.setArgv('git', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
await expect(client.stderr).toOutput(
|
||||
@@ -98,11 +98,11 @@ describe('git', () => {
|
||||
id: 'no-remote-url',
|
||||
name: 'no-remote-url',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
client.setArgv('git', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run \`git remote --help\` for more details.`
|
||||
`Error! No remote URLs found in your Git config. Make sure you've configured a remote repo in your local Git config. Run \`git remote --help\` for more details.`
|
||||
);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
@@ -121,15 +121,15 @@ describe('git', () => {
|
||||
id: 'bad-remote-url',
|
||||
name: 'bad-remote-url',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
client.setArgv('git', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": bababooey`
|
||||
`Connecting Git remote: bababooey`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! Failed to parse Git repo data from the following remote URL in your Git config: bababooey\n`
|
||||
`Error! Failed to parse Git repo data from the following remote URL: bababooey\n`
|
||||
);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
@@ -148,11 +148,11 @@ describe('git', () => {
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
client.setArgv('git', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo`
|
||||
`Connecting Git remote: https://github.com/user/repo`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> Connected GitHub repository user/repo!\n`
|
||||
@@ -201,11 +201,11 @@ describe('git', () => {
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
client.setArgv('git', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user2/repo2`
|
||||
`Connecting Git remote: https://github.com/user2/repo2`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> Connected GitHub repository user2/repo2!\n`
|
||||
@@ -253,11 +253,11 @@ describe('git', () => {
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
client.setArgv('git', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo`
|
||||
`Connecting Git remote: https://github.com/user/repo`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> user/repo is already connected to your project.\n`
|
||||
@@ -283,11 +283,11 @@ describe('git', () => {
|
||||
name: 'invalid-repo',
|
||||
});
|
||||
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
client.setArgv('git', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/laksfj/asdgklsadkl`
|
||||
`Connecting Git remote: https://github.com/laksfj/asdgklsadkl`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Failed to link laksfj/asdgklsadkl. Make sure there aren't any typos and that you have access to the repository if it's private.`
|
||||
@@ -300,6 +300,56 @@ describe('git', () => {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should connect the default option of multiple remotes', async () => {
|
||||
const cwd = fixture('multiple-remotes');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'multiple-remotes',
|
||||
name: 'multiple-remotes',
|
||||
});
|
||||
|
||||
client.setArgv('git', 'connect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Found multiple remote URLs.');
|
||||
await expect(client.stderr).toOutput(
|
||||
'Which remote do you want to connect?'
|
||||
);
|
||||
|
||||
client.stdin.write('\r');
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'Connecting Git remote: https://github.com/user/repo.git'
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
'Connected GitHub repository user/repo!'
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const project: Project = await client.fetch(
|
||||
`/v8/projects/multiple-remotes`
|
||||
);
|
||||
expect(project.link).toMatchObject({
|
||||
type: 'github',
|
||||
repo: 'user/repo',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
|
||||
describe('disconnect', () => {
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
@@ -4,31 +4,58 @@ import os from 'os';
|
||||
import { getWriteableDirectory } from '@vercel/build-utils';
|
||||
import {
|
||||
createGitMeta,
|
||||
getRemoteUrl,
|
||||
getOriginUrl,
|
||||
getRemoteUrls,
|
||||
isDirty,
|
||||
} from '../../../../src/util/create-git-meta';
|
||||
import { client } from '../../../mocks/client';
|
||||
import { parseRepoUrl } from '../../../../src/util/projects/connect-git-provider';
|
||||
import { readOutputStream } from '../../../helpers/read-output-stream';
|
||||
import { useUser } from '../../../mocks/user';
|
||||
import { defaultProject, useProject } from '../../../mocks/project';
|
||||
import { Project } from '../../../../src/types';
|
||||
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../../fixtures/unit/create-git-meta', name);
|
||||
|
||||
describe('getRemoteUrl', () => {
|
||||
describe('getOriginUrl', () => {
|
||||
it('does not provide data for no-origin', async () => {
|
||||
const configPath = join(fixture('no-origin'), 'git/config');
|
||||
const data = await getRemoteUrl(configPath, client.output);
|
||||
const data = await getOriginUrl(configPath, client.output);
|
||||
expect(data).toBeNull();
|
||||
});
|
||||
it('displays debug message when repo data cannot be parsed', async () => {
|
||||
const dir = await getWriteableDirectory();
|
||||
client.output.debugEnabled = true;
|
||||
const data = await getRemoteUrl(join(dir, 'git/config'), client.output);
|
||||
const data = await getOriginUrl(join(dir, 'git/config'), client.output);
|
||||
expect(data).toBeNull();
|
||||
await expect(client.stderr).toOutput('Error while parsing repo data');
|
||||
});
|
||||
});
|
||||
|
||||
describe('getRemoteUrls', () => {
|
||||
it('does not provide data when there are no remote urls', async () => {
|
||||
const configPath = join(fixture('no-origin'), 'git/config');
|
||||
const data = await getRemoteUrls(configPath, client.output);
|
||||
expect(data).toBeUndefined();
|
||||
});
|
||||
it('returns an object when multiple urls are present', async () => {
|
||||
const configPath = join(fixture('multiple-remotes'), 'git/config');
|
||||
const data = await getRemoteUrls(configPath, client.output);
|
||||
expect(data).toMatchObject({
|
||||
origin: 'https://github.com/user/repo',
|
||||
secondary: 'https://github.com/user/repo2',
|
||||
});
|
||||
});
|
||||
it('returns an object for origin url', async () => {
|
||||
const configPath = join(fixture('test-github'), 'git/config');
|
||||
const data = await getRemoteUrls(configPath, client.output);
|
||||
expect(data).toMatchObject({
|
||||
origin: 'https://github.com/user/repo.git',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseRepoUrl', () => {
|
||||
it('should be null when a url does not match the regex', () => {
|
||||
const parsedUrl = parseRepoUrl('https://examplecom/foo');
|
||||
@@ -244,4 +271,45 @@ describe('createGitMeta', () => {
|
||||
await fs.remove(tmpDir);
|
||||
}
|
||||
});
|
||||
it('uses the repo url for a connected project', async () => {
|
||||
const originalCwd = process.cwd();
|
||||
const directory = fixture('connected-repo');
|
||||
try {
|
||||
process.chdir(directory);
|
||||
await fs.rename(join(directory, 'git'), join(directory, '.git'));
|
||||
|
||||
useUser();
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'connected-repo',
|
||||
name: 'connected-repo',
|
||||
});
|
||||
project.project.link = {
|
||||
type: 'github',
|
||||
repo: 'user/repo2',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
|
||||
const data = await createGitMeta(
|
||||
directory,
|
||||
client.output,
|
||||
project.project as Project
|
||||
);
|
||||
expect(data).toMatchObject({
|
||||
remoteUrl: 'https://github.com/user/repo2',
|
||||
commitAuthorName: 'Matthew Stanciu',
|
||||
commitMessage: 'add hi',
|
||||
commitRef: 'master',
|
||||
commitSha: '8050816205303e5957b2909083c50677930d5b29',
|
||||
dirty: true,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(directory, '.git'), join(directory, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
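The connected-repo case above links the project to user/repo2 and expects createGitMeta to report the matching "secondary" remote rather than "origin". A minimal sketch of that selection step, inferred only from these test expectations (the real logic lives in src/util/create-git-meta.ts and may differ):

  // remoteUrls as returned by getRemoteUrls(), e.g.
  // { origin: 'https://github.com/user/repo', secondary: 'https://github.com/user/repo2' }
  function selectRemoteUrl(
    remoteUrls: Record<string, string>,
    linkedRepo?: string // project.link.repo, e.g. 'user/repo2'
  ): string | undefined {
    if (linkedRepo) {
      for (const url of Object.values(remoteUrls)) {
        // prefer the remote that points at the repo the project is connected to
        if (url.replace(/\.git$/, '').endsWith(`/${linkedRepo}`)) return url;
      }
    }
    // otherwise fall back to "origin", as the single-remote tests expect
    return remoteUrls.origin;
  }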
|
||||
|
||||
@@ -6,19 +6,25 @@ import {
|
||||
} from '../../../../src/util/dev/builder-cache';
|
||||
|
||||
describe('filterPackage', () => {
|
||||
it('should filter install "latest", cached canary', () => {
|
||||
const cliPkg = {
|
||||
dependencies: {
|
||||
'@vercel/build-utils': '0.0.1',
|
||||
},
|
||||
};
|
||||
|
||||
it('should filter package that does not appear in CLI package.json', () => {
|
||||
const result = filterPackage('@vercel/other', {}, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
it('should not filter "latest", cached canary', () => {
|
||||
const buildersPkg = {
|
||||
dependencies: {
|
||||
'@vercel/build-utils': '0.0.1-canary.0',
|
||||
},
|
||||
};
|
||||
const result = filterPackage(
|
||||
'@vercel/build-utils',
|
||||
'canary',
|
||||
buildersPkg,
|
||||
{}
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
const result = filterPackage('@vercel/build-utils', buildersPkg, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
|
||||
it('should filter install "canary", cached stable', () => {
|
||||
@@ -29,11 +35,10 @@ describe('filterPackage', () => {
|
||||
};
|
||||
const result = filterPackage(
|
||||
'@vercel/build-utils@canary',
|
||||
'latest',
|
||||
buildersPkg,
|
||||
{}
|
||||
cliPkg
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
|
||||
it('should filter install "latest", cached stable', () => {
|
||||
@@ -42,12 +47,7 @@ describe('filterPackage', () => {
|
||||
'@vercel/build-utils': '0.0.1',
|
||||
},
|
||||
};
|
||||
const result = filterPackage(
|
||||
'@vercel/build-utils',
|
||||
'latest',
|
||||
buildersPkg,
|
||||
{}
|
||||
);
|
||||
const result = filterPackage('@vercel/build-utils', buildersPkg, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
|
||||
@@ -59,9 +59,8 @@ describe('filterPackage', () => {
|
||||
};
|
||||
const result = filterPackage(
|
||||
'@vercel/build-utils@canary',
|
||||
'canary',
|
||||
buildersPkg,
|
||||
{}
|
||||
cliPkg
|
||||
);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
@@ -72,12 +71,7 @@ describe('filterPackage', () => {
|
||||
'@vercel/build-utils': '0.0.1',
|
||||
},
|
||||
};
|
||||
const result = filterPackage(
|
||||
'https://tarball.now.sh',
|
||||
'latest',
|
||||
buildersPkg,
|
||||
{}
|
||||
);
|
||||
const result = filterPackage('https://tarball.now.sh', buildersPkg, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
@@ -87,27 +81,7 @@ describe('filterPackage', () => {
|
||||
'@vercel/build-utils': '0.0.1-canary.0',
|
||||
},
|
||||
};
|
||||
const result = filterPackage(
|
||||
'https://tarball.now.sh',
|
||||
'canary',
|
||||
buildersPkg,
|
||||
{}
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
it('should filter install "latest", cached URL - stable', () => {
|
||||
const buildersPkg = {
|
||||
dependencies: {
|
||||
'@vercel/build-utils': 'https://tarball.now.sh',
|
||||
},
|
||||
};
|
||||
const result = filterPackage(
|
||||
'@vercel/build-utils',
|
||||
'latest',
|
||||
buildersPkg,
|
||||
{}
|
||||
);
|
||||
const result = filterPackage('https://tarball.now.sh', buildersPkg, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
@@ -117,13 +91,8 @@ describe('filterPackage', () => {
|
||||
'@vercel/build-utils': 'https://tarball.now.sh',
|
||||
},
|
||||
};
|
||||
const result = filterPackage(
|
||||
'@vercel/build-utils',
|
||||
'canary',
|
||||
buildersPkg,
|
||||
{}
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
const result = filterPackage('@vercel/build-utils', buildersPkg, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
|
||||
it('should filter install not bundled version, cached same version', () => {
|
||||
@@ -134,9 +103,8 @@ describe('filterPackage', () => {
|
||||
};
|
||||
const result = filterPackage(
|
||||
'not-bundled-package@0.0.1',
|
||||
'_',
|
||||
buildersPkg,
|
||||
{}
|
||||
cliPkg
|
||||
);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
@@ -149,9 +117,8 @@ describe('filterPackage', () => {
|
||||
};
|
||||
const result = filterPackage(
|
||||
'not-bundled-package@0.0.1',
|
||||
'_',
|
||||
buildersPkg,
|
||||
{}
|
||||
cliPkg
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
@@ -162,7 +129,7 @@ describe('filterPackage', () => {
|
||||
'not-bundled-package': '0.0.1',
|
||||
},
|
||||
};
|
||||
const result = filterPackage('not-bundled-package', '_', buildersPkg, {});
|
||||
const result = filterPackage('not-bundled-package', buildersPkg, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
@@ -174,9 +141,8 @@ describe('filterPackage', () => {
|
||||
};
|
||||
const result = filterPackage(
|
||||
'not-bundled-package@alpha',
|
||||
'_',
|
||||
buildersPkg,
|
||||
{}
|
||||
cliPkg
|
||||
);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
@@ -213,89 +179,57 @@ describe('getBuildUtils', () => {
|
||||
});
|
||||
|
||||
describe('isBundledBuilder', () => {
|
||||
it('should work with "stable" releases', () => {
|
||||
const cliPkg = {
|
||||
dependencies: {
|
||||
'@vercel/node': '1.6.1',
|
||||
},
|
||||
};
|
||||
const cliPkg = {
|
||||
dependencies: {
|
||||
'@vercel/node': '0.0.1',
|
||||
},
|
||||
};
|
||||
|
||||
// "canary" tag
|
||||
{
|
||||
const parsed = npa('@vercel/node@canary');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
}
|
||||
|
||||
// "latest" tag
|
||||
{
|
||||
const parsed = npa('@vercel/node');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
}
|
||||
|
||||
// specific matching version
|
||||
{
|
||||
const parsed = npa('@vercel/node@1.6.1');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
}
|
||||
|
||||
// specific non-matching version
|
||||
{
|
||||
const parsed = npa('@vercel/node@1.6.0');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
}
|
||||
|
||||
// URL
|
||||
{
|
||||
const parsed = npa('https://example.com');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
}
|
||||
it('should not detect when dependency does not appear in CLI package.json', () => {
|
||||
const parsed = npa('@vercel/node');
|
||||
const result = isBundledBuilder(parsed, {});
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
|
||||
it('should work with "canary" releases', () => {
|
||||
const cliPkg = {
|
||||
dependencies: {
|
||||
'@vercel/node': '1.6.1-canary.0',
|
||||
},
|
||||
};
|
||||
it('should detect "canary" tagged releases', () => {
|
||||
const parsed = npa('@vercel/node@canary');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
// "canary" tag
|
||||
{
|
||||
const parsed = npa('@vercel/node@canary');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
}
|
||||
it('should detect "canary" versioned releases', () => {
|
||||
const parsed = npa('@vercel/node@1.6.1-canary.0');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
// "latest" tag
|
||||
{
|
||||
const parsed = npa('@vercel/node');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
}
|
||||
it('should detect latest releases', () => {
|
||||
const parsed = npa('@vercel/node');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
// specific matching version
|
||||
{
|
||||
const parsed = npa('@vercel/node@1.6.1-canary.0');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
}
|
||||
it('should detect "latest" tagged releases', () => {
|
||||
const parsed = npa('@vercel/node@latest');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
// specific non-matching version
|
||||
{
|
||||
const parsed = npa('@vercel/node@1.5.2-canary.9');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
}
|
||||
it('should detect versioned releases', () => {
|
||||
const parsed = npa('@vercel/node@1.6.1');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(true);
|
||||
});
|
||||
|
||||
// URL
|
||||
{
|
||||
const parsed = npa('https://example.com');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
}
|
||||
it('should NOT detect URL releases', () => {
|
||||
const parsed = npa('https://example.com');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
|
||||
it('should NOT detect git url releases', () => {
|
||||
const parsed = npa('git://example.com/repo.git');
|
||||
const result = isBundledBuilder(parsed, cliPkg);
|
||||
expect(result).toEqual(false);
|
||||
});
|
||||
});
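Taken together, the rewritten tests pin down a simpler contract for isBundledBuilder(parsed, cliPkg): a builder counts as bundled when it resolves to a registry package that the CLI itself depends on, regardless of tag or exact version, and URL or git specs never do. A sketch reconstructed from these cases only (not the source of builder-cache.ts):

  import npa from 'npm-package-arg';

  type CliPkg = { dependencies?: { [name: string]: string } };

  function isBundledBuilderSketch(parsed: npa.Result, cliPkg: CliPkg): boolean {
    // not shipped with the CLI at all -> never bundled
    if (!parsed.name || !cliPkg.dependencies?.[parsed.name]) return false;
    // '@vercel/node', '@vercel/node@latest', '@vercel/node@canary',
    // '@vercel/node@1.6.1', '@vercel/node@1.6.1-canary.0' -> bundled
    // 'https://example.com', 'git://example.com/repo.git' -> not bundled
    return parsed.type === 'tag' || parsed.type === 'version';
  }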
|
||||
|
||||
@@ -2,7 +2,11 @@ import fetch from 'node-fetch';
|
||||
import listen from 'async-listen';
|
||||
import { createServer, IncomingMessage, Server, ServerResponse } from 'http';
|
||||
import { JSONValue } from '../../../src/types';
|
||||
import { responseError, responseErrorMessage } from '../../../src/util/error';
|
||||
import {
|
||||
responseError,
|
||||
responseErrorMessage,
|
||||
toEnumerableError,
|
||||
} from '../../../src/util/error';
|
||||
|
||||
const send = (res: ServerResponse, statusCode: number, body: JSONValue) => {
|
||||
res.statusCode = statusCode;
|
||||
@@ -10,7 +14,7 @@ const send = (res: ServerResponse, statusCode: number, body: JSONValue) => {
|
||||
res.end(JSON.stringify(body));
|
||||
};
|
||||
|
||||
describe('responseError', () => {
|
||||
describe('responseError()', () => {
|
||||
let url: string;
|
||||
let server: Server;
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
@@ -187,3 +191,43 @@ describe('responseError', () => {
|
||||
expect(formatted.retryAfter).toEqual(undefined);
|
||||
});
|
||||
});
|
||||
|
||||
describe('toEnumerableError()', () => {
|
||||
it('should JSON stringify Error', () => {
|
||||
const err = new Error('An error');
|
||||
const enumerable = toEnumerableError(err);
|
||||
expect(JSON.stringify(err)).toEqual('{}');
|
||||
|
||||
// Delete `stack` since it makes stringify nondeterministic
|
||||
// (due to filenames / line numbers)
|
||||
expect(typeof enumerable.stack).toEqual('string');
|
||||
delete enumerable.stack;
|
||||
|
||||
expect(JSON.stringify(enumerable)).toEqual(
|
||||
'{"name":"Error","message":"An error"}'
|
||||
);
|
||||
});
|
||||
|
||||
it('should JSON stringify Error with custom properties', () => {
|
||||
const err = new Error('An error');
|
||||
Object.defineProperty(err, 'custom', {
|
||||
enumerable: false,
|
||||
value: 'value',
|
||||
});
|
||||
Object.defineProperty(err, 'userError', {
|
||||
enumerable: false,
|
||||
value: true,
|
||||
});
|
||||
const enumerable = toEnumerableError(err);
|
||||
expect(JSON.stringify(err)).toEqual('{}');
|
||||
|
||||
// Delete `stack` since it makes stringify nondeterministic
|
||||
// (due to filenames / line numbers)
|
||||
expect(typeof enumerable.stack).toEqual('string');
|
||||
delete enumerable.stack;
|
||||
|
||||
expect(JSON.stringify(enumerable)).toEqual(
|
||||
'{"name":"Error","message":"An error","custom":"value","userError":true}'
|
||||
);
|
||||
});
|
||||
});
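These assertions hinge on Error's own properties being non-enumerable, which is why JSON.stringify(err) yields '{}' while the converted object serializes its name, message, and any custom fields. A sketch consistent with the expected output, inferred from the tests rather than copied from src/util/error.ts:

  function toEnumerableErrorSketch(error: Error): Record<string, unknown> {
    // start with name/message so they serialize first, as the assertions expect
    const enumerable: Record<string, unknown> = {
      name: error.name,
      message: error.message,
    };
    // copy every own property, including non-enumerable ones such as
    // stack, custom and userError from the second test
    for (const key of Object.getOwnPropertyNames(error)) {
      enumerable[key] = (error as unknown as Record<string, unknown>)[key];
    }
    return enumerable;
  }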
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "12.1.1",
|
||||
"version": "12.1.3",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -42,8 +42,8 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "5.0.2",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/build-utils": "5.0.4",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "3.0.0",
|
||||
@@ -51,7 +51,7 @@
|
||||
"ignore": "4.0.6",
|
||||
"minimatch": "5.0.1",
|
||||
"ms": "2.1.2",
|
||||
"node-fetch": "2.6.1",
|
||||
"node-fetch": "2.6.7",
|
||||
"querystring": "^0.2.0",
|
||||
"sleep-promise": "8.0.1"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "1.1.0",
|
||||
"version": "1.1.1",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/js-yaml": "3.12.1",
|
||||
"@types/node": "12.0.4",
|
||||
"@types/node-fetch": "2.5.8",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"ajv": "6.12.2",
|
||||
"typescript": "4.3.4"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/fs-detectors",
|
||||
"version": "2.0.0",
|
||||
"version": "2.0.1",
|
||||
"description": "Vercel filesystem detectors",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
@@ -20,8 +20,8 @@
|
||||
"test-unit": "yarn test"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/frameworks": "1.1.0",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/frameworks": "1.1.1",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"glob": "8.0.3",
|
||||
"js-yaml": "4.1.0",
|
||||
"minimatch": "3.0.4",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import minimatch from 'minimatch';
|
||||
import { valid as validSemver } from 'semver';
|
||||
import { parse as parsePath, extname } from 'path';
|
||||
import type { Route, Source } from '@vercel/routing-utils';
|
||||
import type { Route, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import frameworkList, { Framework } from '@vercel/frameworks';
|
||||
import type {
|
||||
PackageJson,
|
||||
@@ -155,8 +155,8 @@ export async function detectBuilders(
|
||||
|
||||
let fallbackEntrypoint: string | null = null;
|
||||
|
||||
const apiRoutes: Source[] = [];
|
||||
const dynamicRoutes: Source[] = [];
|
||||
const apiRoutes: RouteWithSrc[] = [];
|
||||
const dynamicRoutes: RouteWithSrc[] = [];
|
||||
|
||||
// API
|
||||
for (const fileName of sortedFiles) {
|
||||
@@ -692,7 +692,7 @@ function getApiRoute(
|
||||
options: Options,
|
||||
absolutePathCache: Map<string, string>
|
||||
): {
|
||||
apiRoute: Source | null;
|
||||
apiRoute: RouteWithSrc | null;
|
||||
isDynamic: boolean;
|
||||
routeError: ErrorResponse | null;
|
||||
} {
|
||||
@@ -886,7 +886,7 @@ function createRouteFromPath(
|
||||
filePath: string,
|
||||
featHandleMiss: boolean,
|
||||
cleanUrls: boolean
|
||||
): { route: Source; isDynamic: boolean } {
|
||||
): { route: RouteWithSrc; isDynamic: boolean } {
|
||||
const parts = filePath.split('/');
|
||||
|
||||
let counter = 1;
|
||||
@@ -932,7 +932,7 @@ function createRouteFromPath(
|
||||
? `^/${srcParts.slice(0, -1).join('/')}${srcParts.slice(-1)[0]}$`
|
||||
: `^/${srcParts.join('/')}$`;
|
||||
|
||||
let route: Source;
|
||||
let route: RouteWithSrc;
|
||||
|
||||
if (featHandleMiss) {
|
||||
const extensionless = ext ? filePath.slice(0, -ext.length) : filePath;
|
||||
@@ -959,8 +959,8 @@ interface LimitedRoutes {
|
||||
|
||||
function getRouteResult(
|
||||
pkg: PackageJson | undefined | null,
|
||||
apiRoutes: Source[],
|
||||
dynamicRoutes: Source[],
|
||||
apiRoutes: RouteWithSrc[],
|
||||
dynamicRoutes: RouteWithSrc[],
|
||||
outputDirectory: string,
|
||||
apiBuilders: Builder[],
|
||||
frontendBuilder: Builder | null,
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
import type { Source, Route, Handler } from '@vercel/routing-utils';
|
||||
import type {
|
||||
Route,
|
||||
RouteWithHandle as Handler,
|
||||
RouteWithSrc as Source,
|
||||
} from '@vercel/routing-utils';
|
||||
import {
|
||||
detectBuilders,
|
||||
detectOutputDirectory,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/go",
|
||||
"version": "2.0.6",
|
||||
"version": "2.0.8",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
|
||||
@@ -25,7 +25,7 @@
|
||||
"@types/fs-extra": "^5.0.5",
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "^4.0.0",
|
||||
"@vercel/build-utils": "5.0.2",
|
||||
"@vercel/build-utils": "5.0.4",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"async-retry": "1.3.1",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/hydrogen",
|
||||
"version": "0.0.3",
|
||||
"version": "0.0.5",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
@@ -22,7 +22,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "*",
|
||||
"@vercel/build-utils": "5.0.2",
|
||||
"@vercel/build-utils": "5.0.4",
|
||||
"typescript": "4.6.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "3.1.5",
|
||||
"version": "3.1.8",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -45,9 +45,9 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "5.0.2",
|
||||
"@vercel/build-utils": "5.0.4",
|
||||
"@vercel/nft": "0.20.1",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
"cheerio": "1.0.0-rc.10",
|
||||
|
||||
@@ -24,7 +24,7 @@ import {
|
||||
NodejsLambda,
|
||||
BuildResultV2Typical as BuildResult,
|
||||
} from '@vercel/build-utils';
|
||||
import { Handler, Route, Source } from '@vercel/routing-utils';
|
||||
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import {
|
||||
convertHeaders,
|
||||
convertRedirects,
|
||||
@@ -896,7 +896,7 @@ export const build: BuildV2 = async ({
|
||||
...(output[path.join('./', entryDirectory, '404')] ||
|
||||
output[path.join('./', entryDirectory, '404/index')]
|
||||
? [
|
||||
{ handle: 'error' } as Handler,
|
||||
{ handle: 'error' } as RouteWithHandle,
|
||||
|
||||
{
|
||||
status: 404,
|
||||
@@ -928,7 +928,7 @@ export const build: BuildV2 = async ({
|
||||
let trailingSlash = false;
|
||||
|
||||
redirects = redirects.filter(_redir => {
|
||||
const redir = _redir as Source;
|
||||
const redir = _redir as RouteWithSrc;
|
||||
// detect the trailing slash redirect and make sure it's
|
||||
// kept above the wildcard mapping to prevent erroneous redirects
|
||||
// since non-continue routes come after continue the $wildcard
|
||||
@@ -1146,7 +1146,7 @@ export const build: BuildV2 = async ({
|
||||
continue;
|
||||
}
|
||||
|
||||
const route: Source & { dest: string } = {
|
||||
const route: RouteWithSrc & { dest: string } = {
|
||||
src: (
|
||||
dataRoute.namedDataRouteRegex || dataRoute.dataRouteRegex
|
||||
).replace(/^\^/, `^${appMountPrefixNoTrailingSlash}`),
|
||||
@@ -1175,7 +1175,7 @@ export const build: BuildV2 = async ({
|
||||
if (isOmittedRoute && isServerMode) {
|
||||
// only match this route when in preview mode so
|
||||
// preview works for non-prerender fallback: false pages
|
||||
(route as Source).has = [
|
||||
(route as RouteWithSrc).has = [
|
||||
{
|
||||
type: 'cookie',
|
||||
key: '__prerender_bypass',
|
||||
@@ -2454,7 +2454,7 @@ export const build: BuildV2 = async ({
|
||||
? []
|
||||
: [
|
||||
// Custom Next.js 404 page
|
||||
{ handle: 'error' } as Handler,
|
||||
{ handle: 'error' } as RouteWithHandle,
|
||||
|
||||
...(i18n && (static404Page || hasIsr404Page)
|
||||
? [
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
Files,
|
||||
BuildResultV2Typical as BuildResult,
|
||||
} from '@vercel/build-utils';
|
||||
import { Handler, Route, Source } from '@vercel/routing-utils';
|
||||
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import { MAX_AGE_ONE_YEAR } from '.';
|
||||
import {
|
||||
NextRequiredServerFilesManifest,
|
||||
@@ -56,6 +56,8 @@ import prettyBytes from 'pretty-bytes';
|
||||
const CORRECT_NOT_FOUND_ROUTES_VERSION = 'v12.0.1';
|
||||
const CORRECT_MIDDLEWARE_ORDER_VERSION = 'v12.1.7-canary.29';
|
||||
const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';
|
||||
const EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION = 'v12.2.0';
|
||||
const CORRECTED_MANIFESTS_VERSION = 'v12.2.0';
|
||||
|
||||
export async function serverBuild({
|
||||
dynamicPages,
|
||||
@@ -133,6 +135,10 @@ export async function serverBuild({
|
||||
const lambdaPageKeys = Object.keys(lambdaPages);
|
||||
const internalPages = ['_app.js', '_error.js', '_document.js'];
|
||||
const pageBuildTraces = await glob('**/*.js.nft.json', pagesDir);
|
||||
const isEmptyAllowQueryForPrendered = semver.gte(
|
||||
nextVersion,
|
||||
EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION
|
||||
);
|
||||
const isCorrectNotFoundRoutes = semver.gte(
|
||||
nextVersion,
|
||||
CORRECT_NOT_FOUND_ROUTES_VERSION
|
||||
@@ -141,6 +147,10 @@ export async function serverBuild({
|
||||
nextVersion,
|
||||
CORRECT_MIDDLEWARE_ORDER_VERSION
|
||||
);
|
||||
const isCorrectManifests = semver.gte(
|
||||
nextVersion,
|
||||
CORRECTED_MANIFESTS_VERSION
|
||||
);
|
||||
let hasStatic500 = !!staticPages[path.join(entryDirectory, '500')];
|
||||
|
||||
if (lambdaPageKeys.length === 0) {
|
||||
@@ -404,7 +414,7 @@ export async function serverBuild({
|
||||
fsPath = path.join(requiredServerFilesManifest.appDir, file);
|
||||
}
|
||||
|
||||
const relativePath = path.join(path.relative(baseDir, fsPath));
|
||||
const relativePath = path.relative(baseDir, fsPath);
|
||||
const { mode } = await fs.lstat(fsPath);
|
||||
lstatSema.release();
|
||||
|
||||
@@ -671,18 +681,80 @@ export async function serverBuild({
|
||||
);
|
||||
|
||||
for (const group of combinedGroups) {
|
||||
const groupPageFiles: { [key: string]: PseudoFile } = {};
|
||||
|
||||
for (const page of [...group.pages, ...internalPages]) {
|
||||
const pageFileName = path.normalize(
|
||||
path.relative(baseDir, lambdaPages[page].fsPath)
|
||||
);
|
||||
groupPageFiles[pageFileName] = compressedPages[page];
|
||||
}
|
||||
|
||||
const updatedManifestFiles: { [name: string]: FileBlob } = {};
|
||||
|
||||
if (isCorrectManifests) {
|
||||
// filter dynamic routes to only the included dynamic routes
|
||||
// in this specific serverless function so that we don't
|
||||
// accidentally match a dynamic route while resolving that
|
||||
// is not actually in this specific serverless function
|
||||
for (const manifest of [
|
||||
'routes-manifest.json',
|
||||
'server/pages-manifest.json',
|
||||
] as const) {
|
||||
const fsPath = path.join(entryPath, outputDirectory, manifest);
|
||||
|
||||
const relativePath = path.relative(baseDir, fsPath);
|
||||
delete group.pseudoLayer[relativePath];
|
||||
|
||||
const manifestData = await fs.readJSON(fsPath);
|
||||
const normalizedPages = new Set(
|
||||
group.pages.map(page => {
|
||||
page = `/${page.replace(/\.js$/, '')}`;
|
||||
if (page === '/index') page = '/';
|
||||
return page;
|
||||
})
|
||||
);
|
||||
|
||||
switch (manifest) {
|
||||
case 'routes-manifest.json': {
|
||||
const filterItem = (item: { page: string }) =>
|
||||
normalizedPages.has(item.page);
|
||||
|
||||
manifestData.dynamicRoutes =
|
||||
manifestData.dynamicRoutes?.filter(filterItem);
|
||||
manifestData.staticRoutes =
|
||||
manifestData.staticRoutes?.filter(filterItem);
|
||||
break;
|
||||
}
|
||||
case 'server/pages-manifest.json': {
|
||||
for (const key of Object.keys(manifestData)) {
|
||||
if (isDynamicRoute(key) && !normalizedPages.has(key)) {
|
||||
delete manifestData[key];
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
throw new NowBuildError({
|
||||
message: `Unexpected manifest value ${manifest}, please contact support if this continues`,
|
||||
code: 'NEXT_MANIFEST_INVARIANT',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
updatedManifestFiles[relativePath] = new FileBlob({
|
||||
contentType: 'application/json',
|
||||
data: JSON.stringify(manifestData),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const lambda = await createLambdaFromPseudoLayers({
|
||||
files: launcherFiles,
|
||||
layers: [
|
||||
group.pseudoLayer,
|
||||
[...group.pages, ...internalPages].reduce((prev, page) => {
|
||||
const pageFileName = path.normalize(
|
||||
path.relative(baseDir, lambdaPages[page].fsPath)
|
||||
);
|
||||
prev[pageFileName] = compressedPages[page];
|
||||
return prev;
|
||||
}, {} as { [key: string]: PseudoFile }),
|
||||
],
|
||||
files: {
|
||||
...launcherFiles,
|
||||
...updatedManifestFiles,
|
||||
},
|
||||
layers: [group.pseudoLayer, groupPageFiles],
|
||||
handler: path.join(
|
||||
path.relative(
|
||||
baseDir,
|
||||
@@ -756,6 +828,7 @@ export async function serverBuild({
|
||||
static404Page,
|
||||
hasPages404: routesManifest.pages404,
|
||||
isCorrectNotFoundRoutes,
|
||||
isEmptyAllowQueryForPrendered,
|
||||
});
|
||||
|
||||
Object.keys(prerenderManifest.staticRoutes).forEach(route =>
|
||||
@@ -822,7 +895,7 @@ export async function serverBuild({
|
||||
const { staticFiles, publicDirectoryFiles, staticDirectoryFiles } =
|
||||
await getStaticFiles(entryPath, entryDirectory, outputDirectory);
|
||||
|
||||
const notFoundPreviewRoutes: Source[] = [];
|
||||
const notFoundPreviewRoutes: RouteWithSrc[] = [];
|
||||
|
||||
if (prerenderManifest.notFoundRoutes?.length > 0 && canUsePreviewMode) {
|
||||
// we combine routes into one src here to reduce the number of needed
|
||||
@@ -1378,7 +1451,7 @@ export async function serverBuild({
|
||||
},
|
||||
|
||||
// error handling
|
||||
{ handle: 'error' } as Handler,
|
||||
{ handle: 'error' } as RouteWithHandle,
|
||||
|
||||
// Custom Next.js 404 page
|
||||
...(i18n && (static404Page || hasIsr404Page || lambdaPages['404.js'])
|
||||
|
||||
@@ -16,7 +16,7 @@ import {
|
||||
EdgeFunction,
|
||||
} from '@vercel/build-utils';
|
||||
import { NodeFileTraceReasons } from '@vercel/nft';
|
||||
import { Header, Rewrite, Route, Source } from '@vercel/routing-utils';
|
||||
import { Header, Rewrite, Route, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import { Sema } from 'async-sema';
|
||||
import crc32 from 'buffer-crc32';
|
||||
import fs, { lstat, stat } from 'fs-extra';
|
||||
@@ -273,8 +273,8 @@ export async function getDynamicRoutes(
|
||||
canUsePreviewMode?: boolean,
|
||||
bypassToken?: string,
|
||||
isServerMode?: boolean,
|
||||
dynamicMiddlewareRouteMap?: Map<string, Source>
|
||||
): Promise<Source[]> {
|
||||
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>
|
||||
): Promise<RouteWithSrc[]> {
|
||||
if (routesManifest) {
|
||||
switch (routesManifest.version) {
|
||||
case 1:
|
||||
@@ -307,7 +307,7 @@ export async function getDynamicRoutes(
|
||||
}
|
||||
|
||||
const { page, namedRegex, regex, routeKeys } = params;
|
||||
const route: Source = {
|
||||
const route: RouteWithSrc = {
|
||||
src: namedRegex || regex,
|
||||
dest: `${!isDev ? path.join('/', entryDirectory, page) : page}${
|
||||
routeKeys
|
||||
@@ -400,7 +400,7 @@ export async function getDynamicRoutes(
|
||||
matcher: getRouteRegex && getRouteRegex(pageName).re,
|
||||
}));
|
||||
|
||||
const routes: Source[] = [];
|
||||
const routes: RouteWithSrc[] = [];
|
||||
pageMatchers.forEach(pageMatcher => {
|
||||
// in `vercel dev` we don't need to prefix the destination
|
||||
const dest = !isDev
|
||||
@@ -419,7 +419,7 @@ export async function getDynamicRoutes(
|
||||
}
|
||||
|
||||
export function localizeDynamicRoutes(
|
||||
dynamicRoutes: Source[],
|
||||
dynamicRoutes: RouteWithSrc[],
|
||||
dynamicPrefix: string,
|
||||
entryDirectory: string,
|
||||
staticPages: Files,
|
||||
@@ -427,8 +427,8 @@ export function localizeDynamicRoutes(
|
||||
routesManifest?: RoutesManifest,
|
||||
isServerMode?: boolean,
|
||||
isCorrectLocaleAPIRoutes?: boolean
|
||||
): Source[] {
|
||||
return dynamicRoutes.map((route: Source) => {
|
||||
): RouteWithSrc[] {
|
||||
return dynamicRoutes.map((route: RouteWithSrc) => {
|
||||
// i18n is already handled for middleware
|
||||
if (route.middleware !== undefined || route.middlewarePath !== undefined)
|
||||
return route;
|
||||
@@ -1665,6 +1665,7 @@ type OnPrerenderRouteArgs = {
|
||||
pageLambdaMap: { [key: string]: string };
|
||||
routesManifest?: RoutesManifest;
|
||||
isCorrectNotFoundRoutes?: boolean;
|
||||
isEmptyAllowQueryForPrendered?: boolean;
|
||||
};
|
||||
let prerenderGroup = 1;
|
||||
|
||||
@@ -1698,6 +1699,7 @@ export const onPrerenderRoute =
|
||||
pageLambdaMap,
|
||||
routesManifest,
|
||||
isCorrectNotFoundRoutes,
|
||||
isEmptyAllowQueryForPrendered,
|
||||
} = prerenderRouteArgs;
|
||||
|
||||
if (isBlocking && isFallback) {
|
||||
@@ -1901,7 +1903,6 @@ export const onPrerenderRoute =
|
||||
// a given path. All other query keys will be striped. We can automatically
|
||||
// detect this for prerender (ISR) pages by reading the routes manifest file.
|
||||
const pageKey = srcRoute || routeKey;
|
||||
const isDynamic = isDynamicRoute(pageKey);
|
||||
const route = routesManifest?.dynamicRoutes.find(
|
||||
(r): r is RoutesManifestRoute =>
|
||||
r.page === pageKey && !('isMiddleware' in r)
|
||||
@@ -1911,14 +1912,33 @@ export const onPrerenderRoute =
|
||||
// we have sufficient information to set it
|
||||
let allowQuery: string[] | undefined;
|
||||
|
||||
if (routeKeys) {
|
||||
// if we have routeKeys in the routes-manifest we use those
|
||||
// for allowQuery for dynamic routes
|
||||
allowQuery = Object.values(routeKeys);
|
||||
} else if (!isDynamic) {
|
||||
// for non-dynamic routes we use an empty array since
|
||||
// no query values bust the cache for non-dynamic prerenders
|
||||
allowQuery = [];
|
||||
if (isEmptyAllowQueryForPrendered) {
|
||||
const isDynamic = isDynamicRoute(routeKey);
|
||||
|
||||
if (!isDynamic) {
|
||||
// for non-dynamic routes we use an empty array since
|
||||
// no query values bust the cache for non-dynamic prerenders
|
||||
// prerendered paths also do not pass allowQuery as they match
|
||||
// during handle: 'filesystem' so should not cache differently
|
||||
// by query values
|
||||
allowQuery = [];
|
||||
} else if (routeKeys) {
|
||||
// if we have routeKeys in the routes-manifest we use those
|
||||
// for allowQuery for dynamic routes
|
||||
allowQuery = Object.values(routeKeys);
|
||||
}
|
||||
} else {
|
||||
const isDynamic = isDynamicRoute(pageKey);
|
||||
|
||||
if (routeKeys) {
|
||||
// if we have routeKeys in the routes-manifest we use those
|
||||
// for allowQuery for dynamic routes
|
||||
allowQuery = Object.values(routeKeys);
|
||||
} else if (!isDynamic) {
|
||||
// for non-dynamic routes we use an empty array since
|
||||
// no query values bust the cache for non-dynamic prerenders
|
||||
allowQuery = [];
|
||||
}
|
||||
}
|
||||
|
||||
prerenders[outputPathPage] = new Prerender({
|
||||
@@ -2292,7 +2312,7 @@ export async function getMiddlewareBundle({
|
||||
|
||||
const source: {
|
||||
staticRoutes: Route[];
|
||||
dynamicRouteMap: Map<string, Source>;
|
||||
dynamicRouteMap: Map<string, RouteWithSrc>;
|
||||
edgeFunctions: Record<string, EdgeFunction>;
|
||||
} = {
|
||||
staticRoutes: [],
|
||||
|
||||
8  packages/next/test/fixtures/00-mixed-dynamic-routes/index.test.js  vendored  Normal file
@@ -0,0 +1,8 @@
const path = require('path');
const { deployAndTest } = require('../../utils');

describe(`${__dirname.split(path.sep).pop()}`, () => {
  it('should deploy and pass probe checks', async () => {
    await deployAndTest(__dirname);
  });
});
13  packages/next/test/fixtures/00-mixed-dynamic-routes/package.json  vendored  Normal file
@@ -0,0 +1,13 @@
{
  "dependencies": {
    "next": "canary",
    "react": "^18.2.0",
    "react-dom": "^18.2.0"
  },
  "scripts": {
    "dev": "next dev",
    "build": "next build",
    "start": "next start",
    "lint": "next lint"
  }
}
17  packages/next/test/fixtures/00-mixed-dynamic-routes/pages/[...slug].js  vendored  Normal file
@@ -0,0 +1,17 @@
export default function Page(props) {
  return (
    <>
      <p>/[...slug] page</p>
      <p>{JSON.stringify(props)}</p>
    </>
  );
}

export const getServerSideProps = async ({ params }) => {
  return {
    props: {
      params,
      page: '[...slug]',
    },
  };
};
29  packages/next/test/fixtures/00-mixed-dynamic-routes/pages/[slug].js  vendored  Normal file
@@ -0,0 +1,29 @@
export default function Page(props) {
  return (
    <>
      <p>/[slug] page</p>
      <p>{JSON.stringify(props)}</p>
    </>
  );
}

export const getStaticProps = async ({ params }) => {
  return {
    props: {
      params,
      page: '[slug]',
      now: Date.now(),
    },
  };
};

export const getStaticPaths = async () => {
  return {
    paths: [
      { params: { slug: 'static-1' } },
      { params: { slug: 'static-2' } },
      { params: { slug: 'static-3' } },
    ],
    fallback: false,
  };
};
17  packages/next/test/fixtures/00-mixed-dynamic-routes/pages/index.js  vendored  Normal file
@@ -0,0 +1,17 @@
export default function Page(props) {
  return (
    <>
      <p>index page</p>
      <p>{JSON.stringify(props)}</p>
    </>
  );
}

export const getStaticProps = async () => {
  return {
    props: {
      page: 'index',
      now: Date.now(),
    },
  };
};
36  packages/next/test/fixtures/00-mixed-dynamic-routes/vercel.json  vendored  Normal file
@@ -0,0 +1,36 @@
{
  "version": 2,
  "builds": [{ "src": "package.json", "use": "@vercel/next" }],
  "probes": [
    {
      "path": "/",
      "status": 200,
      "mustContain": "index page"
    },
    {
      "path": "/static-1",
      "status": 200,
      "mustContain": "[slug] page"
    },
    {
      "path": "/static-2",
      "status": 200,
      "mustContain": "[slug] page"
    },
    {
      "path": "/dynamic-1",
      "status": 200,
      "mustContain": "[...slug] page"
    },
    {
      "path": "/dynamic-2",
      "status": 200,
      "mustContain": "[...slug] page"
    },
    {
      "path": "/long/dynamic",
      "status": 200,
      "mustContain": "[...slug] page"
    }
  ]
}
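Each probe above pairs a request path with the expected status code and a substring that must appear in the response body; deployAndTest from packages/next/test/utils.ts deploys the fixture and evaluates them. A hypothetical stand-alone runner showing the same check (illustrative only, not the harness code):

  import fetch from 'node-fetch';

  interface Probe {
    path: string;
    status: number;
    mustContain?: string;
  }

  async function runProbes(deploymentUrl: string, probes: Probe[]): Promise<void> {
    for (const probe of probes) {
      const res = await fetch(`${deploymentUrl}${probe.path}`);
      if (res.status !== probe.status) {
        throw new Error(`${probe.path}: expected ${probe.status}, got ${res.status}`);
      }
      const body = await res.text();
      if (probe.mustContain && !body.includes(probe.mustContain)) {
        throw new Error(`${probe.path}: response does not contain "${probe.mustContain}"`);
      }
    }
  }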
@@ -1,8 +1,144 @@
|
||||
/* eslint-env jest */
|
||||
const path = require('path');
|
||||
const { deployAndTest } = require('../../utils');
|
||||
const cheerio = require('cheerio');
|
||||
const { deployAndTest, check, waitFor } = require('../../utils');
const fetch = require('../../../../../test/lib/deployment/fetch-retry');

async function checkForChange(url, initialValue, getNewValue) {
  return check(async () => {
    const res = await fetch(url);

    if (res.status !== 200) {
      throw new Error(`Invalid status code ${res.status}`);
    }
    const newValue = await getNewValue(res);

    return initialValue !== newValue
      ? 'success'
      : JSON.stringify({ initialValue, newValue });
  }, 'success');
}

const ctx = {};

describe(`${__dirname.split(path.sep).pop()}`, () => {
  it('should deploy and pass probe checks', async () => {
    await deployAndTest(__dirname);
    const info = await deployAndTest(__dirname);
    Object.assign(ctx, info);
  });

  it.each([
    {
      title: 'should update content for prerendered path correctly',
      pathsToCheck: [
        { urlPath: '/fallback-blocking/first' },
        { urlPath: '/fallback-blocking/first', query: '?slug=first' },
        { urlPath: '/fallback-blocking/first', query: '?slug=random' },
        { urlPath: '/fallback-blocking/first', query: '?another=value' },
      ],
    },
    {
      title: 'should update content for non-prerendered path correctly',
      pathsToCheck: [
        { urlPath: '/fallback-blocking/on-demand-2' },
        {
          urlPath: '/fallback-blocking/on-demand-2',
          query: '?slug=on-demand-2',
        },
        { urlPath: '/fallback-blocking/on-demand-2', query: '?slug=random' },
        { urlPath: '/fallback-blocking/on-demand-2', query: '?another=value' },
      ],
    },
  ])('$title', async ({ pathsToCheck }) => {
    let initialRandom;
    let initialRandomData;
    let preRevalidateRandom;
    let preRevalidateRandomData;

    const checkPaths = async pathsToCheck => {
      for (const { urlPath, query } of pathsToCheck) {
        console.log('checking', {
          urlPath,
          query,
          initialRandom,
          preRevalidateRandom,
        });

        if (preRevalidateRandom) {
          // wait for change as cache may take a little to propagate
          const initialUrl = `${ctx.deploymentUrl}${urlPath}${query || ''}`;
          await checkForChange(initialUrl, preRevalidateRandom, async () => {
            const res = await fetch(initialUrl);
            const $ = cheerio.load(await res.text());
            return JSON.parse($('#props').text()).random;
          });
        }

        const res = await fetch(`${ctx.deploymentUrl}${urlPath}${query || ''}`);
        expect(res.status).toBe(200);

        const $ = await cheerio.load(await res.text());
        const props = JSON.parse($('#props').text());

        if (initialRandom) {
          // for fallback paths the initial value is generated
          // in the foreground and then a revalidation is kicked off
          // in the background so the initial value will be replaced
          if (initialRandom !== props.random && urlPath.includes('on-demand')) {
            initialRandom = props.random;
          } else {
            expect(initialRandom).toBe(props.random);
          }
        } else {
          initialRandom = props.random;
        }
        expect(isNaN(initialRandom)).toBe(false);

        const dataRes = await fetch(
          `${ctx.deploymentUrl}/_next/data/testing-build-id${urlPath}.json${
            query || ''
          }`
        );
        expect(dataRes.status).toBe(200);

        const { pageProps: dataProps } = await dataRes.json();

        if (initialRandomData) {
          // for fallback paths the initial value is generated
          // in the foreground and then a revalidation is kicked off
          // in the background so the initial value will be replaced
          if (
            initialRandomData !== dataProps.random &&
            urlPath.includes('on-demand-2')
          ) {
            initialRandomData = dataProps.random;
          } else {
            expect(initialRandomData).toBe(dataProps.random);
          }
        } else {
          initialRandomData = dataProps.random;
        }
        expect(isNaN(initialRandomData)).toBe(false);
      }
    };

    await checkPaths(pathsToCheck);

    preRevalidateRandom = initialRandom;
    preRevalidateRandomData = initialRandomData;

    initialRandom = undefined;
    initialRandomData = undefined;

    const revalidateRes = await fetch(
      `${ctx.deploymentUrl}/api/revalidate?urlPath=${pathsToCheck[0].urlPath}`
    );
    expect(revalidateRes.status).toBe(200);
    expect((await revalidateRes.json()).revalidated).toBe(true);

    await checkPaths(pathsToCheck);

    expect(preRevalidateRandom).toBeDefined();
    expect(preRevalidateRandomData).toBeDefined();
  });
});
10
packages/next/test/fixtures/00-server-build/pages/api/revalidate.js
vendored
Normal file
@@ -0,0 +1,10 @@
export default async function handler(req, res) {
  try {
    console.log('revalidating', req.query.urlPath);
    await res.revalidate(req.query.urlPath);
    return res.json({ revalidated: true });
  } catch (err) {
    console.error(err);
    return res.json({ revalidated: false });
  }
}
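For readers following the new test above, here is a minimal usage sketch of this revalidation route. It assumes a `deploymentUrl` such as the one `deployAndTest` stores on `ctx`, and a `fetch` implementation like the retry helper required at the top of this diff; none of these lines are part of the commit itself.

// Hedged sketch (JavaScript): triggering on-demand revalidation the way the test does.
async function triggerRevalidate(deploymentUrl, urlPath) {
  // `fetch` is assumed to be the fetch-retry helper (or Node 18+ global fetch).
  const res = await fetch(`${deploymentUrl}/api/revalidate?urlPath=${urlPath}`);
  const body = await res.json();
  if (!body.revalidated) {
    throw new Error(`revalidation of ${urlPath} failed`);
  }
  return body;
}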
@@ -16,7 +16,7 @@ export const getStaticProps = ({ params }) => {

export const getStaticPaths = () => {
  return {
    paths: ['/fallback-blocking/first'],
    paths: ['/fallback-blocking/first', '/fallback-blocking/on-demand-1'],
    fallback: 'blocking',
  };
};

@@ -78,8 +78,20 @@ it('should build using server build', async () => {
  expect(output['dynamic/[slug]'].maxDuration).toBe(5);
  expect(output['fallback/[slug]'].type).toBe('Prerender');
  expect(output['fallback/[slug]'].allowQuery).toEqual(['slug']);
  expect(output['_next/data/testing-build-id/fallback/[slug].json'].type).toBe(
    'Prerender'
  );
  expect(
    output['_next/data/testing-build-id/fallback/[slug].json'].allowQuery
  ).toEqual(['slug']);
  expect(output['fallback/first'].type).toBe('Prerender');
  expect(output['fallback/first'].allowQuery).toEqual(['slug']);
  expect(output['fallback/first'].allowQuery).toEqual([]);
  expect(output['_next/data/testing-build-id/fallback/first.json'].type).toBe(
    'Prerender'
  );
  expect(
    output['_next/data/testing-build-id/fallback/first.json'].allowQuery
  ).toEqual([]);
  expect(output['api'].type).toBe('Lambda');
  expect(output['api'].allowQuery).toBe(undefined);
  expect(output['api'].memory).toBe(128);

@@ -1,3 +1,6 @@
module.exports = (phase, { defaultConfig }) => ({
  pageExtensions: [...defaultConfig.pageExtensions, 'hello.js'],
  generateBuildId() {
    return 'testing-build-id';
  },
});

2
packages/next/test/utils.ts
vendored
@@ -129,7 +129,7 @@ export async function deployAndTest(fixtureDir) {
  };
}

async function waitFor(milliseconds) {
export async function waitFor(milliseconds) {
  return new Promise(resolve => {
    setTimeout(resolve, milliseconds);
  });
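A note on the utils.ts change above: `waitFor` is now exported so fixture tests can pull it in alongside `deployAndTest` and `check`, as the new test at the top of this diff does. A minimal sketch, assuming the same relative require path used there:

// Hedged sketch (JavaScript): using the newly exported helper inside a fixture test.
const { waitFor } = require('../../utils');

it('pauses between polls', async () => {
  await waitFor(1000); // resolves after roughly one second
});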
@@ -1,6 +1,6 @@
{
  "name": "@vercel/node",
  "version": "2.4.3",
  "version": "2.4.5",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -31,13 +31,13 @@
  },
  "dependencies": {
    "@types/node": "*",
    "@vercel/build-utils": "5.0.2",
    "@vercel/build-utils": "5.0.4",
    "@vercel/node-bridge": "3.0.0",
    "@vercel/static-config": "2.0.1",
    "edge-runtime": "1.0.1",
    "esbuild": "0.14.47",
    "exit-hook": "2.2.1",
    "node-fetch": "2.6.1",
    "node-fetch": "2.6.7",
    "ts-node": "8.9.1",
    "typescript": "4.3.4"
  },

@@ -148,21 +148,26 @@ async function serializeRequest(message: IncomingMessage) {
  });
}

async function compileUserCode(entrypoint: string) {
async function compileUserCode(
  entrypointPath: string,
  entrypointLabel: string
) {
  try {
    const result = await esbuild.build({
      platform: 'node',
      target: 'node14',
      sourcemap: 'inline',
      bundle: true,
      entryPoints: [entrypoint],
      entryPoints: [entrypointPath],
      write: false, // operate in memory
      format: 'cjs',
    });

    const compiledFile = result.outputFiles?.[0];
    if (!compiledFile) {
      throw new Error(`Compilation of ${entrypoint} produced no output files.`);
      throw new Error(
        `Compilation of ${entrypointLabel} produced no output files.`
      );
    }

    const userCode = new TextDecoder().decode(compiledFile.contents);
@@ -198,6 +203,10 @@ async function compileUserCode(entrypoint: string) {

      let response = await edgeHandler(event.request, event);

      if (!response) {
        throw new Error('Edge Function "${entrypointLabel}" did not return a response.');
      }

      return event.respondWith(response);
    } catch (error) {
      // we can't easily show a meaningful stack trace
@@ -233,6 +242,9 @@ async function createEdgeRuntime(userCode: string | undefined) {
        module: {
          exports: {},
        },
        process: {
          env: process.env,
        },
      });
      return context;
    },
@@ -252,9 +264,10 @@ async function createEdgeRuntime(userCode: string | undefined) {
}

async function createEdgeEventHandler(
  entrypoint: string
  entrypointPath: string,
  entrypointLabel: string
): Promise<(request: IncomingMessage) => Promise<VercelProxyResponse>> {
  const userCode = await compileUserCode(entrypoint);
  const userCode = await compileUserCode(entrypointPath, entrypointLabel);
  const server = await createEdgeRuntime(userCode);

  return async function (request: IncomingMessage) {
@@ -317,17 +330,17 @@ async function createEventHandler(
  config: Config,
  options: { shouldAddHelpers: boolean }
): Promise<(request: IncomingMessage) => Promise<VercelProxyResponse>> {
  const entryPointPath = join(process.cwd(), entrypoint!);
  const runtime = parseRuntime(entrypoint, entryPointPath);
  const entrypointPath = join(process.cwd(), entrypoint!);
  const runtime = parseRuntime(entrypoint, entrypointPath);

  // `middleware.js`/`middleware.ts` file is always run as
  // an Edge Function, otherwise needs to be opted-in via
  // `export const config = { runtime: 'experimental-edge' }`
  if (config.middleware === true || runtime === 'experimental-edge') {
    return createEdgeEventHandler(entryPointPath);
    return createEdgeEventHandler(entrypointPath, entrypoint);
  }

  return createServerlessEventHandler(entryPointPath, options);
  return createServerlessEventHandler(entrypointPath, options);
}

let handleEvent: (request: IncomingMessage) => Promise<VercelProxyResponse>;
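The `entrypointLabel` threaded through the Edge dev server above lets error messages name the user-facing entrypoint rather than an absolute path. A minimal sketch of the resulting call shape, using assumed example values that are not part of this diff:

// Hedged sketch (JavaScript-style): assumed values mirroring createEventHandler above.
const entrypoint = 'api/edge.js'; // label surfaced in error messages
const entrypointPath = join(process.cwd(), entrypoint); // absolute path handed to esbuild
// createEdgeEventHandler(entrypointPath, entrypoint) will then report, for example,
// `Compilation of api/edge.js produced no output files.` instead of printing the full path.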
@@ -1,6 +1,6 @@
{
  "name": "@vercel/python",
  "version": "3.0.6",
  "version": "3.1.0",
  "main": "./dist/index.js",
  "license": "MIT",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",
@@ -23,7 +23,7 @@
  "devDependencies": {
    "@types/execa": "^0.9.0",
    "@types/jest": "27.4.1",
    "@vercel/build-utils": "5.0.2",
    "@vercel/build-utils": "5.0.4",
    "@vercel/ncc": "0.24.0",
    "execa": "^1.0.0",
    "typescript": "4.3.4"

@@ -25,9 +25,6 @@ const allOptions: PythonVersion[] = [
  },
];

const upstreamProvider =
  'This change is the result of a decision made by an upstream infrastructure provider (AWS)';

function getDevPythonVersion(): PythonVersion {
  // Use the system-installed version of `python3` when running `vercel dev`
  return {
@@ -75,14 +72,14 @@ export function getSupportedPythonVersion({
    throw new NowBuildError({
      code: 'BUILD_UTILS_PYTHON_VERSION_DISCONTINUED',
      link: 'http://vercel.link/python-version',
      message: `Python version "${selection.version}" detected in Pipfile.lock is discontinued and must be upgraded. ${upstreamProvider}.`,
      message: `Python version "${selection.version}" detected in Pipfile.lock is discontinued and must be upgraded.`,
    });
  }

  if (selection.discontinueDate) {
    const d = selection.discontinueDate.toISOString().split('T')[0];
    console.warn(
      `Error: Python version "${selection.version}" detected in Pipfile.lock is deprecated. Deployments created on or after ${d} will fail to build. ${upstreamProvider}. http://vercel.link/python-version`
      `Error: Python version "${selection.version}" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after ${d} will fail to build. http://vercel.link/python-version`
    );
  }

@@ -9,4 +9,4 @@ verify_ssl = true
flask = "*"

[requires]
python_version = "3.6"
python_version = "3.9"

209
packages/python/test/fixtures/02-wsgi-pipenv/Pipfile.lock
generated
vendored
@@ -1,90 +1,123 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "8ec50e78e90ad609e540d41d1ed90f3fb880ffbdf6049b0a6b2f1a00158a3288"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.6"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"click": {
|
||||
"hashes": [
|
||||
"sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
|
||||
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
|
||||
],
|
||||
"version": "==7.0"
|
||||
},
|
||||
"flask": {
|
||||
"hashes": [
|
||||
"sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48",
|
||||
"sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.2"
|
||||
},
|
||||
"itsdangerous": {
|
||||
"hashes": [
|
||||
"sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
|
||||
"sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
|
||||
],
|
||||
"version": "==1.1.0"
|
||||
},
|
||||
"jinja2": {
|
||||
"hashes": [
|
||||
"sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013",
|
||||
"sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b"
|
||||
],
|
||||
"version": "==2.10.1"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
|
||||
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
|
||||
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
|
||||
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
|
||||
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
|
||||
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
|
||||
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
|
||||
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
|
||||
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
|
||||
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
|
||||
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
|
||||
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
|
||||
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
|
||||
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
|
||||
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
|
||||
"sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
|
||||
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
|
||||
"sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
|
||||
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
|
||||
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
|
||||
"sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
|
||||
"sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
|
||||
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
|
||||
"sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
|
||||
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
|
||||
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
|
||||
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
|
||||
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"
|
||||
],
|
||||
"version": "==1.1.1"
|
||||
},
|
||||
"werkzeug": {
|
||||
"hashes": [
|
||||
"sha256:0a73e8bb2ff2feecfc5d56e6f458f5b99290ef34f565ffb2665801ff7de6af7a",
|
||||
"sha256:7fad9770a8778f9576693f0cc29c7dcc36964df916b83734f4431c0e612a7fbc"
|
||||
],
|
||||
"version": "==0.15.2"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "f7f1cea682a03d85328caf2f88382c4380283d3892a9ba31b374784fb29536c4"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.9"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"click": {
|
||||
"hashes": [
|
||||
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
|
||||
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==8.1.3"
|
||||
},
|
||||
"flask": {
|
||||
"hashes": [
|
||||
"sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb",
|
||||
"sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.1.3"
|
||||
},
|
||||
"importlib-metadata": {
|
||||
"hashes": [
|
||||
"sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670",
|
||||
"sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"
|
||||
],
|
||||
"markers": "python_version < '3.10'",
|
||||
"version": "==4.12.0"
|
||||
},
|
||||
"itsdangerous": {
|
||||
"hashes": [
|
||||
"sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44",
|
||||
"sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.1.2"
|
||||
},
|
||||
"jinja2": {
|
||||
"hashes": [
|
||||
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
|
||||
"sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==3.1.2"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
"sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003",
|
||||
"sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88",
|
||||
"sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5",
|
||||
"sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7",
|
||||
"sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a",
|
||||
"sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603",
|
||||
"sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1",
|
||||
"sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135",
|
||||
"sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247",
|
||||
"sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6",
|
||||
"sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601",
|
||||
"sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77",
|
||||
"sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02",
|
||||
"sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e",
|
||||
"sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63",
|
||||
"sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f",
|
||||
"sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980",
|
||||
"sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b",
|
||||
"sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812",
|
||||
"sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff",
|
||||
"sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96",
|
||||
"sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1",
|
||||
"sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925",
|
||||
"sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a",
|
||||
"sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6",
|
||||
"sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e",
|
||||
"sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f",
|
||||
"sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4",
|
||||
"sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f",
|
||||
"sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3",
|
||||
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c",
|
||||
"sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a",
|
||||
"sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417",
|
||||
"sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a",
|
||||
"sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a",
|
||||
"sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37",
|
||||
"sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452",
|
||||
"sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933",
|
||||
"sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a",
|
||||
"sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.1.1"
|
||||
},
|
||||
"werkzeug": {
|
||||
"hashes": [
|
||||
"sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6",
|
||||
"sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.1.2"
|
||||
},
|
||||
"zipp": {
|
||||
"hashes": [
|
||||
"sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2",
|
||||
"sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==3.8.1"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
||||
|
||||
@@ -4,8 +4,7 @@
  "probes": [
    {
      "path": "/",
      "mustContain": "wsgi:RANDOMNESS_PLACEHOLDER",
      "logMustContain": "Python version \"3.6\" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build"
      "mustContain": "wsgi:RANDOMNESS_PLACEHOLDER"
    }
  ]
}

@@ -9,4 +9,4 @@ verify_ssl = true
flask = "*"

[requires]
python_version = "3.6"
python_version = "3.9"

209
packages/python/test/fixtures/03-env-vars/Pipfile.lock
generated
vendored
@@ -1,90 +1,123 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "8ec50e78e90ad609e540d41d1ed90f3fb880ffbdf6049b0a6b2f1a00158a3288"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.6"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"click": {
|
||||
"hashes": [
|
||||
"sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
|
||||
"sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
|
||||
],
|
||||
"version": "==7.0"
|
||||
},
|
||||
"flask": {
|
||||
"hashes": [
|
||||
"sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48",
|
||||
"sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.2"
|
||||
},
|
||||
"itsdangerous": {
|
||||
"hashes": [
|
||||
"sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19",
|
||||
"sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749"
|
||||
],
|
||||
"version": "==1.1.0"
|
||||
},
|
||||
"jinja2": {
|
||||
"hashes": [
|
||||
"sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013",
|
||||
"sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b"
|
||||
],
|
||||
"version": "==2.10.1"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
|
||||
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
|
||||
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
|
||||
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
|
||||
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
|
||||
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
|
||||
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
|
||||
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
|
||||
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
|
||||
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
|
||||
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
|
||||
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
|
||||
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
|
||||
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
|
||||
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
|
||||
"sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
|
||||
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
|
||||
"sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
|
||||
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
|
||||
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
|
||||
"sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
|
||||
"sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
|
||||
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
|
||||
"sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
|
||||
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
|
||||
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
|
||||
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
|
||||
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"
|
||||
],
|
||||
"version": "==1.1.1"
|
||||
},
|
||||
"werkzeug": {
|
||||
"hashes": [
|
||||
"sha256:0a73e8bb2ff2feecfc5d56e6f458f5b99290ef34f565ffb2665801ff7de6af7a",
|
||||
"sha256:7fad9770a8778f9576693f0cc29c7dcc36964df916b83734f4431c0e612a7fbc"
|
||||
],
|
||||
"version": "==0.15.2"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "f7f1cea682a03d85328caf2f88382c4380283d3892a9ba31b374784fb29536c4"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.9"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"click": {
|
||||
"hashes": [
|
||||
"sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e",
|
||||
"sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==8.1.3"
|
||||
},
|
||||
"flask": {
|
||||
"hashes": [
|
||||
"sha256:15972e5017df0575c3d6c090ba168b6db90259e620ac8d7ea813a396bad5b6cb",
|
||||
"sha256:9013281a7402ad527f8fd56375164f3aa021ecfaff89bfe3825346c24f87e04c"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.1.3"
|
||||
},
|
||||
"importlib-metadata": {
|
||||
"hashes": [
|
||||
"sha256:637245b8bab2b6502fcbc752cc4b7a6f6243bb02b31c5c26156ad103d3d45670",
|
||||
"sha256:7401a975809ea1fdc658c3aa4f78cc2195a0e019c5cbc4c06122884e9ae80c23"
|
||||
],
|
||||
"markers": "python_version < '3.10'",
|
||||
"version": "==4.12.0"
|
||||
},
|
||||
"itsdangerous": {
|
||||
"hashes": [
|
||||
"sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44",
|
||||
"sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.1.2"
|
||||
},
|
||||
"jinja2": {
|
||||
"hashes": [
|
||||
"sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852",
|
||||
"sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==3.1.2"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
"sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003",
|
||||
"sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88",
|
||||
"sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5",
|
||||
"sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7",
|
||||
"sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a",
|
||||
"sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603",
|
||||
"sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1",
|
||||
"sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135",
|
||||
"sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247",
|
||||
"sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6",
|
||||
"sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601",
|
||||
"sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77",
|
||||
"sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02",
|
||||
"sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e",
|
||||
"sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63",
|
||||
"sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f",
|
||||
"sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980",
|
||||
"sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b",
|
||||
"sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812",
|
||||
"sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff",
|
||||
"sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96",
|
||||
"sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1",
|
||||
"sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925",
|
||||
"sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a",
|
||||
"sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6",
|
||||
"sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e",
|
||||
"sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f",
|
||||
"sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4",
|
||||
"sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f",
|
||||
"sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3",
|
||||
"sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c",
|
||||
"sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a",
|
||||
"sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417",
|
||||
"sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a",
|
||||
"sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a",
|
||||
"sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37",
|
||||
"sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452",
|
||||
"sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933",
|
||||
"sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a",
|
||||
"sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.1.1"
|
||||
},
|
||||
"werkzeug": {
|
||||
"hashes": [
|
||||
"sha256:1ce08e8093ed67d638d63879fd1ba3735817f7a80de3674d293f5984f25fb6e6",
|
||||
"sha256:72a4b735692dd3135217911cbeaa1be5fa3f62bffb8745c5215420a03dc55255"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==2.1.2"
|
||||
},
|
||||
"zipp": {
|
||||
"hashes": [
|
||||
"sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2",
|
||||
"sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==3.8.1"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
||||
|
||||
@@ -7,8 +7,7 @@
  "probes": [
    {
      "path": "/",
      "mustContain": "RANDOMNESS_PLACEHOLDER:env",
      "logMustContain": "Python version \"3.6\" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build"
      "mustContain": "RANDOMNESS_PLACEHOLDER:env"
    }
  ]
}

@@ -9,4 +9,4 @@ verify_ssl = true
django = "*"

[requires]
python_version = "3.6"
python_version = "3.9"

84
packages/python/test/fixtures/08-django/Pipfile.lock
generated
vendored
@@ -1,43 +1,45 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "68309cd71a258c30a39567fce09a09ad5c4ff0bdc85b6fba22b47598c985c883"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.6"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"django": {
|
||||
"hashes": [
|
||||
"sha256:7c3543e4fb070d14e10926189a7fcf42ba919263b7473dceaefce34d54e8a119",
|
||||
"sha256:a2814bffd1f007805b19194eb0b9a331933b82bd5da1c3ba3d7b7ba16e06dc4b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.2"
|
||||
},
|
||||
"pytz": {
|
||||
"hashes": [
|
||||
"sha256:32b0891edff07e28efe91284ed9c31e123d84bea3fd98e1f72be2508f43ef8d9",
|
||||
"sha256:d5f05e487007e29e03409f9398d074e158d920d36eb82eaf66fb1136b0c5374c"
|
||||
],
|
||||
"version": "==2018.9"
|
||||
},
|
||||
"sqlparse": {
|
||||
"hashes": [
|
||||
"sha256:40afe6b8d4b1117e7dff5504d7a8ce07d9a1b15aeeade8a2d10f130a834f8177",
|
||||
"sha256:7c3dca29c022744e95b547e867cee89f4fce4373f3549ccd8797d8eb52cdb873"
|
||||
],
|
||||
"version": "==0.3.0"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "c36ae28fea7b9a4cc02145632e2f41469af2e7b38b801903abb8333d3306f36b"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.9"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"asgiref": {
|
||||
"hashes": [
|
||||
"sha256:1d2880b792ae8757289136f1db2b7b99100ce959b2aa57fd69dab783d05afac4",
|
||||
"sha256:4a29362a6acebe09bf1d6640db38c1dc3d9217c68e6f9f6204d72667fc19a424"
|
||||
],
|
||||
"markers": "python_version >= '3.7'",
|
||||
"version": "==3.5.2"
|
||||
},
|
||||
"django": {
|
||||
"hashes": [
|
||||
"sha256:a67a793ff6827fd373555537dca0da293a63a316fe34cb7f367f898ccca3c3ae",
|
||||
"sha256:ca54ebedfcbc60d191391efbf02ba68fb52165b8bf6ccd6fe71f098cac1fe59e"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.0.6"
|
||||
},
|
||||
"sqlparse": {
|
||||
"hashes": [
|
||||
"sha256:0c00730c74263a94e5a9919ade150dfc3b19c574389985446148402998287dae",
|
||||
"sha256:48719e356bb8b42991bdbb1e8b83223757b93789c00910a616a071910ca4a64d"
|
||||
],
|
||||
"markers": "python_version >= '3.5'",
|
||||
"version": "==0.4.2"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff.