Mirror of https://github.com/LukeHagar/vercel.git
Synced 2025-12-11 12:57:46 +00:00

Compare commits: 35 commits
add/vc-ope ... @vercel/py
| SHA1 |
|---|
| bef1aec766 |
| 4f4a42813f |
| 181a492d91 |
| 1be7a80bb8 |
| 0428d4744e |
| 2a929a4bb9 |
| accd308dc5 |
| e2d4efab08 |
| 7e0dd6f808 |
| 8971e02e49 |
| 10c91c8579 |
| bfdbe58675 |
| 7bdaf107b7 |
| 8de100f0e1 |
| 38a6785859 |
| c67d1a8525 |
| c5a7c574a2 |
| d2f8d178f7 |
| f9a747764c |
| 27d80f13cd |
| 8c668c925d |
| 4b1b33c143 |
| a8d4147554 |
| 09339f494d |
| ee4d772ae9 |
| 61e8103404 |
| fb4f477325 |
| 016bff848e |
| 183e411f7c |
| 070e300148 |
| cbdf9b4a88 |
| ec9b55dc81 |
| 06829bc21a |
| 628071f659 |
| 5a7461dfe3 |
@@ -1,4 +1,5 @@
# https://prettier.io/docs/en/ignore.html

# ignore this file with an intentional syntax error
# ignore these files with an intentional syntax error
packages/cli/test/dev/fixtures/edge-function-error/api/edge-error-syntax.js
packages/cli/test/fixtures/unit/commands/build/node-error/api/typescript.ts
@@ -31,7 +31,7 @@
"prettier": "2.6.2",
"ts-eager": "2.0.2",
"ts-jest": "28.0.5",
"turbo": "1.3.1"
"turbo": "1.3.2-canary.1"
},
"scripts": {
"lerna": "lerna",
@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "5.0.1",
"version": "5.0.3",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",
@@ -33,6 +33,11 @@ export class EdgeFunction {
*/
envVarsInUse?: string[];

/**
* Extra binary files to be included in the edge function
*/
assets?: { name: string; path: string }[];

constructor(params: Omit<EdgeFunction, 'type'>) {
this.type = 'EdgeFunction';
this.name = params.name;
@@ -40,5 +45,6 @@ export class EdgeFunction {
this.entrypoint = params.entrypoint;
this.files = params.files;
this.envVarsInUse = params.envVarsInUse;
this.assets = params.assets;
}
}
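The hunks above add an optional `assets` field to `EdgeFunction` for shipping extra binary files alongside the function. A minimal construction sketch, assuming the existing fields (`name`, `deploymentTarget`, `entrypoint`, `files`) keep their current shapes; the names and values are illustrative, not taken from the diff:

```ts
import { EdgeFunction, FileBlob } from '@vercel/build-utils';

// Hypothetical edge function that carries a WASM binary as an extra asset.
const edgeFn = new EdgeFunction({
  name: 'api/hello',
  deploymentTarget: 'v8-worker',
  entrypoint: 'index.js',
  files: {
    'index.js': new FileBlob({ data: 'export default () => new Response("hi")' }),
  },
  envVarsInUse: ['MY_TOKEN'],
  assets: [{ name: 'image.wasm', path: 'files/image.wasm' }],
});
```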
@@ -33,9 +33,6 @@ function getHint(isAuto = false) {
: `Please set "engines": { "node": "${range}" } in your \`package.json\` file to use Node.js ${major}.`;
}

const upstreamProvider =
'This change is the result of a decision made by an upstream infrastructure provider (AWS).';

export function getLatestNodeVersion() {
return allOptions[0];
}
@@ -75,7 +72,7 @@ export async function getSupportedNodeVersion(
throw new NowBuildError({
code: 'BUILD_UTILS_NODE_VERSION_DISCONTINUED',
link: 'http://vercel.link/node-version',
message: `${intro} ${getHint(isAuto)} ${upstreamProvider}`,
message: `${intro} ${getHint(isAuto)}`,
});
}
@@ -86,9 +83,9 @@ export async function getSupportedNodeVersion(
console.warn(
`Error: Node.js version ${
selection.range
} is deprecated. Deployments created on or after ${d} will fail to build. ${getHint(
} has reached End-of-Life. Deployments created on or after ${d} will fail to build. ${getHint(
isAuto
)} ${upstreamProvider}`
)}`
);
}
packages/build-utils/test/unit.test.ts (vendored, 56 changed lines)
@@ -1,6 +1,7 @@
import ms from 'ms';
import path from 'path';
import fs, { readlink } from 'fs-extra';
import retry from 'async-retry';
import { strict as assert, strictEqual } from 'assert';
import { createZip } from '../src/lambda';
import { getSupportedNodeVersion } from '../src/fs/node-version';
@@ -386,10 +387,10 @@ it('should warn for deprecated versions, soon to be discontinued', async () => {
12
);
expect(warningMessages).toStrictEqual([
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
]);

global.Date.now = realDateNow;
@@ -494,28 +495,43 @@ it('should only invoke `runNpmInstall()` once per `package.json` file (serial)',
const meta: Meta = {};
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
const apiDir = path.join(fixture, 'api');
const run1 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run1).toEqual(true);
expect(
(meta.runNpmInstallSet as Set<string>).has(
path.join(fixture, 'package.json')
)
).toEqual(true);
const run2 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run2).toEqual(false);
const run3 = await runNpmInstall(fixture, [], undefined, meta);
expect(run3).toEqual(false);
const retryOpts = { maxRetryTime: 1000 };
let run1, run2, run3;
await retry(async () => {
run1 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run1).toEqual(true);
expect(
(meta.runNpmInstallSet as Set<string>).has(
path.join(fixture, 'package.json')
)
).toEqual(true);
}, retryOpts);
await retry(async () => {
run2 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run2).toEqual(false);
}, retryOpts);
await retry(async () => {
run3 = await runNpmInstall(fixture, [], undefined, meta);
expect(run3).toEqual(false);
}, retryOpts);
});

it('should only invoke `runNpmInstall()` once per `package.json` file (parallel)', async () => {
const meta: Meta = {};
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
const apiDir = path.join(fixture, 'api');
const [run1, run2, run3] = await Promise.all([
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(fixture, [], undefined, meta),
]);
let results: [boolean, boolean, boolean] | undefined;
await retry(
async () => {
results = await Promise.all([
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(fixture, [], undefined, meta),
]);
},
{ maxRetryTime: 3000 }
);
const [run1, run2, run3] = results || [];
expect(run1).toEqual(true);
expect(run2).toEqual(false);
expect(run3).toEqual(false);
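The test changes above wrap each group of `runNpmInstall()` assertions in `async-retry` so a transient failure is retried instead of failing the suite outright. A minimal sketch of that pattern, using a placeholder `flakyCheck()` in place of the real `runNpmInstall()`:

```ts
import retry from 'async-retry';

// Stand-in for a call that can fail transiently (e.g. a network-backed install).
async function flakyCheck(): Promise<boolean> {
  return true;
}

async function runWithRetry(): Promise<boolean | undefined> {
  const retryOpts = { maxRetryTime: 1000 };
  let result: boolean | undefined;
  await retry(async () => {
    result = await flakyCheck();
    // Throwing (or a failing expect()) makes async-retry try again until maxRetryTime.
    if (result !== true) throw new Error('not ready yet');
  }, retryOpts);
  return result;
}
```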
@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "27.0.0",
"version": "27.1.5",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -42,16 +42,16 @@
"node": ">= 14"
},
"dependencies": {
"@vercel/build-utils": "5.0.1",
"@vercel/go": "2.0.5",
"@vercel/hydrogen": "0.0.2",
"@vercel/next": "3.1.4",
"@vercel/node": "2.4.1",
"@vercel/python": "3.0.5",
"@vercel/redwood": "1.0.6",
"@vercel/remix": "1.0.6",
"@vercel/ruby": "1.3.13",
"@vercel/static-build": "1.0.5",
"@vercel/build-utils": "5.0.3",
"@vercel/go": "2.0.7",
"@vercel/hydrogen": "0.0.4",
"@vercel/next": "3.1.7",
"@vercel/node": "2.4.4",
"@vercel/python": "3.0.7",
"@vercel/redwood": "1.0.8",
"@vercel/remix": "1.0.9",
"@vercel/ruby": "1.3.15",
"@vercel/static-build": "1.0.8",
"update-notifier": "5.1.0"
},
"devDependencies": {
@@ -96,9 +96,9 @@
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@types/yauzl-promise": "2.1.0",
"@vercel/client": "12.1.0",
"@vercel/frameworks": "1.1.0",
"@vercel/fs-detectors": "1.0.1",
"@vercel/client": "12.1.2",
"@vercel/frameworks": "1.1.1",
"@vercel/fs-detectors": "2.0.1",
"@vercel/ncc": "0.24.0",
"@zeit/fun": "0.11.2",
"@zeit/source-map-support": "0.6.2",
@@ -48,9 +48,17 @@ import {
} from '../util/build/write-build-result';
import { importBuilders, BuilderWithPkg } from '../util/build/import-builders';
import { initCorepack, cleanupCorepack } from '../util/build/corepack';
import { sortBuilders } from '../util/build/sort-builders';

type BuildResult = BuildResultV2 | BuildResultV3;

interface SerializedBuilder extends Builder {
error?: Error;
require?: string;
requirePath?: string;
apiVersion: number;
}

const help = () => {
return console.log(`
${chalk.bold(`${cli.logo} ${cli.name} build`)}
@@ -198,7 +206,7 @@ export default async function main(client: Client): Promise<number> {
normalizePath(relative(workPath, f))
);

const routesResult = getTransformedRoutes({ nowConfig: vercelConfig || {} });
const routesResult = getTransformedRoutes(vercelConfig || {});
if (routesResult.error) {
output.prettyError(routesResult.error);
return 1;
@@ -296,32 +304,36 @@ export default async function main(client: Client): Promise<number> {
const ops: Promise<Error | void>[] = [];

// Write the `detectedBuilders` result to output dir
ops.push(
fs.writeJSON(
join(outputDir, 'builds.json'),
{
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
target,
argv: process.argv,
builds: builds.map(build => {
const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
const { builder, pkg: builderPkg } = builderWithPkg;
return {
require: builderPkg.name,
requirePath: builderWithPkg.path,
apiVersion: builder.version,
...build,
};
}),
},
{
spaces: 2,
const buildsJsonBuilds = new Map<Builder, SerializedBuilder>(
builds.map(build => {
const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
)
const { builder, pkg: builderPkg } = builderWithPkg;
return [
build,
{
require: builderPkg.name,
requirePath: builderWithPkg.path,
apiVersion: builder.version,
...build,
},
];
})
);
const buildsJson = {
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
target,
argv: process.argv,
builds: Array.from(buildsJsonBuilds.values()),
};
const buildsJsonPath = join(outputDir, 'builds.json');
const writeBuildsJsonPromise = fs.writeJSON(buildsJsonPath, buildsJson, {
spaces: 2,
});

ops.push(writeBuildsJsonPromise);

// The `meta` config property is re-used for each Builder
// invocation so that Builders can share state between
@@ -332,65 +344,95 @@ export default async function main(client: Client): Promise<number> {
};

// Execute Builders for detected entrypoints
// TODO: parallelize builds
// TODO: parallelize builds (except for frontend)
const sortedBuilders = sortBuilders(builds);
const buildResults: Map<Builder, BuildResult> = new Map();
const overrides: PathOverride[] = [];
const repoRootPath = cwd;
const rootPackageJsonPath = repoRootPath || workPath;
const corepackShimDir = await initCorepack({ cwd, rootPackageJsonPath });
const corepackShimDir = await initCorepack({ repoRootPath });

for (const build of builds) {
for (const build of sortedBuilders) {
if (typeof build.src !== 'string') continue;

const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
const { builder, pkg: builderPkg } = builderWithPkg;

const buildConfig: Config = {
outputDirectory: project.settings.outputDirectory ?? undefined,
...build.config,
projectSettings: project.settings,
installCommand: project.settings.installCommand ?? undefined,
devCommand: project.settings.devCommand ?? undefined,
buildCommand: project.settings.buildCommand ?? undefined,
framework: project.settings.framework,
nodeVersion: project.settings.nodeVersion,
};
const buildOptions: BuildOptions = {
files: filesMap,
entrypoint: build.src,
workPath,
repoRootPath,
config: buildConfig,
meta,
};
output.debug(
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
);
const buildResult = await builder.build(buildOptions);
try {
const { builder, pkg: builderPkg } = builderWithPkg;

// Store the build result to generate the final `config.json` after
// all builds have completed
buildResults.set(build, buildResult);
const buildConfig: Config = {
outputDirectory: project.settings.outputDirectory ?? undefined,
...build.config,
projectSettings: project.settings,
installCommand: project.settings.installCommand ?? undefined,
devCommand: project.settings.devCommand ?? undefined,
buildCommand: project.settings.buildCommand ?? undefined,
framework: project.settings.framework,
nodeVersion: project.settings.nodeVersion,
};
const buildOptions: BuildOptions = {
files: filesMap,
entrypoint: build.src,
workPath,
repoRootPath,
config: buildConfig,
meta,
};
output.debug(
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
);
const buildResult = await builder.build(buildOptions);

// Start flushing the file outputs to the filesystem asynchronously
ops.push(
writeBuildResult(
outputDir,
buildResult,
build,
builder,
builderPkg,
vercelConfig?.cleanUrls
).then(
override => {
if (override) overrides.push(override);
},
err => err
)
);
// Store the build result to generate the final `config.json` after
// all builds have completed
buildResults.set(build, buildResult);

// Start flushing the file outputs to the filesystem asynchronously
ops.push(
writeBuildResult(
outputDir,
buildResult,
build,
builder,
builderPkg,
vercelConfig?.cleanUrls
).then(
override => {
if (override) overrides.push(override);
},
err => err
)
);
} catch (err: any) {
const configJson = {
version: 3,
};
const configJsonPromise = fs.writeJSON(
join(outputDir, 'config.json'),
configJson,
{ spaces: 2 }
);

await Promise.all([writeBuildsJsonPromise, configJsonPromise]);

const buildJsonBuild = buildsJsonBuilds.get(build);
if (buildJsonBuild) {
buildJsonBuild.error = {
name: err.name,
message: err.message,
stack: err.stack,
...err,
};

await fs.writeJSON(buildsJsonPath, buildsJson, {
spaces: 2,
});
}

return 1;
}
}

if (corepackShimDir) {
@@ -15,6 +15,7 @@ export const help = () => `
)}
dev Start a local development server
env Manages the Environment Variables for your current Project
git Manage Git provider repository for your current Project
init [example] Initialize an example project
ls | list [app] Lists deployments
inspect [id] Displays information related to a deployment
@@ -64,7 +64,7 @@ import { help } from './args';
import { getDeploymentChecks } from '../../util/deploy/get-deployment-checks';
import parseTarget from '../../util/deploy/parse-target';
import getPrebuiltJson from '../../util/deploy/get-prebuilt-json';
import { createGitMeta } from '../../util/deploy/create-git-meta';
import { createGitMeta } from '../../util/create-git-meta';

export default async (client: Client) => {
const { output } = client;
@@ -95,6 +95,7 @@ export default async (client: Client) => {
// deprecated
'--name': String,
'-n': '--name',
'--no-clipboard': Boolean,
'--target': String,
});
} catch (error) {
@@ -183,6 +184,17 @@ export default async (client: Client) => {
);
}

if (argv['--no-clipboard']) {
output.print(
`${prependEmoji(
`The ${param(
'--no-clipboard'
)} option was ignored because it is the default behavior. Please remove it.`,
emoji('warning')
)}\n`
);
}

// build `target`
const target = parseTarget(output, argv['--target'], argv['--prod']);
if (typeof target === 'number') {
packages/cli/src/commands/git/connect.ts (new file, 168 lines)
@@ -0,0 +1,168 @@
import chalk from 'chalk';
import { join } from 'path';
import { Org, Project } from '../../types';
import Client from '../../util/client';
import { parseGitConfig, pluckRemoteUrl } from '../../util/create-git-meta';
import confirm from '../../util/input/confirm';
import { Output } from '../../util/output';
import link from '../../util/output/link';
import { getCommandName } from '../../util/pkg-name';
import {
connectGitProvider,
disconnectGitProvider,
formatProvider,
parseRepoUrl,
} from '../../util/projects/connect-git-provider';
import validatePaths from '../../util/validate-paths';

export default async function connect(
client: Client,
argv: any,
args: string[],
project: Project | undefined,
org: Org | undefined
) {
const { output } = client;
const confirm = Boolean(argv['--confirm']);

if (args.length !== 0) {
output.error(
`Invalid number of arguments. Usage: ${chalk.cyan(
`${getCommandName('project connect')}`
)}`
);
return 2;
}
if (!project || !org) {
output.error(
`Can't find \`org\` or \`project\`. Make sure your current directory is linked to a Vercel projet by running ${getCommandName(
'link'
)}.`
);
return 1;
}

let paths = [process.cwd()];

const validate = await validatePaths(client, paths);
if (!validate.valid) {
return validate.exitCode;
}
const { path } = validate;

const gitProviderLink = project.link;
client.config.currentTeam = org.type === 'team' ? org.id : undefined;

// get project from .git
const gitConfigPath = join(path, '.git/config');
const gitConfig = await parseGitConfig(gitConfigPath, output);
if (!gitConfig) {
output.error(
`No local git repo found. Run ${chalk.cyan(
'`git clone <url>`'
)} to clone a remote Git repository first.`
);
return 1;
}
const remoteUrl = pluckRemoteUrl(gitConfig);
if (!remoteUrl) {
output.error(
`No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run ${chalk.cyan(
'`git remote --help`'
)} for more details.`
);
return 1;
}
output.log(`Identified Git remote "origin": ${link(remoteUrl)}`);
const parsedUrl = parseRepoUrl(remoteUrl);
if (!parsedUrl) {
output.error(
`Failed to parse Git repo data from the following remote URL in your Git config: ${link(
remoteUrl
)}`
);
return 1;
}
const { provider, org: gitOrg, repo } = parsedUrl;
const repoPath = `${gitOrg}/${repo}`;
let connectedRepoPath;

if (!gitProviderLink) {
const connect = await connectGitProvider(
client,
org,
project.id,
provider,
repoPath
);
if (typeof connect === 'number') {
return connect;
}
} else {
const connectedProvider = gitProviderLink.type;
const connectedOrg = gitProviderLink.org;
const connectedRepo = gitProviderLink.repo;
connectedRepoPath = `${connectedOrg}/${connectedRepo}`;

const isSameRepo =
connectedProvider === provider &&
connectedOrg === gitOrg &&
connectedRepo === repo;
if (isSameRepo) {
output.log(
`${chalk.cyan(connectedRepoPath)} is already connected to your project.`
);
return 1;
}

const shouldReplaceRepo = await confirmRepoConnect(
client,
output,
confirm,
connectedRepoPath
);
if (!shouldReplaceRepo) {
return 0;
}

await disconnectGitProvider(client, org, project.id);
const connect = await connectGitProvider(
client,
org,
project.id,
provider,
repoPath
);
if (typeof connect === 'number') {
return connect;
}
}

output.log(
`Connected ${formatProvider(provider)} repository ${chalk.cyan(repoPath)}!`
);

return 0;
}

async function confirmRepoConnect(
client: Client,
output: Output,
yes: boolean,
connectedRepoPath: string
) {
let shouldReplaceProject = yes;
if (!shouldReplaceProject) {
shouldReplaceProject = await confirm(
client,
`Looks like you already have a repository connected: ${chalk.cyan(
connectedRepoPath
)}. Do you want to replace it?`,
true
);
if (!shouldReplaceProject) {
output.log(`Aborted. Repo not connected.`);
}
}
return shouldReplaceProject;
}
packages/cli/src/commands/git/disconnect.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
import chalk from 'chalk';
import { Org, Project } from '../../types';
import Client from '../../util/client';
import confirm from '../../util/input/confirm';
import { getCommandName } from '../../util/pkg-name';
import { disconnectGitProvider } from '../../util/projects/connect-git-provider';

export default async function disconnect(
client: Client,
args: string[],
project: Project | undefined,
org: Org | undefined
) {
const { output } = client;

if (args.length !== 0) {
output.error(
`Invalid number of arguments. Usage: ${chalk.cyan(
`${getCommandName('project disconnect')}`
)}`
);
return 2;
}
if (!project || !org) {
output.error('An unexpected error occurred.');
return 1;
}

if (project.link) {
const { org: linkOrg, repo } = project.link;
output.print(
`Your Vercel project will no longer create deployments when you push to this repository.\n`
);
const confirmDisconnect = await confirm(
client,
`Are you sure you want to disconnect ${chalk.cyan(
`${linkOrg}/${repo}`
)} from your project?`,
false
);

if (confirmDisconnect) {
await disconnectGitProvider(client, org, project.id);
output.log(`Disconnected ${chalk.cyan(`${linkOrg}/${repo}`)}.`);
} else {
output.log('Aborted.');
}
} else {
output.error(
`No Git repository connected. Run ${getCommandName(
'project connect'
)} to connect one.`
);
return 1;
}

return 0;
}
packages/cli/src/commands/git/index.ts (new file, 94 lines)
@@ -0,0 +1,94 @@
import chalk from 'chalk';
import Client from '../../util/client';
import { ensureLink } from '../../util/ensure-link';
import getArgs from '../../util/get-args';
import getInvalidSubcommand from '../../util/get-invalid-subcommand';
import handleError from '../../util/handle-error';
import logo from '../../util/output/logo';
import { getPkgName } from '../../util/pkg-name';
import validatePaths from '../../util/validate-paths';
import connect from './connect';
import disconnect from './disconnect';

const help = () => {
console.log(`
${chalk.bold(`${logo} ${getPkgName()} git`)} <command>

${chalk.dim('Commands:')}

connect Connect your Git config "origin" remote as a Git provider to your project
disconnect Disconnect the Git provider repository from your project

${chalk.dim('Options:')}

-h, --help Output usage information
-t ${chalk.bold.underline('TOKEN')}, --token=${chalk.bold.underline(
'TOKEN'
)} Login token

${chalk.dim('Examples:')}

${chalk.gray('–')} Connect a Git provider repository

${chalk.cyan(`$ ${getPkgName()} git connect`)}

${chalk.gray('–')} Disconnect the Git provider repository

${chalk.cyan(`$ ${getPkgName()} git disconnect`)}
`);
};

const COMMAND_CONFIG = {
connect: ['connect'],
disconnect: ['disconnect'],
};

export default async function main(client: Client) {
let argv: any;
let subcommand: string | string[];

try {
argv = getArgs(client.argv.slice(2), {
'--confirm': Boolean,
});
} catch (error) {
handleError(error);
return 1;
}

if (argv['--help']) {
help();
return 2;
}

argv._ = argv._.slice(1);
subcommand = argv._[0];
const args = argv._.slice(1);
const confirm = Boolean(argv['--confirm']);
const { output } = client;

let paths = [process.cwd()];
const pathValidation = await validatePaths(client, paths);
if (!pathValidation.valid) {
return pathValidation.exitCode;
}
const { path } = pathValidation;

const linkedProject = await ensureLink('git', client, path, confirm);
if (typeof linkedProject === 'number') {
return linkedProject;
}

const { org, project } = linkedProject;

switch (subcommand) {
case 'connect':
return await connect(client, argv, args, project, org);
case 'disconnect':
return await disconnect(client, args, project, org);
default:
output.error(getInvalidSubcommand(COMMAND_CONFIG));
help();
return 2;
}
}
@@ -14,6 +14,7 @@ export default new Map([
['domain', 'domains'],
['domains', 'domains'],
['env', 'env'],
['git', 'git'],
['help', 'help'],
['init', 'init'],
['inspect', 'inspect'],
@@ -25,8 +26,8 @@ export default new Map([
['logout', 'logout'],
['logs', 'logs'],
['ls', 'list'],
['project', 'projects'],
['projects', 'projects'],
['project', 'project'],
['projects', 'project'],
['pull', 'pull'],
['remove', 'remove'],
['rm', 'remove'],
@@ -6,7 +6,6 @@ import getScope from '../../util/get-scope';
import handleError from '../../util/handle-error';
import logo from '../../util/output/logo';
import { getPkgName } from '../../util/pkg-name';
import validatePaths from '../../util/validate-paths';
import add from './add';
import list from './list';
import rm from './rm';
@@ -48,7 +47,6 @@ const COMMAND_CONFIG = {
ls: ['ls', 'list'],
add: ['add'],
rm: ['rm', 'remove'],
connect: ['connect'],
};

export default async function main(client: Client) {
@@ -59,7 +57,6 @@ export default async function main(client: Client) {
argv = getArgs(client.argv.slice(2), {
'--next': Number,
'-N': '--next',
'--yes': Boolean,
});
} catch (error) {
handleError(error);
@@ -76,12 +73,6 @@ export default async function main(client: Client) {
const args = argv._.slice(1);
const { output } = client;

let paths = [process.cwd()];
const pathValidation = await validatePaths(client, paths);
if (!pathValidation.valid) {
return pathValidation.exitCode;
}

let contextName = '';

try {
@@ -173,7 +173,7 @@ const main = async () => {
const targetOrSubcommand = argv._[2];

// Currently no beta commands - add here as needed
const betaCommands: string[] = [''];
const betaCommands: string[] = [];
if (betaCommands.includes(targetOrSubcommand)) {
console.log(
`${chalk.grey(
@@ -632,6 +632,9 @@ const main = async () => {
case 'env':
func = require('./commands/env').default;
break;
case 'git':
func = require('./commands/git').default;
break;
case 'init':
func = require('./commands/init').default;
break;
@@ -653,7 +656,7 @@ const main = async () => {
case 'logout':
func = require('./commands/logout').default;
break;
case 'projects':
case 'project':
func = require('./commands/project').default;
break;
case 'pull':
@@ -248,12 +248,34 @@ export interface ProjectEnvVariable {
gitBranch?: string;
}

export interface DeployHook {
createdAt: number;
id: string;
name: string;
ref: string;
url: string;
}

export interface ProjectLinkData {
type: string;
repo: string;
repoId: number;
org?: string;
gitCredentialId: string;
productionBranch?: string | null;
sourceless: boolean;
createdAt: number;
updatedAt: number;
deployHooks?: DeployHook[];
}

export interface Project extends ProjectSettings {
id: string;
name: string;
accountId: string;
updatedAt: number;
createdAt: number;
link?: ProjectLinkData;
alias?: ProjectAliasTarget[];
latestDeployments?: Partial<Deployment>[];
}
@@ -6,11 +6,9 @@ import { VERCEL_DIR } from '../projects/link';
import readJSONFile from '../read-json-file';

export async function initCorepack({
cwd,
rootPackageJsonPath,
repoRootPath,
}: {
cwd: string;
rootPackageJsonPath: string;
repoRootPath: string;
}): Promise<string | null> {
if (process.env.ENABLE_EXPERIMENTAL_COREPACK !== '1') {
// Since corepack is experimental, we need to exit early
@@ -18,7 +16,7 @@ export async function initCorepack({
return null;
}
const pkg = await readJSONFile<PackageJson>(
join(rootPackageJsonPath, 'package.json')
join(repoRootPath, 'package.json')
);
if (pkg instanceof CantParseJSONFile) {
console.warn(
@@ -32,16 +30,13 @@ export async function initCorepack({
console.log(
`Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "${pkg.packageManager}" in package.json`
);
const corepackRootDir = join(cwd, VERCEL_DIR, 'cache', 'corepack');
const corepackRootDir = join(repoRootPath, VERCEL_DIR, 'cache', 'corepack');
const corepackHomeDir = join(corepackRootDir, 'home');
const corepackShimDir = join(corepackRootDir, 'shim');
await fs.mkdirp(corepackHomeDir);
await fs.mkdirp(corepackShimDir);
process.env.COREPACK_HOME = corepackHomeDir;
process.env.PATH = `${corepackShimDir}${delimiter}${process.env.PATH}`;
process.env.DEBUG = process.env.DEBUG
? `corepack,${process.env.DEBUG}`
: 'corepack';
const pkgManagerName = pkg.packageManager.split('@')[0];
// We must explicitly call `corepack enable npm` since `corepack enable`
// doesn't work with npm. See https://github.com/nodejs/corepack/pull/24
@@ -72,11 +67,4 @@ export function cleanupCorepack(corepackShimDir: string) {
''
);
}
if (process.env.DEBUG) {
if (process.env.DEBUG === 'corepack') {
delete process.env.DEBUG;
} else {
process.env.DEBUG = process.env.DEBUG.replace('corepack,', '');
}
}
}
packages/cli/src/util/build/sort-builders.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import frameworkList from '@vercel/frameworks';

export function sortBuilders<B extends { use: string }>(builds: B[]): B[] {
const frontendRuntimeSet = new Set(
frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
);
const toNumber = (build: B) => (frontendRuntimeSet.has(build.use) ? 0 : 1);

return builds.sort((build1, build2) => {
return toNumber(build1) - toNumber(build2);
});
}
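`sortBuilders` orders frontend Builders (any runtime referenced by a framework's `useRuntime`, falling back to `@vercel/static-build`) ahead of the rest, which is what lets `vercel build` run the frontend build first. A small usage sketch with made-up build entries:

```ts
import { sortBuilders } from './sort-builders';

// Hypothetical detected builds; only the `use` field matters for ordering.
const builds = [
  { use: '@vercel/node', src: 'api/index.js' },
  { use: '@vercel/static-build', src: 'package.json' },
];

const ordered = sortBuilders(builds);
// ordered[0].use === '@vercel/static-build': the frontend build is moved to the front.
```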
@@ -1,6 +1,14 @@
import fs from 'fs-extra';
import mimeTypes from 'mime-types';
import { basename, dirname, extname, join, relative, resolve } from 'path';
import {
basename,
dirname,
extname,
join,
relative,
resolve,
posix,
} from 'path';
import {
Builder,
BuildResultV2,
@@ -20,6 +28,7 @@ import pipe from 'promisepipe';
import { unzip } from './unzip';
import { VERCEL_DIR } from '../projects/link';

const { normalize } = posix;
export const OUTPUT_DIR = join(VERCEL_DIR, 'output');

export async function writeBuildResult(
@@ -67,6 +76,13 @@ export interface PathOverride {
path?: string;
}

/**
* Remove duplicate slashes as well as leading/trailing slashes.
*/
function stripDuplicateSlashes(path: string): string {
return normalize(path).replace(/(^\/|\/$)/g, '');
}

/**
* Writes the output from the `build()` return value of a v2 Builder to
* the filesystem.
@@ -84,16 +100,17 @@ async function writeBuildResultV2(
const lambdas = new Map<Lambda, string>();
const overrides: Record<string, PathOverride> = {};
for (const [path, output] of Object.entries(buildResult.output)) {
const normalizedPath = stripDuplicateSlashes(path);
if (isLambda(output)) {
await writeLambda(outputDir, output, path, lambdas);
await writeLambda(outputDir, output, normalizedPath, lambdas);
} else if (isPrerender(output)) {
await writeLambda(outputDir, output.lambda, path, lambdas);
await writeLambda(outputDir, output.lambda, normalizedPath, lambdas);

// Write the fallback file alongside the Lambda directory
let fallback = output.fallback;
if (fallback) {
const ext = getFileExtension(fallback);
const fallbackName = `${path}.prerender-fallback${ext}`;
const fallbackName = `${normalizedPath}.prerender-fallback${ext}`;
const fallbackPath = join(outputDir, 'functions', fallbackName);
const stream = fallback.toStream();
await pipe(
@@ -109,7 +126,7 @@ async function writeBuildResultV2(
const prerenderConfigPath = join(
outputDir,
'functions',
`${path}.prerender-config.json`
`${normalizedPath}.prerender-config.json`
);
const prerenderConfig = {
...output,
@@ -118,12 +135,20 @@ async function writeBuildResultV2(
};
await fs.writeJSON(prerenderConfigPath, prerenderConfig, { spaces: 2 });
} else if (isFile(output)) {
await writeStaticFile(outputDir, output, path, overrides, cleanUrls);
await writeStaticFile(
outputDir,
output,
normalizedPath,
overrides,
cleanUrls
);
} else if (isEdgeFunction(output)) {
await writeEdgeFunction(outputDir, output, path);
await writeEdgeFunction(outputDir, output, normalizedPath);
} else {
throw new Error(
`Unsupported output type: "${(output as any).type}" for ${path}`
`Unsupported output type: "${
(output as any).type
}" for ${normalizedPath}`
);
}
}
@@ -145,9 +170,9 @@ async function writeBuildResultV3(
throw new Error(`Expected "build.src" to be a string`);
}
const ext = extname(src);
const path = build.config?.zeroConfig
? src.substring(0, src.length - ext.length)
: src;
const path = stripDuplicateSlashes(
build.config?.zeroConfig ? src.substring(0, src.length - ext.length) : src
);
if (isLambda(output)) {
await writeLambda(outputDir, output, path);
} else if (isEdgeFunction(output)) {
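The new `stripDuplicateSlashes` helper normalizes each output path before anything is written, so keys such as `'withTrailingSlash/'` (see the builder fixture later in this diff) land in predictable locations. A quick illustration of the behavior, reusing the same implementation:

```ts
import { posix } from 'path';

const { normalize } = posix;

// Same implementation as in the hunk above.
function stripDuplicateSlashes(path: string): string {
  return normalize(path).replace(/(^\/|\/$)/g, '');
}

stripDuplicateSlashes('withTrailingSlash/'); // => 'withTrailingSlash'
stripDuplicateSlashes('/api//users/');       // => 'api/users'
```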
@@ -3,17 +3,17 @@ import { join } from 'path';
import ini from 'ini';
import git from 'git-last-commit';
import { exec } from 'child_process';
import { GitMetadata } from '../../types';
import { Output } from '../output';
import { GitMetadata } from '../types';
import { Output } from './output';

export function isDirty(directory: string, output: Output): Promise<boolean> {
return new Promise(resolve => {
exec('git status -s', { cwd: directory }, function (err, stdout, stderr) {
if (err) return resolve(false);
if (stderr) {
output.debug(
`Failed to determine if git repo has been modified: ${stderr.trim()}`
);
let debugMessage = `Failed to determine if Git repo has been modified:`;
if (err || stderr) {
if (err) debugMessage += `\n${err}`;
if (stderr) debugMessage += `\n${stderr.trim()}`;
output.debug(debugMessage);
return resolve(false);
}
resolve(stdout.trim().length > 0);
@@ -33,21 +33,31 @@ function getLastCommit(directory: string): Promise<git.Commit> {
});
}

export async function parseGitConfig(configPath: string, output: Output) {
try {
return ini.parse(await fs.readFile(configPath, 'utf-8'));
} catch (error) {
output.debug(`Error while parsing repo data: ${error.message}`);
}
}

export function pluckRemoteUrl(gitConfig: {
[key: string]: any;
}): string | undefined {
// Assuming "origin" is the remote url that the user would want to use
return gitConfig['remote "origin"']?.url;
}

export async function getRemoteUrl(
configPath: string,
output: Output
): Promise<string | null> {
let gitConfig;
try {
gitConfig = ini.parse(await fs.readFile(configPath, 'utf-8'));
} catch (error) {
output.debug(`Error while parsing repo data: ${error.message}`);
}
let gitConfig = await parseGitConfig(configPath, output);
if (!gitConfig) {
return null;
}

const originUrl: string = gitConfig['remote "origin"']?.url;
const originUrl = pluckRemoteUrl(gitConfig);
if (originUrl) {
return originUrl;
}
@@ -64,7 +74,10 @@ export async function createGitMeta(
return;
}
const [commit, dirty] = await Promise.all([
getLastCommit(directory).catch(() => {
getLastCommit(directory).catch(err => {
output.debug(
`Failed to get last commit. The directory is likely not a Git repo, there are no latest commits, or it is corrupted.\n${err}`
);
return;
}),
isDirty(directory, output),
@@ -558,9 +558,8 @@ export default class DevServer {
]);

await this.validateVercelConfig(vercelConfig);
const { error: routeError, routes: maybeRoutes } = getTransformedRoutes({
nowConfig: vercelConfig,
});
const { error: routeError, routes: maybeRoutes } =
getTransformedRoutes(vercelConfig);
if (routeError) {
this.output.prettyError(routeError);
await this.exit();
@@ -1,4 +1,3 @@
import inquirer from 'inquirer';
import Client from '../client';
import getUser from '../get-user';
import getTeams from '../teams/get-teams';
@@ -43,7 +42,7 @@ export default async function selectOrg(
return choices[defaultOrgIndex].value;
}

const answers = await inquirer.prompt({
const answers = await client.prompt({
type: 'list',
name: 'org',
message: question,
packages/cli/src/util/projects/connect-git-provider.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
import Client from '../client';
import { stringify } from 'qs';
import { Org } from '../../types';
import chalk from 'chalk';
import link from '../output/link';

export async function disconnectGitProvider(
client: Client,
org: Org,
projectId: string
) {
const fetchUrl = `/v4/projects/${projectId}/link?${stringify({
teamId: org.type === 'team' ? org.id : undefined,
})}`;
return client.fetch(fetchUrl, {
method: 'DELETE',
headers: {
'Content-Type': 'application/json',
},
});
}

export async function connectGitProvider(
client: Client,
org: Org,
projectId: string,
type: string,
repo: string
) {
const fetchUrl = `/v4/projects/${projectId}/link?${stringify({
teamId: org.type === 'team' ? org.id : undefined,
})}`;
try {
return await client.fetch(fetchUrl, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
type,
repo,
}),
});
} catch (err) {
if (
err.meta?.action === 'Install GitHub App' ||
err.code === 'repo_not_found'
) {
client.output.error(
`Failed to link ${chalk.cyan(
repo
)}. Make sure there aren't any typos and that you have access to the repository if it's private.`
);
} else if (err.action === 'Add a Login Connection') {
client.output.error(
err.message.replace(repo, chalk.cyan(repo)) +
`\nVisit ${link(err.link)} for more information.`
);
} else {
client.output.error(
`Failed to connect the ${formatProvider(
type
)} repository ${repo}.\n${err}`
);
}
return 1;
}
}

export function formatProvider(type: string): string {
switch (type) {
case 'github':
return 'GitHub';
case 'gitlab':
return 'GitLab';
case 'bitbucket':
return 'Bitbucket';
default:
return type;
}
}

export function parseRepoUrl(originUrl: string): {
provider: string;
org: string;
repo: string;
} | null {
const isSSH = originUrl.startsWith('git@');
// Matches all characters between (// or @) and (.com or .org)
// eslint-disable-next-line prefer-named-capture-group
const provider = /(?<=(\/\/|@)).*(?=(\.com|\.org))/.exec(originUrl);
if (!provider) {
return null;
}

let org;
let repo;

if (isSSH) {
org = originUrl.split(':')[1].split('/')[0];
repo = originUrl.split('/')[1]?.replace('.git', '');
} else {
// Assume https:// or git://
org = originUrl.split('/')[3];
repo = originUrl.split('/')[4]?.replace('.git', '');
}

if (!org || !repo) {
return null;
}

return {
provider: provider[0],
org,
repo,
};
}
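`parseRepoUrl` accepts both SSH and HTTPS remotes and is what `vc git connect` uses to derive the provider, org, and repo. A usage sketch with example URLs that are not taken from the diff:

```ts
import { parseRepoUrl } from './connect-git-provider';

parseRepoUrl('git@github.com:user/repo.git');
// => { provider: 'github', org: 'user', repo: 'repo' }

parseRepoUrl('https://gitlab.com/group/project.git');
// => { provider: 'gitlab', org: 'group', repo: 'project' }

parseRepoUrl('bababooey');
// => null, which the connect command reports as a parse failure
```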
@@ -5,6 +5,7 @@ import { join } from 'path';

export type ProjectLinkAndSettings = ProjectLink & {
settings: {
createdAt: Project['createdAt'];
installCommand: Project['installCommand'];
buildCommand: Project['buildCommand'];
devCommand: Project['devCommand'];
@@ -28,6 +29,7 @@ export async function writeProjectSettings(
projectId: project.id,
orgId: org.id,
settings: {
createdAt: project.createdAt,
framework: project.framework,
devCommand: project.devCommand,
installCommand: project.installCommand,
packages/cli/test/fixtures/unit/commands/build/node-error/.vercel/project.json (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@
{
"orgId": ".",
"projectId": ".",
"settings": {
"framework": null
}
}

packages/cli/test/fixtures/unit/commands/build/node-error/api/es6.js (new file, vendored, 1 line)
@@ -0,0 +1 @@
export default (req, res) => res.end('Vercel');

packages/cli/test/fixtures/unit/commands/build/node-error/api/index.js (new file, vendored, 1 line)
@@ -0,0 +1 @@
module.exports = (req, res) => res.end('Vercel');

packages/cli/test/fixtures/unit/commands/build/node-error/api/mjs.mjs (new file, vendored, 1 line)
@@ -0,0 +1 @@
export default (req, res) => res.end('Vercel');

packages/cli/test/fixtures/unit/commands/build/node-error/api/typescript.ts (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
import { IncomingMessage, ServerResponse } from 'http';

// Intentional syntax error to make the build fail
export default (req: IncomingMessage, res: ServerResponse => res.end('Vercel');

@@ -1,9 +1,17 @@
const { FileBlob } = require('@vercel/build-utils');
const { FileBlob, Lambda } = require('@vercel/build-utils');

exports.build = async () => {
const file = new FileBlob({
data: Buffer.from('file contents')
});
const output = { file };
const lambda = new Lambda({
files: {},
runtime: 'provided',
handler: 'example.js'
})
const output = {
file,
'withTrailingSlash/': lambda
};
return { output };
};
1
packages/cli/test/fixtures/unit/commands/git/connect/bad-remote-url/.gitignore
generated
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/git/connect/bad-remote-url/.gitignore
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
!.vercel
|
||||
4
packages/cli/test/fixtures/unit/commands/git/connect/bad-remote-url/.vercel/project.json
generated
vendored
Normal file
4
packages/cli/test/fixtures/unit/commands/git/connect/bad-remote-url/.vercel/project.json
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "bad-remote-url"
|
||||
}
|
||||
10
packages/cli/test/fixtures/unit/commands/git/connect/bad-remote-url/git/config
generated
vendored
Normal file
10
packages/cli/test/fixtures/unit/commands/git/connect/bad-remote-url/git/config
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = bababooey
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
1
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/.gitignore
generated
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/.gitignore
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
!.vercel
|
||||
4
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/.vercel/project.json
generated
vendored
Normal file
4
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/.vercel/project.json
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "existing-connection"
|
||||
}
|
||||
1
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/HEAD
generated
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/HEAD
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
ref: refs/heads/master
|
||||
10
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/config
generated
vendored
Normal file
10
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/config
generated
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/user2/repo2
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
1
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/description
generated
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/description
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
Unnamed repository; edit this file 'description' to name the repository.
|
||||
1
packages/cli/test/fixtures/unit/commands/git/connect/invalid-repo/.gitignore
generated
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/git/connect/invalid-repo/.gitignore
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
!.vercel
|
||||
4
packages/cli/test/fixtures/unit/commands/git/connect/invalid-repo/.vercel/project.json
generated
vendored
Normal file
4
packages/cli/test/fixtures/unit/commands/git/connect/invalid-repo/.vercel/project.json
generated
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "invalid-repo"
|
||||
}
|
||||
10
10 packages/cli/test/fixtures/unit/commands/git/connect/invalid-repo/git/config generated vendored Normal file
@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/laksfj/asdgklsadkl
fetch = +refs/heads/*:refs/remotes/origin/*

1 packages/cli/test/fixtures/unit/commands/git/connect/new-connection/.gitignore generated vendored Normal file
@@ -0,0 +1 @@
!.vercel

4 packages/cli/test/fixtures/unit/commands/git/connect/new-connection/.vercel/project.json generated vendored Normal file
@@ -0,0 +1,4 @@
{
  "orgId": "team_dummy",
  "projectId": "new-connection"
}

10 packages/cli/test/fixtures/unit/commands/git/connect/new-connection/git/config generated vendored Normal file
@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/user/repo
fetch = +refs/heads/*:refs/remotes/origin/*

1 packages/cli/test/fixtures/unit/commands/git/connect/no-git-config/.gitignore generated vendored Normal file
@@ -0,0 +1 @@
!.vercel

4 packages/cli/test/fixtures/unit/commands/git/connect/no-git-config/.vercel/project.json generated vendored Normal file
@@ -0,0 +1,4 @@
{
  "orgId": "team_dummy",
  "projectId": "no-git-config"
}

1 packages/cli/test/fixtures/unit/commands/git/connect/no-remote-url/.gitignore generated vendored Normal file
@@ -0,0 +1 @@
!.vercel

4 packages/cli/test/fixtures/unit/commands/git/connect/no-remote-url/.vercel/project.json generated vendored Normal file
@@ -0,0 +1,4 @@
{
  "orgId": "team_dummy",
  "projectId": "no-remote-url"
}

7 packages/cli/test/fixtures/unit/commands/git/connect/no-remote-url/git/config generated vendored Normal file
@@ -0,0 +1,7 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true

1 packages/cli/test/fixtures/unit/commands/git/connect/same-repo-connection/.gitignore generated vendored Normal file
@@ -0,0 +1 @@
!.vercel

4 packages/cli/test/fixtures/unit/commands/git/connect/same-repo-connection/.vercel/project.json generated vendored Normal file
@@ -0,0 +1,4 @@
{
  "orgId": "team_dummy",
  "projectId": "new-connection"
}

10 packages/cli/test/fixtures/unit/commands/git/connect/same-repo-connection/git/config generated vendored Normal file
@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/user/repo
fetch = +refs/heads/*:refs/remotes/origin/*

2 packages/cli/test/fixtures/unit/commands/git/connect/unlinked/.gitignore generated vendored Normal file
@@ -0,0 +1,2 @@
!.vercel
.vercel

1 packages/cli/test/fixtures/unit/commands/git/connect/unlinked/git/HEAD generated vendored Normal file
@@ -0,0 +1 @@
ref: refs/heads/master

10 packages/cli/test/fixtures/unit/commands/git/connect/unlinked/git/config generated vendored Normal file
@@ -0,0 +1,10 @@
[core]
repositoryformatversion = 0
filemode = true
bare = false
logallrefupdates = true
ignorecase = true
precomposeunicode = true
[remote "origin"]
url = https://github.com/user/repo.git
fetch = +refs/heads/*:refs/remotes/origin/*

1 packages/cli/test/fixtures/unit/commands/git/connect/unlinked/git/description generated vendored Normal file
@@ -0,0 +1 @@
Unnamed repository; edit this file 'description' to name the repository.
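These fixture directories mimic real `.git` layouts (the tests rename `git` to `.git` before running). A minimal sketch of how a remote origin URL could be read from such a config file — the helper name and the regex-based parsing below are illustrative assumptions, not the CLI's actual implementation:

```ts
import { readFile } from 'fs/promises';

// Hypothetical helper: extract `remote.origin.url` from a .git/config file.
// A real implementation would use a proper INI parser; this regex sketch is
// only meant to show the shape of the fixture data above.
async function readOriginUrl(gitConfigPath: string): Promise<string | null> {
  const config = await readFile(gitConfigPath, 'utf8');
  // Find the [remote "origin"] section, then its url entry.
  const section = config.split(/\[remote "origin"\]/)[1];
  if (!section) return null;
  const match = section.match(/url\s*=\s*(\S+)/);
  return match ? match[1] : null;
}

// Example: readOriginUrl('.git/config') resolves to
// 'https://github.com/user/repo' for the new-connection fixture.
```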
@@ -1,4 +1,4 @@
export function getDataFromIntro(output: string): {
export function pluckIdentifiersFromDeploymentList(output: string): {
  project: string | undefined;
  org: string | undefined;
} {
@@ -11,7 +11,7 @@ export function getDataFromIntro(output: string): {
  };
}

export function parseTable(output: string): string[] {
export function parseSpacedTableRow(output: string): string[] {
  return output
    .trim()
    .replace(/ {1} +/g, ',')
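The renamed helper turns one row of the CLI's space-aligned table output into cells by collapsing runs of spaces into a delimiter. A sketch of the full helper, assuming the body continues by splitting on that delimiter — only the lines shown in the hunk above are confirmed by the diff:

```ts
export function parseSpacedTableRow(output: string): string[] {
  return output
    .trim()
    // collapse runs of two or more spaces into a single comma...
    .replace(/ {1} +/g, ',')
    // ...then split into individual cells
    .split(',');
}

// parseSpacedTableRow('  my-app   production   2m ago  ')
// => ['my-app', 'production', '2m ago']
```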
@@ -5,17 +5,19 @@ export function readOutputStream(
  length: number = 3
): Promise<string> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    let output: string = '';
    let lines = 0;
    const timeout = setTimeout(() => {
      reject();
    }, 3000);

    client.stderr.resume();
    client.stderr.on('data', chunk => {
      chunks.push(chunk);
      if (chunks.length === length) {
      output += chunk.toString();
      lines++;
      if (lines === length) {
        clearTimeout(timeout);
        resolve(chunks.toString().replace(/,/g, ''));
        resolve(output);
      }
    });
    client.stderr.on('error', reject);
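The new version accumulates a plain string and resolves after a fixed number of stderr chunks instead of joining `Buffer` chunks at the end. Assembled from the added lines in the hunk above (the `client` parameter type is simplified here):

```ts
export function readOutputStream(
  client: { stderr: NodeJS.ReadableStream },
  length: number = 3
): Promise<string> {
  return new Promise((resolve, reject) => {
    let output = '';
    let lines = 0; // counts 'data' events, which the tests treat as lines
    const timeout = setTimeout(() => reject(new Error('timed out')), 3000);

    client.stderr.resume();
    client.stderr.on('data', chunk => {
      output += chunk.toString();
      lines++;
      if (lines === length) {
        clearTimeout(timeout);
        resolve(output);
      }
    });
    client.stderr.on('error', reject);
  });
}
```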
9 packages/cli/test/integration.js vendored
@@ -1323,12 +1323,7 @@ test('[vc projects] should create a project successfully', async t => {
    Math.random().toString(36).split('.')[1]
  }`;

  const vc = execa(binaryPath, [
    'projects',
    'add',
    projectName,
    ...defaultArgs,
  ]);
  const vc = execa(binaryPath, ['project', 'add', projectName, ...defaultArgs]);

  await waitForPrompt(vc, chunk =>
    chunk.includes(`Success! Project ${projectName} added`)
@@ -1339,7 +1334,7 @@ test('[vc projects] should create a project successfully', async t => {

  // creating the same project again should succeed
  const vc2 = execa(binaryPath, [
    'projects',
    'project',
    'add',
    projectName,
    ...defaultArgs,
@@ -1,5 +1,6 @@
import { client } from './client';
import { Project } from '../../src/types';
import { formatProvider } from '../../src/util/projects/connect-git-provider';

const envs = [
  {
@@ -589,8 +589,6 @@ describe('build', () => {
    const output = join(cwd, '.vercel/output');
    try {
      process.chdir(cwd);
      client.stderr.pipe(process.stderr);
      client.setArgv('build');
      const exitCode = await build(client);
      expect(exitCode).toEqual(0);

@@ -614,6 +612,40 @@ describe('build', () => {
      expect(await fs.readFile(join(output, 'static/file'), 'utf8')).toEqual(
        'file contents'
      );

      // "functions" directory has output Functions
      const functions = await fs.readdir(join(output, 'functions'));
      expect(functions.sort()).toEqual(['withTrailingSlash.func']);
    } finally {
      process.chdir(originalCwd);
      delete process.env.__VERCEL_BUILD_RUNNING;
    }
  });

  it('should store Builder error in `builds.json`', async () => {
    const cwd = fixture('node-error');
    const output = join(cwd, '.vercel/output');
    try {
      process.chdir(cwd);
      const exitCode = await build(client);
      expect(exitCode).toEqual(1);

      // `builds.json` contains "error" build
      const builds = await fs.readJSON(join(output, 'builds.json'));
      expect(builds.builds).toHaveLength(4);

      const errorBuilds = builds.builds.filter((b: any) => 'error' in b);
      expect(errorBuilds).toHaveLength(1);

      expect(errorBuilds[0].error.name).toEqual('Error');
      expect(errorBuilds[0].error.message).toMatch(`TS1005`);
      expect(errorBuilds[0].error.message).toMatch(`',' expected.`);
      expect(errorBuilds[0].error.hideStackTrace).toEqual(true);
      expect(errorBuilds[0].error.code).toEqual('NODE_TYPESCRIPT_ERROR');

      // `config.json` contains `version`
      const configJson = await fs.readJSON(join(output, 'config.json'));
      expect(configJson.version).toBe(3);
    } finally {
      process.chdir(originalCwd);
      delete process.env.__VERCEL_BUILD_RUNNING;
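The assertions above imply a rough shape for an error entry in `.vercel/output/builds.json`. A sketch of that shape, limited to the fields the test actually checks (the real record may carry more):

```ts
// Inferred from the test assertions; not the authoritative schema.
interface BuildsJsonErrorEntry {
  error: {
    name: string; // e.g. 'Error'
    message: string; // includes the TypeScript diagnostic, e.g. TS1005
    hideStackTrace: boolean;
    code: string; // e.g. 'NODE_TYPESCRIPT_ERROR'
  };
}
```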
380
packages/cli/test/unit/commands/git.test.ts
Normal file
380
packages/cli/test/unit/commands/git.test.ts
Normal file
@@ -0,0 +1,380 @@
|
||||
import { join } from 'path';
|
||||
import fs from 'fs-extra';
|
||||
import { useUser } from '../../mocks/user';
|
||||
import { useTeams } from '../../mocks/team';
|
||||
import { defaultProject, useProject } from '../../mocks/project';
|
||||
import { client } from '../../mocks/client';
|
||||
import git from '../../../src/commands/git';
|
||||
import { Project } from '../../../src/types';
|
||||
|
||||
describe('git', () => {
|
||||
describe('connect', () => {
|
||||
const originalCwd = process.cwd();
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/git/connect', name);
|
||||
|
||||
it('connects an unlinked project', async () => {
|
||||
const cwd = fixture('unlinked');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'unlinked',
|
||||
name: 'unlinked',
|
||||
});
|
||||
client.setArgv('projects', 'connect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Set up');
|
||||
client.stdin.write('y\n');
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'Which scope should contain your project?'
|
||||
);
|
||||
client.stdin.write('\r');
|
||||
|
||||
await expect(client.stderr).toOutput('Found project');
|
||||
client.stdin.write('y\n');
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo.git`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
await expect(client.stderr).toOutput(
|
||||
'Connected GitHub repository user/repo!'
|
||||
);
|
||||
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const project: Project = await client.fetch(`/v8/projects/unlinked`);
|
||||
expect(project.link).toMatchObject({
|
||||
type: 'github',
|
||||
repo: 'user/repo',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when there is no git config', async () => {
|
||||
const cwd = fixture('no-git-config');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'no-git-config',
|
||||
name: 'no-git-config',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! No local git repo found. Run \`git clone <url>\` to clone a remote Git repository first.\n`
|
||||
);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when there is no remote url', async () => {
|
||||
const cwd = fixture('no-remote-url');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'no-remote-url',
|
||||
name: 'no-remote-url',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run \`git remote --help\` for more details.`
|
||||
);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when the remote url is bad', async () => {
|
||||
const cwd = fixture('bad-remote-url');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'bad-remote-url',
|
||||
name: 'bad-remote-url',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": bababooey`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! Failed to parse Git repo data from the following remote URL in your Git config: bababooey\n`
|
||||
);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should connect a repo to a project that is not already connected', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> Connected GitHub repository user/repo!\n`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const project: Project = await client.fetch(
|
||||
`/v8/projects/new-connection`
|
||||
);
|
||||
expect(project.link).toMatchObject({
|
||||
type: 'github',
|
||||
repo: 'user/repo',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should replace an old connection with a new one', async () => {
|
||||
const cwd = fixture('existing-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'existing-connection',
|
||||
name: 'existing-connection',
|
||||
});
|
||||
project.project.link = {
|
||||
type: 'github',
|
||||
repo: 'repo',
|
||||
org: 'user',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user2/repo2`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> Connected GitHub repository user2/repo2!\n`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const newProjectData: Project = await client.fetch(
|
||||
`/v8/projects/existing-connection`
|
||||
);
|
||||
expect(newProjectData.link).toMatchObject({
|
||||
type: 'github',
|
||||
repo: 'user2/repo2',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should exit when an already-connected repo is connected', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
project.project.link = {
|
||||
type: 'github',
|
||||
repo: 'repo',
|
||||
org: 'user',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> user/repo is already connected to your project.\n`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(1);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when it cannot find the repository', async () => {
|
||||
const cwd = fixture('invalid-repo');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'invalid-repo',
|
||||
name: 'invalid-repo',
|
||||
});
|
||||
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/laksfj/asdgklsadkl`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Failed to link laksfj/asdgklsadkl. Make sure there aren't any typos and that you have access to the repository if it's private.`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(1);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
|
||||
describe('disconnect', () => {
|
||||
const originalCwd = process.cwd();
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/git/connect', name);
|
||||
|
||||
it('should disconnect a repository', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
project.project.link = {
|
||||
type: 'github',
|
||||
repo: 'repo',
|
||||
org: 'user',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
client.setArgv('git', 'disconnect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Are you sure you want to disconnect user/repo from your project?`
|
||||
);
|
||||
client.stdin.write('y\n');
|
||||
await expect(client.stderr).toOutput('Disconnected user/repo.');
|
||||
|
||||
const newProjectData: Project = await client.fetch(
|
||||
`/v8/projects/new-connection`
|
||||
);
|
||||
expect(newProjectData.link).toBeUndefined();
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail if there is no repository to disconnect', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
|
||||
client.setArgv('git', 'disconnect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'No Git repository connected. Run `vercel project connect` to connect one.'
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(1);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -6,7 +6,10 @@ import { useTeams } from '../../mocks/team';
import { defaultProject, useProject } from '../../mocks/project';
import { useDeployment } from '../../mocks/deployment';
import { readOutputStream } from '../../helpers/read-output-stream';
import { parseTable, getDataFromIntro } from '../../helpers/parse-table';
import {
  parseSpacedTableRow,
  pluckIdentifiersFromDeploymentList,
} from '../../helpers/parse-table';

const fixture = (name: string) =>
  join(__dirname, '../../fixtures/unit/commands/list', name);
@@ -34,9 +37,9 @@ describe('list', () => {

    const output = await readOutputStream(client);

    const { org } = getDataFromIntro(output.split('\n')[0]);
    const header: string[] = parseTable(output.split('\n')[2]);
    const data: string[] = parseTable(output.split('\n')[3]);
    const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
    const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
    const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
    data.splice(2, 1);

    expect(org).toEqual(team[0].slug);
@@ -76,9 +79,9 @@ describe('list', () => {

    const output = await readOutputStream(client);

    const { org } = getDataFromIntro(output.split('\n')[0]);
    const header: string[] = parseTable(output.split('\n')[2]);
    const data: string[] = parseTable(output.split('\n')[3]);
    const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
    const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
    const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
    data.splice(2, 1);

    expect(org).toEqual(teamSlug);
@@ -5,9 +5,12 @@ import { defaultProject, useProject } from '../../mocks/project';
import { client } from '../../mocks/client';
import { Project } from '../../../src/types';
import { readOutputStream } from '../../helpers/read-output-stream';
import { getDataFromIntro, parseTable } from '../../helpers/parse-table';
import {
  pluckIdentifiersFromDeploymentList,
  parseSpacedTableRow,
} from '../../helpers/parse-table';

describe('projects', () => {
describe('project', () => {
  describe('list', () => {
    it('should list deployments under a user', async () => {
      const user = useUser();
@@ -19,9 +22,9 @@ describe('projects', () => {
      await projects(client);

      const output = await readOutputStream(client, 2);
      const { org } = getDataFromIntro(output.split('\n')[0]);
      const header: string[] = parseTable(output.split('\n')[2]);
      const data: string[] = parseTable(output.split('\n')[3]);
      const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
      const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
      const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
      data.pop();

      expect(org).toEqual(user.username);
@@ -40,9 +43,9 @@ describe('projects', () => {
      await projects(client);

      const output = await readOutputStream(client, 2);
      const { org } = getDataFromIntro(output.split('\n')[0]);
      const header: string[] = parseTable(output.split('\n')[2]);
      const data: string[] = parseTable(output.split('\n')[3]);
      const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
      const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
      const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
      data.pop();

      expect(org).toEqual(team[0].slug);
@@ -101,7 +101,9 @@ describe('pull', () => {
        Object {
          "orgId": "team_dummy",
          "projectId": "vercel-pull-next",
          "settings": Object {},
          "settings": Object {
            "createdAt": 1555413045188,
          },
        }
      `);
    } finally {
51 packages/cli/test/unit/util/build/sort-builders.test.ts Normal file
@@ -0,0 +1,51 @@
import { sortBuilders } from '../../../../src/util/build/sort-builders';

describe('sortBuilders()', () => {
  test.each([
    {
      name: 'should sort @vercel/next from middle to beginning',
      input: ['@vercel/node', '@vercel/next', '@vercel/python'],
      output: ['@vercel/next', '@vercel/node', '@vercel/python'],
    },
    {
      name: 'should sort @vercel/static-build from middle to beginning',
      input: ['@vercel/node', '@vercel/static-build', '@vercel/python'],
      output: ['@vercel/static-build', '@vercel/node', '@vercel/python'],
    },
    {
      name: 'should sort @vercel/remix from end to beginning',
      input: ['@vercel/python', '@vercel/node', '@vercel/remix'],
      output: ['@vercel/remix', '@vercel/python', '@vercel/node'],
    },
    {
      name: 'should sort @vercel/redwood from beginning to beginning',
      input: ['@vercel/redwood', '@vercel/python', '@vercel/ruby'],
      output: ['@vercel/redwood', '@vercel/python', '@vercel/ruby'],
    },
    {
      name: 'should sort @vercel/hydrogen from end to beginning',
      input: ['@vercel/python', '@vercel/hydrogen'],
      output: ['@vercel/hydrogen', '@vercel/python'],
    },
    {
      name: 'should sort @vercel/static-build to beginning with many @vercel/node',
      input: [
        '@vercel/node',
        '@vercel/node',
        '@vercel/node',
        '@vercel/static-build',
        '@vercel/node',
      ],
      output: [
        '@vercel/static-build',
        '@vercel/node',
        '@vercel/node',
        '@vercel/node',
        '@vercel/node',
      ],
    },
  ])('$name', ({ input, output }) => {
    const builders = sortBuilders(input.map(use => ({ use })));
    expect(builders.map(b => b.use)).toEqual(output);
  });
});
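A minimal sketch of a sort that satisfies the cases above: builders for frontend frameworks are hoisted to the front while everything else keeps its relative order. The exact framework list and the real `sortBuilders` implementation may differ; this only illustrates the behavior the tests pin down.

```ts
const FRONTEND_BUILDERS = new Set([
  '@vercel/next',
  '@vercel/static-build',
  '@vercel/remix',
  '@vercel/redwood',
  '@vercel/hydrogen',
]);

// Stable partition: frontend builders first, relative order preserved.
function sortBuildersSketch<T extends { use: string }>(builders: T[]): T[] {
  const front: T[] = [];
  const rest: T[] = [];
  for (const b of builders) {
    (FRONTEND_BUILDERS.has(b.use) ? front : rest).push(b);
  }
  return [...front, ...rest];
}
```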
@@ -6,8 +6,10 @@ import {
|
||||
createGitMeta,
|
||||
getRemoteUrl,
|
||||
isDirty,
|
||||
} from '../../../../src/util/deploy/create-git-meta';
|
||||
} from '../../../../src/util/create-git-meta';
|
||||
import { client } from '../../../mocks/client';
|
||||
import { parseRepoUrl } from '../../../../src/util/projects/connect-git-provider';
|
||||
import { readOutputStream } from '../../../helpers/read-output-stream';
|
||||
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../../fixtures/unit/create-git-meta', name);
|
||||
@@ -27,6 +29,97 @@ describe('getRemoteUrl', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseRepoUrl', () => {
|
||||
it('should be null when a url does not match the regex', () => {
|
||||
const parsedUrl = parseRepoUrl('https://examplecom/foo');
|
||||
expect(parsedUrl).toBeNull();
|
||||
});
|
||||
it('should be null when a url does not contain org and repo data', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/borked');
|
||||
expect(parsedUrl).toBeNull();
|
||||
});
|
||||
it('should parse url with a period in the repo name', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/next.js');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('next.js');
|
||||
});
|
||||
it('should parse url that ends with .git', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/next.js.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('next.js');
|
||||
});
|
||||
it('should parse github https url', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/vercel.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
it('should parse github https url without the .git suffix', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/vercel');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
it('should parse github git url', () => {
|
||||
const parsedUrl = parseRepoUrl('git://github.com/vercel/vercel.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
it('should parse github ssh url', () => {
|
||||
const parsedUrl = parseRepoUrl('git@github.com:vercel/vercel.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
|
||||
it('should parse gitlab https url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'https://gitlab.com/gitlab-examples/knative-kotlin-app.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('gitlab');
|
||||
expect(parsedUrl?.org).toEqual('gitlab-examples');
|
||||
expect(parsedUrl?.repo).toEqual('knative-kotlin-app');
|
||||
});
|
||||
it('should parse gitlab ssh url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'git@gitlab.com:gitlab-examples/knative-kotlin-app.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('gitlab');
|
||||
expect(parsedUrl?.org).toEqual('gitlab-examples');
|
||||
expect(parsedUrl?.repo).toEqual('knative-kotlin-app');
|
||||
});
|
||||
|
||||
it('should parse bitbucket https url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'https://bitbucket.org/atlassianlabs/maven-project-example.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('bitbucket');
|
||||
expect(parsedUrl?.org).toEqual('atlassianlabs');
|
||||
expect(parsedUrl?.repo).toEqual('maven-project-example');
|
||||
});
|
||||
it('should parse bitbucket ssh url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'git@bitbucket.org:atlassianlabs/maven-project-example.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('bitbucket');
|
||||
expect(parsedUrl?.org).toEqual('atlassianlabs');
|
||||
expect(parsedUrl?.repo).toEqual('maven-project-example');
|
||||
});
|
||||
});
|
||||
|
||||
describe('createGitMeta', () => {
|
||||
it('is undefined when it does not receive a remote url', async () => {
|
||||
const directory = fixture('no-origin');
|
||||
@@ -135,7 +228,17 @@ describe('createGitMeta', () => {
|
||||
await fs.copy(directory, tmpDir);
|
||||
await fs.rename(join(tmpDir, 'git'), join(tmpDir, '.git'));
|
||||
|
||||
client.output.debugEnabled = true;
|
||||
const data = await createGitMeta(tmpDir, client.output);
|
||||
|
||||
const output = await readOutputStream(client, 2);
|
||||
|
||||
expect(output).toContain(
|
||||
`Failed to get last commit. The directory is likely not a Git repo, there are no latest commits, or it is corrupted.`
|
||||
);
|
||||
expect(output).toContain(
|
||||
`Failed to determine if Git repo has been modified:`
|
||||
);
|
||||
expect(data).toBeUndefined();
|
||||
} finally {
|
||||
await fs.remove(tmpDir);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "12.1.0",
|
||||
"version": "12.1.2",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -42,8 +42,8 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "3.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "1.1.0",
|
||||
"version": "1.1.1",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/js-yaml": "3.12.1",
|
||||
"@types/node": "12.0.4",
|
||||
"@types/node-fetch": "2.5.8",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"ajv": "6.12.2",
|
||||
"typescript": "4.3.4"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/fs-detectors",
|
||||
"version": "1.0.1",
|
||||
"version": "2.0.1",
|
||||
"description": "Vercel filesystem detectors",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
@@ -20,8 +20,8 @@
|
||||
"test-unit": "yarn test"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/frameworks": "1.1.0",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/frameworks": "1.1.1",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"glob": "8.0.3",
|
||||
"js-yaml": "4.1.0",
|
||||
"minimatch": "3.0.4",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import minimatch from 'minimatch';
|
||||
import { valid as validSemver } from 'semver';
|
||||
import { parse as parsePath, extname } from 'path';
|
||||
import type { Route, Source } from '@vercel/routing-utils';
|
||||
import type { Route, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import frameworkList, { Framework } from '@vercel/frameworks';
|
||||
import type {
|
||||
PackageJson,
|
||||
@@ -155,8 +155,8 @@ export async function detectBuilders(
|
||||
|
||||
let fallbackEntrypoint: string | null = null;
|
||||
|
||||
const apiRoutes: Source[] = [];
|
||||
const dynamicRoutes: Source[] = [];
|
||||
const apiRoutes: RouteWithSrc[] = [];
|
||||
const dynamicRoutes: RouteWithSrc[] = [];
|
||||
|
||||
// API
|
||||
for (const fileName of sortedFiles) {
|
||||
@@ -692,7 +692,7 @@ function getApiRoute(
|
||||
options: Options,
|
||||
absolutePathCache: Map<string, string>
|
||||
): {
|
||||
apiRoute: Source | null;
|
||||
apiRoute: RouteWithSrc | null;
|
||||
isDynamic: boolean;
|
||||
routeError: ErrorResponse | null;
|
||||
} {
|
||||
@@ -886,7 +886,7 @@ function createRouteFromPath(
|
||||
filePath: string,
|
||||
featHandleMiss: boolean,
|
||||
cleanUrls: boolean
|
||||
): { route: Source; isDynamic: boolean } {
|
||||
): { route: RouteWithSrc; isDynamic: boolean } {
|
||||
const parts = filePath.split('/');
|
||||
|
||||
let counter = 1;
|
||||
@@ -932,7 +932,7 @@ function createRouteFromPath(
|
||||
? `^/${srcParts.slice(0, -1).join('/')}${srcParts.slice(-1)[0]}$`
|
||||
: `^/${srcParts.join('/')}$`;
|
||||
|
||||
let route: Source;
|
||||
let route: RouteWithSrc;
|
||||
|
||||
if (featHandleMiss) {
|
||||
const extensionless = ext ? filePath.slice(0, -ext.length) : filePath;
|
||||
@@ -959,8 +959,8 @@ interface LimitedRoutes {
|
||||
|
||||
function getRouteResult(
|
||||
pkg: PackageJson | undefined | null,
|
||||
apiRoutes: Source[],
|
||||
dynamicRoutes: Source[],
|
||||
apiRoutes: RouteWithSrc[],
|
||||
dynamicRoutes: RouteWithSrc[],
|
||||
outputDirectory: string,
|
||||
apiBuilders: Builder[],
|
||||
frontendBuilder: Builder | null,
|
||||
|
||||
@@ -13,8 +13,6 @@ interface Metadata {
|
||||
hasMiddleware: boolean;
|
||||
}
|
||||
|
||||
const enableFileSystemApiFrameworks = new Set(['solidstart']);
|
||||
|
||||
/**
|
||||
* If the Deployment can be built with the new File System API,
|
||||
* return the new Builder. Otherwise an "Exclusion Condition"
|
||||
@@ -61,11 +59,7 @@ export async function detectFileSystemAPI({
|
||||
hasMiddleware,
|
||||
};
|
||||
|
||||
const isEnabled =
|
||||
enableFlag ||
|
||||
hasMiddleware ||
|
||||
hasDotOutput ||
|
||||
enableFileSystemApiFrameworks.has(framework);
|
||||
const isEnabled = enableFlag || hasMiddleware || hasDotOutput;
|
||||
if (!isEnabled) {
|
||||
return { metadata, fsApiBuilder: null, reason: 'Flag not enabled.' };
|
||||
}
|
||||
|
||||
@@ -80,11 +80,13 @@ export async function detectFramework({
  fs,
  frameworkList,
}: DetectFrameworkOptions): Promise<string | null> {
  for (const framework of frameworkList) {
    if (await matches(fs, framework)) {
      return framework.slug;
    }
  }

  return null;
  const result = await Promise.all(
    frameworkList.map(async frameworkMatch => {
      if (await matches(fs, frameworkMatch)) {
        return frameworkMatch.slug;
      }
      return null;
    })
  );
  return result.find(res => res !== null) ?? null;
}
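The rewrite runs all framework checks concurrently, but the first entry in `frameworkList` still wins: `Promise.all` preserves input order, so `.find()` returns the earliest matching slug regardless of which check resolved first. A self-contained illustration of the same pattern with dummy matchers:

```ts
async function firstMatch(
  checks: Array<{ slug: string; matches: () => Promise<boolean> }>
): Promise<string | null> {
  // Results line up index-for-index with `checks`.
  const results = await Promise.all(
    checks.map(async c => ((await c.matches()) ? c.slug : null))
  );
  return results.find(r => r !== null) ?? null;
}

// firstMatch([
//   { slug: 'nextjs', matches: async () => true },
//   { slug: 'gatsby', matches: async () => true },
// ]) resolves to 'nextjs', matching the framework-order test added
// elsewhere in this diff.
```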
@@ -1,4 +1,8 @@
|
||||
import type { Source, Route, Handler } from '@vercel/routing-utils';
|
||||
import type {
|
||||
Route,
|
||||
RouteWithHandle as Handler,
|
||||
RouteWithSrc as Source,
|
||||
} from '@vercel/routing-utils';
|
||||
import {
|
||||
detectBuilders,
|
||||
detectOutputDirectory,
|
||||
|
||||
@@ -252,6 +252,19 @@ describe('DetectorFilesystem', () => {
|
||||
expect(await detectFramework({ fs, frameworkList })).toBe('nextjs');
|
||||
});
|
||||
|
||||
it('Detect frameworks based on ascending order in framework list', async () => {
|
||||
const fs = new VirtualFilesystem({
|
||||
'package.json': JSON.stringify({
|
||||
dependencies: {
|
||||
next: '9.0.0',
|
||||
gatsby: '4.18.0',
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
expect(await detectFramework({ fs, frameworkList })).toBe('nextjs');
|
||||
});
|
||||
|
||||
it('Detect Nuxt.js', async () => {
|
||||
const fs = new VirtualFilesystem({
|
||||
'package.json': JSON.stringify({
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/go",
|
||||
"version": "2.0.5",
|
||||
"version": "2.0.7",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
|
||||
@@ -25,7 +25,7 @@
|
||||
"@types/fs-extra": "^5.0.5",
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "^4.0.0",
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"async-retry": "1.3.1",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/hydrogen",
|
||||
"version": "0.0.2",
|
||||
"version": "0.0.4",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
@@ -22,7 +22,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "*",
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"typescript": "4.6.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "3.1.4",
|
||||
"version": "3.1.7",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -45,9 +45,9 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"@vercel/nft": "0.20.1",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
"cheerio": "1.0.0-rc.10",
|
||||
|
||||
@@ -24,7 +24,7 @@ import {
|
||||
NodejsLambda,
|
||||
BuildResultV2Typical as BuildResult,
|
||||
} from '@vercel/build-utils';
|
||||
import { Handler, Route, Source } from '@vercel/routing-utils';
|
||||
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import {
|
||||
convertHeaders,
|
||||
convertRedirects,
|
||||
@@ -399,6 +399,7 @@ export const build: BuildV2 = async ({
|
||||
const env: typeof process.env = { ...spawnOpts.env };
|
||||
const memoryToConsume = Math.floor(os.totalmem() / 1024 ** 2) - 128;
|
||||
env.NODE_OPTIONS = `--max_old_space_size=${memoryToConsume}`;
|
||||
env.NEXT_EDGE_RUNTIME_PROVIDER = 'vercel';
|
||||
|
||||
if (target) {
|
||||
// Since version v10.0.8-canary.15 of Next.js the NEXT_PRIVATE_TARGET env
|
||||
@@ -895,7 +896,7 @@ export const build: BuildV2 = async ({
|
||||
...(output[path.join('./', entryDirectory, '404')] ||
|
||||
output[path.join('./', entryDirectory, '404/index')]
|
||||
? [
|
||||
{ handle: 'error' } as Handler,
|
||||
{ handle: 'error' } as RouteWithHandle,
|
||||
|
||||
{
|
||||
status: 404,
|
||||
@@ -927,7 +928,7 @@ export const build: BuildV2 = async ({
|
||||
let trailingSlash = false;
|
||||
|
||||
redirects = redirects.filter(_redir => {
|
||||
const redir = _redir as Source;
|
||||
const redir = _redir as RouteWithSrc;
|
||||
// detect the trailing slash redirect and make sure it's
|
||||
// kept above the wildcard mapping to prevent erroneous redirects
|
||||
// since non-continue routes come after continue the $wildcard
|
||||
@@ -1145,7 +1146,7 @@ export const build: BuildV2 = async ({
|
||||
continue;
|
||||
}
|
||||
|
||||
const route: Source & { dest: string } = {
|
||||
const route: RouteWithSrc & { dest: string } = {
|
||||
src: (
|
||||
dataRoute.namedDataRouteRegex || dataRoute.dataRouteRegex
|
||||
).replace(/^\^/, `^${appMountPrefixNoTrailingSlash}`),
|
||||
@@ -1174,7 +1175,7 @@ export const build: BuildV2 = async ({
|
||||
if (isOmittedRoute && isServerMode) {
|
||||
// only match this route when in preview mode so
|
||||
// preview works for non-prerender fallback: false pages
|
||||
(route as Source).has = [
|
||||
(route as RouteWithSrc).has = [
|
||||
{
|
||||
type: 'cookie',
|
||||
key: '__prerender_bypass',
|
||||
@@ -2453,7 +2454,7 @@ export const build: BuildV2 = async ({
|
||||
? []
|
||||
: [
|
||||
// Custom Next.js 404 page
|
||||
{ handle: 'error' } as Handler,
|
||||
{ handle: 'error' } as RouteWithHandle,
|
||||
|
||||
...(i18n && (static404Page || hasIsr404Page)
|
||||
? [
|
||||
|
||||
@@ -14,7 +14,7 @@ import {
|
||||
Files,
|
||||
BuildResultV2Typical as BuildResult,
|
||||
} from '@vercel/build-utils';
|
||||
import { Handler, Route, Source } from '@vercel/routing-utils';
|
||||
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import { MAX_AGE_ONE_YEAR } from '.';
|
||||
import {
|
||||
NextRequiredServerFilesManifest,
|
||||
@@ -56,6 +56,7 @@ import prettyBytes from 'pretty-bytes';
|
||||
const CORRECT_NOT_FOUND_ROUTES_VERSION = 'v12.0.1';
|
||||
const CORRECT_MIDDLEWARE_ORDER_VERSION = 'v12.1.7-canary.29';
|
||||
const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';
|
||||
const EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION = 'v12.2.0';
|
||||
|
||||
export async function serverBuild({
|
||||
dynamicPages,
|
||||
@@ -133,6 +134,10 @@ export async function serverBuild({
|
||||
const lambdaPageKeys = Object.keys(lambdaPages);
|
||||
const internalPages = ['_app.js', '_error.js', '_document.js'];
|
||||
const pageBuildTraces = await glob('**/*.js.nft.json', pagesDir);
|
||||
const isEmptyAllowQueryForPrendered = semver.gte(
|
||||
nextVersion,
|
||||
EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION
|
||||
);
|
||||
const isCorrectNotFoundRoutes = semver.gte(
|
||||
nextVersion,
|
||||
CORRECT_NOT_FOUND_ROUTES_VERSION
|
||||
@@ -756,6 +761,7 @@ export async function serverBuild({
|
||||
static404Page,
|
||||
hasPages404: routesManifest.pages404,
|
||||
isCorrectNotFoundRoutes,
|
||||
isEmptyAllowQueryForPrendered,
|
||||
});
|
||||
|
||||
Object.keys(prerenderManifest.staticRoutes).forEach(route =>
|
||||
@@ -822,7 +828,7 @@ export async function serverBuild({
|
||||
const { staticFiles, publicDirectoryFiles, staticDirectoryFiles } =
|
||||
await getStaticFiles(entryPath, entryDirectory, outputDirectory);
|
||||
|
||||
const notFoundPreviewRoutes: Source[] = [];
|
||||
const notFoundPreviewRoutes: RouteWithSrc[] = [];
|
||||
|
||||
if (prerenderManifest.notFoundRoutes?.length > 0 && canUsePreviewMode) {
|
||||
// we combine routes into one src here to reduce the number of needed
|
||||
@@ -1378,7 +1384,7 @@ export async function serverBuild({
|
||||
},
|
||||
|
||||
// error handling
|
||||
{ handle: 'error' } as Handler,
|
||||
{ handle: 'error' } as RouteWithHandle,
|
||||
|
||||
// Custom Next.js 404 page
|
||||
...(i18n && (static404Page || hasIsr404Page || lambdaPages['404.js'])
|
||||
|
||||
@@ -16,7 +16,7 @@ import {
|
||||
EdgeFunction,
|
||||
} from '@vercel/build-utils';
|
||||
import { NodeFileTraceReasons } from '@vercel/nft';
|
||||
import { Header, Rewrite, Route, Source } from '@vercel/routing-utils';
|
||||
import { Header, Rewrite, Route, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import { Sema } from 'async-sema';
|
||||
import crc32 from 'buffer-crc32';
|
||||
import fs, { lstat, stat } from 'fs-extra';
|
||||
@@ -273,8 +273,8 @@ export async function getDynamicRoutes(
|
||||
canUsePreviewMode?: boolean,
|
||||
bypassToken?: string,
|
||||
isServerMode?: boolean,
|
||||
dynamicMiddlewareRouteMap?: Map<string, Source>
|
||||
): Promise<Source[]> {
|
||||
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>
|
||||
): Promise<RouteWithSrc[]> {
|
||||
if (routesManifest) {
|
||||
switch (routesManifest.version) {
|
||||
case 1:
|
||||
@@ -307,7 +307,7 @@ export async function getDynamicRoutes(
|
||||
}
|
||||
|
||||
const { page, namedRegex, regex, routeKeys } = params;
|
||||
const route: Source = {
|
||||
const route: RouteWithSrc = {
|
||||
src: namedRegex || regex,
|
||||
dest: `${!isDev ? path.join('/', entryDirectory, page) : page}${
|
||||
routeKeys
|
||||
@@ -400,7 +400,7 @@ export async function getDynamicRoutes(
|
||||
matcher: getRouteRegex && getRouteRegex(pageName).re,
|
||||
}));
|
||||
|
||||
const routes: Source[] = [];
|
||||
const routes: RouteWithSrc[] = [];
|
||||
pageMatchers.forEach(pageMatcher => {
|
||||
// in `vercel dev` we don't need to prefix the destination
|
||||
const dest = !isDev
|
||||
@@ -419,7 +419,7 @@ export async function getDynamicRoutes(
|
||||
}
|
||||
|
||||
export function localizeDynamicRoutes(
|
||||
dynamicRoutes: Source[],
|
||||
dynamicRoutes: RouteWithSrc[],
|
||||
dynamicPrefix: string,
|
||||
entryDirectory: string,
|
||||
staticPages: Files,
|
||||
@@ -427,8 +427,8 @@ export function localizeDynamicRoutes(
|
||||
routesManifest?: RoutesManifest,
|
||||
isServerMode?: boolean,
|
||||
isCorrectLocaleAPIRoutes?: boolean
|
||||
): Source[] {
|
||||
return dynamicRoutes.map((route: Source) => {
|
||||
): RouteWithSrc[] {
|
||||
return dynamicRoutes.map((route: RouteWithSrc) => {
|
||||
// i18n is already handled for middleware
|
||||
if (route.middleware !== undefined || route.middlewarePath !== undefined)
|
||||
return route;
|
||||
@@ -1665,6 +1665,7 @@ type OnPrerenderRouteArgs = {
|
||||
pageLambdaMap: { [key: string]: string };
|
||||
routesManifest?: RoutesManifest;
|
||||
isCorrectNotFoundRoutes?: boolean;
|
||||
isEmptyAllowQueryForPrendered?: boolean;
|
||||
};
|
||||
let prerenderGroup = 1;
|
||||
|
||||
@@ -1698,6 +1699,7 @@ export const onPrerenderRoute =
|
||||
pageLambdaMap,
|
||||
routesManifest,
|
||||
isCorrectNotFoundRoutes,
|
||||
isEmptyAllowQueryForPrendered,
|
||||
} = prerenderRouteArgs;
|
||||
|
||||
if (isBlocking && isFallback) {
|
||||
@@ -1901,7 +1903,6 @@ export const onPrerenderRoute =
|
||||
// a given path. All other query keys will be striped. We can automatically
|
||||
// detect this for prerender (ISR) pages by reading the routes manifest file.
|
||||
const pageKey = srcRoute || routeKey;
|
||||
const isDynamic = isDynamicRoute(pageKey);
|
||||
const route = routesManifest?.dynamicRoutes.find(
|
||||
(r): r is RoutesManifestRoute =>
|
||||
r.page === pageKey && !('isMiddleware' in r)
|
||||
@@ -1911,14 +1912,33 @@ export const onPrerenderRoute =
|
||||
// we have sufficient information to set it
|
||||
let allowQuery: string[] | undefined;
|
||||
|
||||
if (routeKeys) {
|
||||
// if we have routeKeys in the routes-manifest we use those
|
||||
// for allowQuery for dynamic routes
|
||||
allowQuery = Object.values(routeKeys);
|
||||
} else if (!isDynamic) {
|
||||
// for non-dynamic routes we use an empty array since
|
||||
// no query values bust the cache for non-dynamic prerenders
|
||||
allowQuery = [];
|
||||
if (isEmptyAllowQueryForPrendered) {
|
||||
const isDynamic = isDynamicRoute(routeKey);
|
||||
|
||||
if (!isDynamic) {
|
||||
// for non-dynamic routes we use an empty array since
|
||||
// no query values bust the cache for non-dynamic prerenders
|
||||
// prerendered paths also do not pass allowQuery as they match
|
||||
// during handle: 'filesystem' so should not cache differently
|
||||
// by query values
|
||||
allowQuery = [];
|
||||
} else if (routeKeys) {
|
||||
// if we have routeKeys in the routes-manifest we use those
|
||||
// for allowQuery for dynamic routes
|
||||
allowQuery = Object.values(routeKeys);
|
||||
}
|
||||
} else {
|
||||
const isDynamic = isDynamicRoute(pageKey);
|
||||
|
||||
if (routeKeys) {
|
||||
// if we have routeKeys in the routes-manifest we use those
|
||||
// for allowQuery for dynamic routes
|
||||
allowQuery = Object.values(routeKeys);
|
||||
} else if (!isDynamic) {
|
||||
// for non-dynamic routes we use an empty array since
|
||||
// no query values bust the cache for non-dynamic prerenders
|
||||
allowQuery = [];
|
||||
}
|
||||
}
|
||||
|
||||
prerenders[outputPathPage] = new Prerender({
|
||||
@@ -2148,6 +2168,7 @@ interface EdgeFunctionInfo {
|
||||
page: string;
|
||||
regexp: string;
|
||||
wasm?: { filePath: string; name: string }[];
|
||||
assets?: { filePath: string; name: string }[];
|
||||
}
|
||||
|
||||
export async function getMiddlewareBundle({
|
||||
@@ -2234,6 +2255,23 @@ export async function getMiddlewareBundle({
|
||||
{}
|
||||
);
|
||||
|
||||
const assetFiles = (edgeFunction.assets ?? []).reduce(
|
||||
(acc: Files, { filePath, name }) => {
|
||||
const fullFilePath = path.join(
|
||||
entryPath,
|
||||
outputDirectory,
|
||||
filePath
|
||||
);
|
||||
acc[`assets/${name}`] = new FileFsRef({
|
||||
mode: 0o644,
|
||||
contentType: 'application/octet-stream',
|
||||
fsPath: fullFilePath,
|
||||
});
|
||||
return acc;
|
||||
},
|
||||
{}
|
||||
);
|
||||
|
||||
return new EdgeFunction({
|
||||
deploymentTarget: 'v8-worker',
|
||||
name: edgeFunction.name,
|
||||
@@ -2251,9 +2289,16 @@ export async function getMiddlewareBundle({
|
||||
}),
|
||||
}),
|
||||
...wasmFiles,
|
||||
...assetFiles,
|
||||
},
|
||||
entrypoint: 'index.js',
|
||||
envVarsInUse: edgeFunction.env,
|
||||
assets: (edgeFunction.assets ?? []).map(({ name }) => {
|
||||
return {
|
||||
name,
|
||||
path: `assets/${name}`,
|
||||
};
|
||||
}),
|
||||
});
|
||||
})(),
|
||||
routeSrc: getRouteSrc(edgeFunction, routesManifest),
|
||||
@@ -2267,7 +2312,7 @@ export async function getMiddlewareBundle({
|
||||
|
||||
const source: {
|
||||
staticRoutes: Route[];
|
||||
dynamicRouteMap: Map<string, Source>;
|
||||
dynamicRouteMap: Map<string, RouteWithSrc>;
|
||||
edgeFunctions: Record<string, EdgeFunction>;
|
||||
} = {
|
||||
staticRoutes: [],
|
||||
|
||||
@@ -1,8 +1,144 @@
|
||||
/* eslint-env jest */
|
||||
const path = require('path');
|
||||
const { deployAndTest } = require('../../utils');
|
||||
const cheerio = require('cheerio');
|
||||
const { deployAndTest, check, waitFor } = require('../../utils');
|
||||
const fetch = require('../../../../../test/lib/deployment/fetch-retry');
|
||||
|
||||
async function checkForChange(url, initialValue, getNewValue) {
|
||||
return check(async () => {
|
||||
const res = await fetch(url);
|
||||
|
||||
if (res.status !== 200) {
|
||||
throw new Error(`Invalid status code ${res.status}`);
|
||||
}
|
||||
const newValue = await getNewValue(res);
|
||||
|
||||
return initialValue !== newValue
|
||||
? 'success'
|
||||
: JSON.stringify({ initialValue, newValue });
|
||||
}, 'success');
|
||||
}
|
||||
|
||||
const ctx = {};
|
||||
|
||||
describe(`${__dirname.split(path.sep).pop()}`, () => {
|
||||
it('should deploy and pass probe checks', async () => {
|
||||
await deployAndTest(__dirname);
|
||||
const info = await deployAndTest(__dirname);
|
||||
Object.assign(ctx, info);
|
||||
});
|
||||
|
||||
it.each([
|
||||
{
|
||||
title: 'should update content for prerendered path correctly',
|
||||
pathsToCheck: [
|
||||
{ urlPath: '/fallback-blocking/first' },
|
||||
{ urlPath: '/fallback-blocking/first', query: '?slug=first' },
|
||||
{ urlPath: '/fallback-blocking/first', query: '?slug=random' },
|
||||
{ urlPath: '/fallback-blocking/first', query: '?another=value' },
|
||||
],
|
||||
},
|
||||
{
|
||||
title: 'should update content for non-prerendered path correctly',
|
||||
pathsToCheck: [
|
||||
{ urlPath: '/fallback-blocking/on-demand-2' },
|
||||
{
|
||||
urlPath: '/fallback-blocking/on-demand-2',
|
||||
query: '?slug=on-demand-2',
|
||||
},
|
||||
{ urlPath: '/fallback-blocking/on-demand-2', query: '?slug=random' },
|
||||
{ urlPath: '/fallback-blocking/on-demand-2', query: '?another=value' },
|
||||
],
|
||||
},
|
||||
])('$title', async ({ pathsToCheck }) => {
|
||||
let initialRandom;
|
||||
let initialRandomData;
|
||||
let preRevalidateRandom;
|
||||
let preRevalidateRandomData;
|
||||
|
||||
const checkPaths = async pathsToCheck => {
|
||||
for (const { urlPath, query } of pathsToCheck) {
|
||||
console.log('checking', {
|
||||
urlPath,
|
||||
query,
|
||||
initialRandom,
|
||||
preRevalidateRandom,
|
||||
});
|
||||
|
||||
if (preRevalidateRandom) {
|
||||
// wait for change as cache may take a little to propagate
|
||||
const initialUrl = `${ctx.deploymentUrl}${urlPath}${query || ''}`;
|
||||
await checkForChange(initialUrl, preRevalidateRandom, async () => {
|
||||
const res = await fetch(initialUrl);
|
||||
const $ = cheerio.load(await res.text());
|
||||
return JSON.parse($('#props').text()).random;
|
||||
});
|
||||
}
|
||||
|
||||
const res = await fetch(`${ctx.deploymentUrl}${urlPath}${query || ''}`);
|
||||
expect(res.status).toBe(200);
|
||||
|
||||
const $ = await cheerio.load(await res.text());
|
||||
const props = JSON.parse($('#props').text());
|
||||
|
||||
if (initialRandom) {
|
||||
// for fallback paths the initial value is generated
|
||||
// in the foreground and then a revalidation is kicked off
|
||||
// in the background so the initial value will be replaced
|
||||
if (initialRandom !== props.random && urlPath.includes('on-demand')) {
|
||||
initialRandom = props.random;
|
||||
} else {
|
||||
expect(initialRandom).toBe(props.random);
|
||||
}
|
||||
} else {
|
||||
initialRandom = props.random;
|
||||
}
|
||||
expect(isNaN(initialRandom)).toBe(false);
|
||||
|
||||
const dataRes = await fetch(
|
||||
`${ctx.deploymentUrl}/_next/data/testing-build-id${urlPath}.json${
|
||||
query || ''
|
||||
}`
|
||||
);
|
||||
expect(dataRes.status).toBe(200);
|
||||
|
||||
const { pageProps: dataProps } = await dataRes.json();
|
||||
|
||||
if (initialRandomData) {
|
||||
// for fallback paths the initial value is generated
|
||||
// in the foreground and then a revalidation is kicked off
|
||||
// in the background so the initial value will be replaced
|
||||
if (
|
||||
initialRandomData !== dataProps.random &&
|
||||
urlPath.includes('on-demand-2')
|
||||
) {
|
||||
initialRandomData = dataProps.random;
|
||||
} else {
|
||||
expect(initialRandomData).toBe(dataProps.random);
|
||||
}
|
||||
} else {
|
||||
initialRandomData = dataProps.random;
|
||||
}
|
||||
expect(isNaN(initialRandomData)).toBe(false);
|
||||
}
|
||||
};
|
||||
|
||||
await checkPaths(pathsToCheck);
|
||||
|
||||
preRevalidateRandom = initialRandom;
|
||||
preRevalidateRandomData = initialRandomData;
|
||||
|
||||
initialRandom = undefined;
|
||||
initialRandomData = undefined;
|
||||
|
||||
const revalidateRes = await fetch(
|
||||
`${ctx.deploymentUrl}/api/revalidate?urlPath=${pathsToCheck[0].urlPath}`
|
||||
);
|
||||
expect(revalidateRes.status).toBe(200);
|
||||
expect((await revalidateRes.json()).revalidated).toBe(true);
|
||||
|
||||
await checkPaths(pathsToCheck);
|
||||
|
||||
expect(preRevalidateRandom).toBeDefined();
|
||||
expect(preRevalidateRandomData).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
10
packages/next/test/fixtures/00-server-build/pages/api/revalidate.js
vendored
Normal file
10
packages/next/test/fixtures/00-server-build/pages/api/revalidate.js
vendored
Normal file
@@ -0,0 +1,10 @@
|
||||
export default async function handler(req, res) {
|
||||
try {
|
||||
console.log('revalidating', req.query.urlPath);
|
||||
await res.revalidate(req.query.urlPath);
|
||||
return res.json({ revalidated: true });
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
return res.json({ revalidated: false });
|
||||
}
|
||||
}
|
||||
@@ -16,7 +16,7 @@ export const getStaticProps = ({ params }) => {

export const getStaticPaths = () => {
return {
paths: ['/fallback-blocking/first'],
paths: ['/fallback-blocking/first', '/fallback-blocking/on-demand-1'],
fallback: 'blocking',
};
};

@@ -78,8 +78,20 @@ it('should build using server build', async () => {
expect(output['dynamic/[slug]'].maxDuration).toBe(5);
expect(output['fallback/[slug]'].type).toBe('Prerender');
expect(output['fallback/[slug]'].allowQuery).toEqual(['slug']);
expect(output['_next/data/testing-build-id/fallback/[slug].json'].type).toBe(
'Prerender'
);
expect(
output['_next/data/testing-build-id/fallback/[slug].json'].allowQuery
).toEqual(['slug']);
expect(output['fallback/first'].type).toBe('Prerender');
expect(output['fallback/first'].allowQuery).toEqual(['slug']);
expect(output['fallback/first'].allowQuery).toEqual([]);
expect(output['_next/data/testing-build-id/fallback/first.json'].type).toBe(
'Prerender'
);
expect(
output['_next/data/testing-build-id/fallback/first.json'].allowQuery
).toEqual([]);
expect(output['api'].type).toBe('Lambda');
expect(output['api'].allowQuery).toBe(undefined);
expect(output['api'].memory).toBe(128);

@@ -1,3 +1,6 @@
module.exports = (phase, { defaultConfig }) => ({
pageExtensions: [...defaultConfig.pageExtensions, 'hello.js'],
generateBuildId() {
return 'testing-build-id';
},
});

2
packages/next/test/utils.ts
vendored
@@ -129,7 +129,7 @@ export async function deployAndTest(fixtureDir) {
};
}

async function waitFor(milliseconds) {
export async function waitFor(milliseconds) {
return new Promise(resolve => {
setTimeout(resolve, milliseconds);
});

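With `waitFor` now exported, other test suites can pause between requests; a minimal illustrative use (the 2000 ms value is an assumption, not taken from this diff):

// e.g. give a background ISR revalidation a moment to finish before re-fetching
await waitFor(2000);
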
@@ -1,6 +1,6 @@
{
"name": "@vercel/node",
"version": "2.4.1",
"version": "2.4.4",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",

@@ -31,7 +31,7 @@
},
"dependencies": {
"@types/node": "*",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"@vercel/node-bridge": "3.0.0",
"@vercel/static-config": "2.0.1",
"edge-runtime": "1.0.1",

@@ -193,7 +193,7 @@ async function compileUserCode(entrypoint: string) {

let edgeHandler = module.exports.default;
if (!edgeHandler) {
throw new Error('No default export was found. Add a default export to handle requests.');
throw new Error('No default export was found. Add a default export to handle requests. Learn more: https://vercel.link/creating-edge-middleware');
}

let response = await edgeHandler(event.request, event);

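For reference, a minimal default export that would satisfy the check above; the handler shape is inferred from the `edgeHandler(event.request, event)` call shown here, so treat it as an illustrative sketch rather than a documented contract:

// Illustrative edge handler: receives the incoming Request and returns a Response.
export default async function handler(request) {
  return new Response('ok');
}
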
@@ -305,7 +305,7 @@ function parseRuntime(
throw new Error(
`Invalid function runtime "${runtime}" for "${entrypoint}". Valid runtimes are: ${JSON.stringify(
validRuntimes
)}`
)}. Learn more: https://vercel.link/creating-edge-functions`
);
}

@@ -1,6 +1,6 @@
{
"name": "@vercel/python",
"version": "3.0.5",
"version": "3.0.7",
"main": "./dist/index.js",
"license": "MIT",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",

@@ -23,7 +23,7 @@
"devDependencies": {
"@types/execa": "^0.9.0",
"@types/jest": "27.4.1",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"@vercel/ncc": "0.24.0",
"execa": "^1.0.0",
"typescript": "4.3.4"

@@ -25,9 +25,6 @@ const allOptions: PythonVersion[] = [
},
];

const upstreamProvider =
'This change is the result of a decision made by an upstream infrastructure provider (AWS)';

function getDevPythonVersion(): PythonVersion {
// Use the system-installed version of `python3` when running `vercel dev`
return {

@@ -75,14 +72,14 @@ export function getSupportedPythonVersion({
throw new NowBuildError({
code: 'BUILD_UTILS_PYTHON_VERSION_DISCONTINUED',
link: 'http://vercel.link/python-version',
message: `Python version "${selection.version}" detected in Pipfile.lock is discontinued and must be upgraded. ${upstreamProvider}.`,
message: `Python version "${selection.version}" detected in Pipfile.lock is discontinued and must be upgraded.`,
});
}

if (selection.discontinueDate) {
const d = selection.discontinueDate.toISOString().split('T')[0];
console.warn(
`Error: Python version "${selection.version}" detected in Pipfile.lock is deprecated. Deployments created on or after ${d} will fail to build. ${upstreamProvider}. http://vercel.link/python-version`
`Error: Python version "${selection.version}" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after ${d} will fail to build. http://vercel.link/python-version`
);
}

@@ -5,7 +5,7 @@
{
"path": "/",
"mustContain": "wsgi:RANDOMNESS_PLACEHOLDER",
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build"
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after 2022-07-18 will fail to build"
}
]
}

@@ -8,7 +8,7 @@
{
"path": "/",
"mustContain": "RANDOMNESS_PLACEHOLDER:env",
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build"
"logMustContain": "Python version \"3.6\" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after 2022-07-18 will fail to build"
}
]
}

4
packages/python/test/unit.test.ts
vendored
@@ -58,7 +58,7 @@ it('should throw for discontinued versions', async () => {
expect(() =>
getSupportedPythonVersion({ pipLockPythonVersion: '3.6' })
).toThrow(
'Python version "3.6" detected in Pipfile.lock is discontinued and must be upgraded. This change is the result of a decision made by an upstream infrastructure provider (AWS).'
'Python version "3.6" detected in Pipfile.lock is discontinued and must be upgraded.'
);
expect(warningMessages).toStrictEqual([]);
});

@@ -70,6 +70,6 @@ it('should warn for deprecated versions, soon to be discontinued', async () => {
getSupportedPythonVersion({ pipLockPythonVersion: '3.6' })
).toHaveProperty('runtime', 'python3.6');
expect(warningMessages).toStrictEqual([
'Error: Python version "3.6" detected in Pipfile.lock is deprecated. Deployments created on or after 2022-07-18 will fail to build. This change is the result of a decision made by an upstream infrastructure provider (AWS). http://vercel.link/python-version',
'Error: Python version "3.6" detected in Pipfile.lock has reached End-of-Life. Deployments created on or after 2022-07-18 will fail to build. http://vercel.link/python-version',
]);
});

@@ -1,6 +1,6 @@
{
"name": "@vercel/redwood",
"version": "1.0.6",
"version": "1.0.8",
"main": "./dist/index.js",
"license": "MIT",
"homepage": "https://vercel.com/docs",

@@ -21,13 +21,13 @@
},
"dependencies": {
"@vercel/nft": "0.20.1",
"@vercel/routing-utils": "1.13.5",
"@vercel/routing-utils": "2.0.0",
"semver": "6.1.1"
},
"devDependencies": {
"@types/aws-lambda": "8.10.19",
"@types/node": "*",
"@types/semver": "6.0.0",
"@vercel/build-utils": "5.0.1"
"@vercel/build-utils": "5.0.3"
}
}

@@ -283,12 +283,10 @@ export const build: BuildV2 = async ({
: '/index';

const defaultRoutesConfig = getTransformedRoutes({
nowConfig: {
// this makes sure we send back 200.html for unprerendered pages
rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }],
cleanUrls: true,
trailingSlash: false,
},
// this makes sure we send back 200.html for unprerendered pages
rewrites: [{ source: '/(.*)', destination: fallbackHtmlPage }],
cleanUrls: true,
trailingSlash: false,
});

if (defaultRoutesConfig.error) {

8
packages/redwood/test/test.js
vendored
@@ -1,6 +1,5 @@
const fs = require('fs');
const path = require('path');
const { version } = require('../package.json');

const {
packAndDeploy,

@@ -9,10 +8,15 @@ const {

jest.setTimeout(12 * 60 * 1000);

let buildUtilsUrl;
let builderUrl;
const buildUtilsUrl = version.includes('canary') ? '@canary' : undefined;

beforeAll(async () => {
if (!buildUtilsUrl) {
const buildUtilsPath = path.resolve(__dirname, '..', '..', 'build-utils');
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
console.log('buildUtilsUrl', buildUtilsUrl);
}
const builderPath = path.resolve(__dirname, '..');
builderUrl = await packAndDeploy(builderPath);
console.log('builderUrl', builderUrl);

@@ -1,6 +1,6 @@
{
"name": "@vercel/remix",
"version": "1.0.6",
"version": "1.0.9",
"license": "MIT",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",

@@ -26,7 +26,7 @@
"devDependencies": {
"@types/jest": "27.5.1",
"@types/node": "*",
"@vercel/build-utils": "5.0.1",
"@vercel/build-utils": "5.0.3",
"typescript": "4.6.4"
}
}

@@ -1,5 +1,5 @@
import { promises as fs } from 'fs';
import { dirname, join } from 'path';
import { dirname, join, relative } from 'path';
import {
debug,
download,

@@ -187,6 +187,15 @@ export const build: BuildV2 = async ({
// Explicit directory path the server output will be
serverBuildPath = join(remixConfig.serverBuildDirectory, 'index.js');
}

// Also check for whether we're in a monorepo.
// If we are, prepend the app root directory from config onto the build path.
// e.g. `/apps/my-remix-app/api/index.js`
const isMonorepo = repoRootPath && repoRootPath !== workPath;
if (isMonorepo) {
const rootDirectory = relative(repoRootPath, workPath);
serverBuildPath = join(rootDirectory, serverBuildPath);
}
} catch (err: any) {
// Ignore error if `remix.config.js` does not exist
if (err.code !== 'MODULE_NOT_FOUND') throw err;

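A small worked example of the path adjustment above, using assumed values (the layout is illustrative, not taken from this diff):

// Assumed monorepo layout, for illustration only:
//   repoRootPath    = '/repo'
//   workPath        = '/repo/apps/my-remix-app'
//   serverBuildPath = 'api/index.js'
// relative('/repo', '/repo/apps/my-remix-app') -> 'apps/my-remix-app'
// join('apps/my-remix-app', 'api/index.js')    -> 'apps/my-remix-app/api/index.js'
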
@@ -196,6 +205,7 @@ export const build: BuildV2 = async ({
glob('**', join(entrypointFsDirname, 'public')),
createRenderFunction(
entrypointFsDirname,
repoRootPath,
serverBuildPath,
needsHandler,
nodeVersion

@@ -230,6 +240,7 @@ function hasScript(scriptName: string, pkg: PackageJson | null) {
}

async function createRenderFunction(
entrypointDir: string,
rootDir: string,
serverBuildPath: string,
needsHandler: boolean,

@@ -250,6 +261,7 @@ async function createRenderFunction(
// Trace the handler with `@vercel/nft`
const trace = await nodeFileTrace([handlerPath], {
base: rootDir,
processCwd: entrypointDir,
});

for (const warning of trace.warnings) {

Some files were not shown because too many files have changed in this diff.