mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-24 03:39:11 +00:00)

Compare commits: @vercel/no... ... add/vc-ope (35 commits)
| SHA1 |
|---|
| 8ebe21ec09 |
| 5e25067cac |
| c76d787693 |
| 33c9f8a5df |
| cdd1ce903f |
| 2252e423d2 |
| 78a96169b0 |
| 862f0bf9b6 |
| 8044c492d8 |
| 14f6b4fc36 |
| e5faa2d61f |
| 5160e357d7 |
| e56858ed2b |
| b93d5dacd3 |
| 2e1b19e54f |
| 1588a2c3ba |
| 38e193617d |
| d799c3bb55 |
| 9b070359fc |
| 11f5b296e4 |
| a22e47c9f2 |
| 1ac1d73834 |
| d2bc2a084d |
| 968d832855 |
| 39cccaf0d4 |
| 1fc799b801 |
| 96772ad9c0 |
| 0f6d02f285 |
| fddb0e899e |
| b40c0bb07c |
| 8402013311 |
| ec0fbda657 |
| 23cccf0310 |
| 3ffee4e999 |
| 2010f884ca |
@@ -1,5 +1,4 @@
# https://prettier.io/docs/en/ignore.html

# ignore these files with an intentional syntax error
# ignore this file with an intentional syntax error
packages/cli/test/dev/fixtures/edge-function-error/api/edge-error-syntax.js
packages/cli/test/fixtures/unit/commands/build/node-error/api/typescript.ts
@@ -31,7 +31,7 @@
"prettier": "2.6.2",
"ts-eager": "2.0.2",
"ts-jest": "28.0.5",
"turbo": "1.3.2-canary.1"
"turbo": "1.3.1"
},
"scripts": {
"lerna": "lerna",
@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "5.0.3",
"version": "5.0.1",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",
@@ -33,11 +33,6 @@ export class EdgeFunction {
*/
envVarsInUse?: string[];

/**
* Extra binary files to be included in the edge function
*/
assets?: { name: string; path: string }[];

constructor(params: Omit<EdgeFunction, 'type'>) {
this.type = 'EdgeFunction';
this.name = params.name;
@@ -45,6 +40,5 @@ export class EdgeFunction {
this.entrypoint = params.entrypoint;
this.files = params.files;
this.envVarsInUse = params.envVarsInUse;
this.assets = params.assets;
}
}
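One side of this hunk carries an optional `assets` field on `EdgeFunction`, assigned in the constructor. A minimal sketch of constructing an edge function with extra binary assets, assuming the `@vercel/build-utils` API of this era; the `deploymentTarget` value and all names and paths below are illustrative, not taken from this diff:

```ts
import { EdgeFunction, FileBlob } from '@vercel/build-utils';

const edgeFn = new EdgeFunction({
  name: 'api/hello',
  // Assumed to be required by the class; 'v8-worker' is the usual value.
  deploymentTarget: 'v8-worker',
  entrypoint: 'index.js',
  files: {
    'index.js': new FileBlob({ data: 'export default () => new Response("ok");' }),
  },
  envVarsInUse: ['MY_TOKEN'],
  // Extra binary files to ship alongside the edge function, per the field above.
  assets: [{ name: 'data.bin', path: 'assets/data.bin' }],
});
```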
@@ -33,6 +33,9 @@ function getHint(isAuto = false) {
: `Please set "engines": { "node": "${range}" } in your \`package.json\` file to use Node.js ${major}.`;
}

const upstreamProvider =
'This change is the result of a decision made by an upstream infrastructure provider (AWS).';

export function getLatestNodeVersion() {
return allOptions[0];
}
@@ -72,7 +75,7 @@ export async function getSupportedNodeVersion(
throw new NowBuildError({
code: 'BUILD_UTILS_NODE_VERSION_DISCONTINUED',
link: 'http://vercel.link/node-version',
message: `${intro} ${getHint(isAuto)}`,
message: `${intro} ${getHint(isAuto)} ${upstreamProvider}`,
});
}

@@ -83,9 +86,9 @@ export async function getSupportedNodeVersion(
console.warn(
`Error: Node.js version ${
selection.range
} has reached End-of-Life. Deployments created on or after ${d} will fail to build. ${getHint(
} is deprecated. Deployments created on or after ${d} will fail to build. ${getHint(
isAuto
)}`
)} ${upstreamProvider}`
);
}
packages/build-utils/test/unit.test.ts
@@ -1,7 +1,6 @@
import ms from 'ms';
import path from 'path';
import fs, { readlink } from 'fs-extra';
import retry from 'async-retry';
import { strict as assert, strictEqual } from 'assert';
import { createZip } from '../src/lambda';
import { getSupportedNodeVersion } from '../src/fs/node-version';
@@ -387,10 +386,10 @@ it('should warn for deprecated versions, soon to be discontinued', async () => {
12
);
expect(warningMessages).toStrictEqual([
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
'Error: Node.js version 10.x has reached End-of-Life. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16.',
'Error: Node.js version 12.x has reached End-of-Life. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16.',
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 10.x is deprecated. Deployments created on or after 2021-04-20 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set "engines": { "node": "16.x" } in your `package.json` file to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
'Error: Node.js version 12.x is deprecated. Deployments created on or after 2022-08-09 will fail to build. Please set Node.js Version to 16.x in your Project Settings to use Node.js 16. This change is the result of a decision made by an upstream infrastructure provider (AWS).',
]);

global.Date.now = realDateNow;
@@ -495,43 +494,28 @@ it('should only invoke `runNpmInstall()` once per `package.json` file (serial)',
const meta: Meta = {};
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
const apiDir = path.join(fixture, 'api');
const retryOpts = { maxRetryTime: 1000 };
let run1, run2, run3;
await retry(async () => {
run1 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run1).toEqual(true);
expect(
(meta.runNpmInstallSet as Set<string>).has(
path.join(fixture, 'package.json')
)
).toEqual(true);
}, retryOpts);
await retry(async () => {
run2 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run2).toEqual(false);
}, retryOpts);
await retry(async () => {
run3 = await runNpmInstall(fixture, [], undefined, meta);
expect(run3).toEqual(false);
}, retryOpts);
const run1 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run1).toEqual(true);
expect(
(meta.runNpmInstallSet as Set<string>).has(
path.join(fixture, 'package.json')
)
).toEqual(true);
const run2 = await runNpmInstall(apiDir, [], undefined, meta);
expect(run2).toEqual(false);
const run3 = await runNpmInstall(fixture, [], undefined, meta);
expect(run3).toEqual(false);
});

it('should only invoke `runNpmInstall()` once per `package.json` file (parallel)', async () => {
const meta: Meta = {};
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
const apiDir = path.join(fixture, 'api');
let results: [boolean, boolean, boolean] | undefined;
await retry(
async () => {
results = await Promise.all([
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(fixture, [], undefined, meta),
]);
},
{ maxRetryTime: 3000 }
);
const [run1, run2, run3] = results || [];
const [run1, run2, run3] = await Promise.all([
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(apiDir, [], undefined, meta),
runNpmInstall(fixture, [], undefined, meta),
]);
expect(run1).toEqual(true);
expect(run2).toEqual(false);
expect(run3).toEqual(false);
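Both variants of the tests above rely on `runNpmInstall()` deduplicating installs per `package.json` through the shared `meta` object. A minimal sketch of that dedup idea, assuming `meta.runNpmInstallSet` is keyed by the resolved `package.json` path; the helper below is illustrative, not the real implementation:

```ts
interface Meta {
  runNpmInstallSet?: Set<string>;
}

// Returns true only the first time a given package.json is seen for this
// `meta` object; later calls for the same file resolve to false.
async function installOncePerPackageJson(
  packageJsonPath: string,
  meta: Meta
): Promise<boolean> {
  meta.runNpmInstallSet ??= new Set();
  if (meta.runNpmInstallSet.has(packageJsonPath)) return false;
  meta.runNpmInstallSet.add(packageJsonPath);
  // ...spawn the actual package-manager install here...
  return true;
}
```

That is why the second call for `apiDir` and the later call for the fixture root both resolve to `false`: they map to the same `package.json`.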
@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "27.1.5",
"version": "27.0.0",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -42,16 +42,17 @@
"node": ">= 14"
},
"dependencies": {
"@vercel/build-utils": "5.0.3",
"@vercel/go": "2.0.7",
"@vercel/hydrogen": "0.0.4",
"@vercel/next": "3.1.7",
"@vercel/node": "2.4.4",
"@vercel/python": "3.0.7",
"@vercel/redwood": "1.0.8",
"@vercel/remix": "1.0.9",
"@vercel/ruby": "1.3.15",
"@vercel/static-build": "1.0.8",
"@vercel/build-utils": "5.0.1",
"@vercel/go": "2.0.5",
"@vercel/hydrogen": "0.0.2",
"@vercel/next": "3.1.4",
"@vercel/node": "2.4.1",
"@vercel/python": "3.0.5",
"@vercel/redwood": "1.0.6",
"@vercel/remix": "1.0.6",
"@vercel/ruby": "1.3.13",
"@vercel/static-build": "1.0.5",
"open": "8.4.0",
"update-notifier": "5.1.0"
},
"devDependencies": {
@@ -96,9 +97,9 @@
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@types/yauzl-promise": "2.1.0",
"@vercel/client": "12.1.2",
"@vercel/frameworks": "1.1.1",
"@vercel/fs-detectors": "2.0.1",
"@vercel/client": "12.1.0",
"@vercel/frameworks": "1.1.0",
"@vercel/fs-detectors": "1.0.1",
"@vercel/ncc": "0.24.0",
"@zeit/fun": "0.11.2",
"@zeit/source-map-support": "0.6.2",
@@ -15,7 +15,7 @@ import getArgs from '../../util/get-args';
import Client from '../../util/client';
import { getPkgName } from '../../util/pkg-name';
import { Deployment, PaginationOptions } from '../../types';
import { normalizeURL } from '../../util/bisect/normalize-url';
import { normalizeURL } from '../../util/normalize-url';

interface DeploymentV6
extends Pick<
@@ -48,17 +48,9 @@ import {
} from '../util/build/write-build-result';
import { importBuilders, BuilderWithPkg } from '../util/build/import-builders';
import { initCorepack, cleanupCorepack } from '../util/build/corepack';
import { sortBuilders } from '../util/build/sort-builders';

type BuildResult = BuildResultV2 | BuildResultV3;

interface SerializedBuilder extends Builder {
error?: Error;
require?: string;
requirePath?: string;
apiVersion: number;
}

const help = () => {
return console.log(`
${chalk.bold(`${cli.logo} ${cli.name} build`)}
@@ -206,7 +198,7 @@ export default async function main(client: Client): Promise<number> {
normalizePath(relative(workPath, f))
);

const routesResult = getTransformedRoutes(vercelConfig || {});
const routesResult = getTransformedRoutes({ nowConfig: vercelConfig || {} });
if (routesResult.error) {
output.prettyError(routesResult.error);
return 1;
@@ -304,36 +296,32 @@ export default async function main(client: Client): Promise<number> {
const ops: Promise<Error | void>[] = [];

// Write the `detectedBuilders` result to output dir
const buildsJsonBuilds = new Map<Builder, SerializedBuilder>(
builds.map(build => {
const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
ops.push(
fs.writeJSON(
join(outputDir, 'builds.json'),
{
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
target,
argv: process.argv,
builds: builds.map(build => {
const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
const { builder, pkg: builderPkg } = builderWithPkg;
return {
require: builderPkg.name,
requirePath: builderWithPkg.path,
apiVersion: builder.version,
...build,
};
}),
},
{
spaces: 2,
}
const { builder, pkg: builderPkg } = builderWithPkg;
return [
build,
{
require: builderPkg.name,
requirePath: builderWithPkg.path,
apiVersion: builder.version,
...build,
},
];
})
)
);
const buildsJson = {
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
target,
argv: process.argv,
builds: Array.from(buildsJsonBuilds.values()),
};
const buildsJsonPath = join(outputDir, 'builds.json');
const writeBuildsJsonPromise = fs.writeJSON(buildsJsonPath, buildsJson, {
spaces: 2,
});

ops.push(writeBuildsJsonPromise);

// The `meta` config property is re-used for each Builder
// invocation so that Builders can share state between
@@ -344,95 +332,65 @@ export default async function main(client: Client): Promise<number> {
};

// Execute Builders for detected entrypoints
// TODO: parallelize builds (except for frontend)
const sortedBuilders = sortBuilders(builds);
// TODO: parallelize builds
const buildResults: Map<Builder, BuildResult> = new Map();
const overrides: PathOverride[] = [];
const repoRootPath = cwd;
const corepackShimDir = await initCorepack({ repoRootPath });
const rootPackageJsonPath = repoRootPath || workPath;
const corepackShimDir = await initCorepack({ cwd, rootPackageJsonPath });

for (const build of sortedBuilders) {
for (const build of builds) {
if (typeof build.src !== 'string') continue;

const builderWithPkg = buildersWithPkgs.get(build.use);
if (!builderWithPkg) {
throw new Error(`Failed to load Builder "${build.use}"`);
}
const { builder, pkg: builderPkg } = builderWithPkg;

try {
const { builder, pkg: builderPkg } = builderWithPkg;
const buildConfig: Config = {
outputDirectory: project.settings.outputDirectory ?? undefined,
...build.config,
projectSettings: project.settings,
installCommand: project.settings.installCommand ?? undefined,
devCommand: project.settings.devCommand ?? undefined,
buildCommand: project.settings.buildCommand ?? undefined,
framework: project.settings.framework,
nodeVersion: project.settings.nodeVersion,
};
const buildOptions: BuildOptions = {
files: filesMap,
entrypoint: build.src,
workPath,
repoRootPath,
config: buildConfig,
meta,
};
output.debug(
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
);
const buildResult = await builder.build(buildOptions);

const buildConfig: Config = {
outputDirectory: project.settings.outputDirectory ?? undefined,
...build.config,
projectSettings: project.settings,
installCommand: project.settings.installCommand ?? undefined,
devCommand: project.settings.devCommand ?? undefined,
buildCommand: project.settings.buildCommand ?? undefined,
framework: project.settings.framework,
nodeVersion: project.settings.nodeVersion,
};
const buildOptions: BuildOptions = {
files: filesMap,
entrypoint: build.src,
workPath,
repoRootPath,
config: buildConfig,
meta,
};
output.debug(
`Building entrypoint "${build.src}" with "${builderPkg.name}"`
);
const buildResult = await builder.build(buildOptions);
// Store the build result to generate the final `config.json` after
// all builds have completed
buildResults.set(build, buildResult);

// Store the build result to generate the final `config.json` after
// all builds have completed
buildResults.set(build, buildResult);

// Start flushing the file outputs to the filesystem asynchronously
ops.push(
writeBuildResult(
outputDir,
buildResult,
build,
builder,
builderPkg,
vercelConfig?.cleanUrls
).then(
override => {
if (override) overrides.push(override);
},
err => err
)
);
} catch (err: any) {
const configJson = {
version: 3,
};
const configJsonPromise = fs.writeJSON(
join(outputDir, 'config.json'),
configJson,
{ spaces: 2 }
);

await Promise.all([writeBuildsJsonPromise, configJsonPromise]);

const buildJsonBuild = buildsJsonBuilds.get(build);
if (buildJsonBuild) {
buildJsonBuild.error = {
name: err.name,
message: err.message,
stack: err.stack,
...err,
};

await fs.writeJSON(buildsJsonPath, buildsJson, {
spaces: 2,
});
}

return 1;
}
// Start flushing the file outputs to the filesystem asynchronously
ops.push(
writeBuildResult(
outputDir,
buildResult,
build,
builder,
builderPkg,
vercelConfig?.cleanUrls
).then(
override => {
if (override) overrides.push(override);
},
err => err
)
);
}

if (corepackShimDir) {
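One side of this hunk records builder failures in `builds.json` (plus a minimal `config.json`) instead of leaving no output behind. A sketch of what a failed entry could look like, based on the `SerializedBuilder` shape earlier in this file; the values are placeholders, not taken from a real build:

```ts
// Minimal local stand-in for the CLI's Builder type (illustration only).
interface Builder {
  use: string;
  src?: string;
  config?: Record<string, unknown>;
}

interface SerializedBuilder extends Builder {
  error?: Error;
  require?: string;
  requirePath?: string;
  apiVersion: number;
}

// Roughly what one entry in `.vercel/output/builds.json` might contain
// after a builder throws.
const failedBuild: SerializedBuilder = {
  use: '@vercel/node',
  src: 'api/broken.ts',
  require: '@vercel/node',
  requirePath: '/path/to/node_modules/@vercel/node/dist/index.js',
  apiVersion: 3,
  error: new Error('Intentional syntax error in api/broken.ts'),
};
```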
@@ -15,13 +15,13 @@ export const help = () => `
)}
dev Start a local development server
env Manages the Environment Variables for your current Project
git Manage Git provider repository for your current Project
init [example] Initialize an example project
ls | list [app] Lists deployments
inspect [id] Displays information related to a deployment
link [path] Link local directory to a Vercel Project
login [email] Logs into your account or creates a new one
logout Logs out of your account
open [options] Opens a Project URL in your browser
pull [path] Pull your Project Settings from the cloud
switch [scope] Switches between teams and your personal account
help [cmd] Displays complete help for [cmd]
@@ -64,7 +64,7 @@ import { help } from './args';
import { getDeploymentChecks } from '../../util/deploy/get-deployment-checks';
import parseTarget from '../../util/deploy/parse-target';
import getPrebuiltJson from '../../util/deploy/get-prebuilt-json';
import { createGitMeta } from '../../util/create-git-meta';
import { createGitMeta } from '../../util/deploy/create-git-meta';

export default async (client: Client) => {
const { output } = client;
@@ -95,7 +95,6 @@ export default async (client: Client) => {
// deprecated
'--name': String,
'-n': '--name',
'--no-clipboard': Boolean,
'--target': String,
});
} catch (error) {
@@ -184,17 +183,6 @@ export default async (client: Client) => {
);
}

if (argv['--no-clipboard']) {
output.print(
`${prependEmoji(
`The ${param(
'--no-clipboard'
)} option was ignored because it is the default behavior. Please remove it.`,
emoji('warning')
)}\n`
);
}

// build `target`
const target = parseTarget(output, argv['--target'], argv['--prod']);
if (typeof target === 'number') {
@@ -1,168 +0,0 @@
|
||||
import chalk from 'chalk';
|
||||
import { join } from 'path';
|
||||
import { Org, Project } from '../../types';
|
||||
import Client from '../../util/client';
|
||||
import { parseGitConfig, pluckRemoteUrl } from '../../util/create-git-meta';
|
||||
import confirm from '../../util/input/confirm';
|
||||
import { Output } from '../../util/output';
|
||||
import link from '../../util/output/link';
|
||||
import { getCommandName } from '../../util/pkg-name';
|
||||
import {
|
||||
connectGitProvider,
|
||||
disconnectGitProvider,
|
||||
formatProvider,
|
||||
parseRepoUrl,
|
||||
} from '../../util/projects/connect-git-provider';
|
||||
import validatePaths from '../../util/validate-paths';
|
||||
|
||||
export default async function connect(
|
||||
client: Client,
|
||||
argv: any,
|
||||
args: string[],
|
||||
project: Project | undefined,
|
||||
org: Org | undefined
|
||||
) {
|
||||
const { output } = client;
|
||||
const confirm = Boolean(argv['--confirm']);
|
||||
|
||||
if (args.length !== 0) {
|
||||
output.error(
|
||||
`Invalid number of arguments. Usage: ${chalk.cyan(
|
||||
`${getCommandName('project connect')}`
|
||||
)}`
|
||||
);
|
||||
return 2;
|
||||
}
|
||||
if (!project || !org) {
|
||||
output.error(
|
||||
`Can't find \`org\` or \`project\`. Make sure your current directory is linked to a Vercel projet by running ${getCommandName(
|
||||
'link'
|
||||
)}.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
let paths = [process.cwd()];
|
||||
|
||||
const validate = await validatePaths(client, paths);
|
||||
if (!validate.valid) {
|
||||
return validate.exitCode;
|
||||
}
|
||||
const { path } = validate;
|
||||
|
||||
const gitProviderLink = project.link;
|
||||
client.config.currentTeam = org.type === 'team' ? org.id : undefined;
|
||||
|
||||
// get project from .git
|
||||
const gitConfigPath = join(path, '.git/config');
|
||||
const gitConfig = await parseGitConfig(gitConfigPath, output);
|
||||
if (!gitConfig) {
|
||||
output.error(
|
||||
`No local git repo found. Run ${chalk.cyan(
|
||||
'`git clone <url>`'
|
||||
)} to clone a remote Git repository first.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
const remoteUrl = pluckRemoteUrl(gitConfig);
|
||||
if (!remoteUrl) {
|
||||
output.error(
|
||||
`No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run ${chalk.cyan(
|
||||
'`git remote --help`'
|
||||
)} for more details.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
output.log(`Identified Git remote "origin": ${link(remoteUrl)}`);
|
||||
const parsedUrl = parseRepoUrl(remoteUrl);
|
||||
if (!parsedUrl) {
|
||||
output.error(
|
||||
`Failed to parse Git repo data from the following remote URL in your Git config: ${link(
|
||||
remoteUrl
|
||||
)}`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
const { provider, org: gitOrg, repo } = parsedUrl;
|
||||
const repoPath = `${gitOrg}/${repo}`;
|
||||
let connectedRepoPath;
|
||||
|
||||
if (!gitProviderLink) {
|
||||
const connect = await connectGitProvider(
|
||||
client,
|
||||
org,
|
||||
project.id,
|
||||
provider,
|
||||
repoPath
|
||||
);
|
||||
if (typeof connect === 'number') {
|
||||
return connect;
|
||||
}
|
||||
} else {
|
||||
const connectedProvider = gitProviderLink.type;
|
||||
const connectedOrg = gitProviderLink.org;
|
||||
const connectedRepo = gitProviderLink.repo;
|
||||
connectedRepoPath = `${connectedOrg}/${connectedRepo}`;
|
||||
|
||||
const isSameRepo =
|
||||
connectedProvider === provider &&
|
||||
connectedOrg === gitOrg &&
|
||||
connectedRepo === repo;
|
||||
if (isSameRepo) {
|
||||
output.log(
|
||||
`${chalk.cyan(connectedRepoPath)} is already connected to your project.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const shouldReplaceRepo = await confirmRepoConnect(
|
||||
client,
|
||||
output,
|
||||
confirm,
|
||||
connectedRepoPath
|
||||
);
|
||||
if (!shouldReplaceRepo) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
await disconnectGitProvider(client, org, project.id);
|
||||
const connect = await connectGitProvider(
|
||||
client,
|
||||
org,
|
||||
project.id,
|
||||
provider,
|
||||
repoPath
|
||||
);
|
||||
if (typeof connect === 'number') {
|
||||
return connect;
|
||||
}
|
||||
}
|
||||
|
||||
output.log(
|
||||
`Connected ${formatProvider(provider)} repository ${chalk.cyan(repoPath)}!`
|
||||
);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
async function confirmRepoConnect(
|
||||
client: Client,
|
||||
output: Output,
|
||||
yes: boolean,
|
||||
connectedRepoPath: string
|
||||
) {
|
||||
let shouldReplaceProject = yes;
|
||||
if (!shouldReplaceProject) {
|
||||
shouldReplaceProject = await confirm(
|
||||
client,
|
||||
`Looks like you already have a repository connected: ${chalk.cyan(
|
||||
connectedRepoPath
|
||||
)}. Do you want to replace it?`,
|
||||
true
|
||||
);
|
||||
if (!shouldReplaceProject) {
|
||||
output.log(`Aborted. Repo not connected.`);
|
||||
}
|
||||
}
|
||||
return shouldReplaceProject;
|
||||
}
|
||||
@@ -1,58 +0,0 @@
|
||||
import chalk from 'chalk';
|
||||
import { Org, Project } from '../../types';
|
||||
import Client from '../../util/client';
|
||||
import confirm from '../../util/input/confirm';
|
||||
import { getCommandName } from '../../util/pkg-name';
|
||||
import { disconnectGitProvider } from '../../util/projects/connect-git-provider';
|
||||
|
||||
export default async function disconnect(
|
||||
client: Client,
|
||||
args: string[],
|
||||
project: Project | undefined,
|
||||
org: Org | undefined
|
||||
) {
|
||||
const { output } = client;
|
||||
|
||||
if (args.length !== 0) {
|
||||
output.error(
|
||||
`Invalid number of arguments. Usage: ${chalk.cyan(
|
||||
`${getCommandName('project disconnect')}`
|
||||
)}`
|
||||
);
|
||||
return 2;
|
||||
}
|
||||
if (!project || !org) {
|
||||
output.error('An unexpected error occurred.');
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (project.link) {
|
||||
const { org: linkOrg, repo } = project.link;
|
||||
output.print(
|
||||
`Your Vercel project will no longer create deployments when you push to this repository.\n`
|
||||
);
|
||||
const confirmDisconnect = await confirm(
|
||||
client,
|
||||
`Are you sure you want to disconnect ${chalk.cyan(
|
||||
`${linkOrg}/${repo}`
|
||||
)} from your project?`,
|
||||
false
|
||||
);
|
||||
|
||||
if (confirmDisconnect) {
|
||||
await disconnectGitProvider(client, org, project.id);
|
||||
output.log(`Disconnected ${chalk.cyan(`${linkOrg}/${repo}`)}.`);
|
||||
} else {
|
||||
output.log('Aborted.');
|
||||
}
|
||||
} else {
|
||||
output.error(
|
||||
`No Git repository connected. Run ${getCommandName(
|
||||
'project connect'
|
||||
)} to connect one.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
@@ -1,94 +0,0 @@
|
||||
import chalk from 'chalk';
|
||||
import Client from '../../util/client';
|
||||
import { ensureLink } from '../../util/ensure-link';
|
||||
import getArgs from '../../util/get-args';
|
||||
import getInvalidSubcommand from '../../util/get-invalid-subcommand';
|
||||
import handleError from '../../util/handle-error';
|
||||
import logo from '../../util/output/logo';
|
||||
import { getPkgName } from '../../util/pkg-name';
|
||||
import validatePaths from '../../util/validate-paths';
|
||||
import connect from './connect';
|
||||
import disconnect from './disconnect';
|
||||
|
||||
const help = () => {
|
||||
console.log(`
|
||||
${chalk.bold(`${logo} ${getPkgName()} git`)} <command>
|
||||
|
||||
${chalk.dim('Commands:')}
|
||||
|
||||
connect Connect your Git config "origin" remote as a Git provider to your project
|
||||
disconnect Disconnect the Git provider repository from your project
|
||||
|
||||
${chalk.dim('Options:')}
|
||||
|
||||
-h, --help Output usage information
|
||||
-t ${chalk.bold.underline('TOKEN')}, --token=${chalk.bold.underline(
|
||||
'TOKEN'
|
||||
)} Login token
|
||||
|
||||
${chalk.dim('Examples:')}
|
||||
|
||||
${chalk.gray('–')} Connect a Git provider repository
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} git connect`)}
|
||||
|
||||
${chalk.gray('–')} Disconnect the Git provider repository
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} git disconnect`)}
|
||||
`);
|
||||
};
|
||||
|
||||
const COMMAND_CONFIG = {
|
||||
connect: ['connect'],
|
||||
disconnect: ['disconnect'],
|
||||
};
|
||||
|
||||
export default async function main(client: Client) {
|
||||
let argv: any;
|
||||
let subcommand: string | string[];
|
||||
|
||||
try {
|
||||
argv = getArgs(client.argv.slice(2), {
|
||||
'--confirm': Boolean,
|
||||
});
|
||||
} catch (error) {
|
||||
handleError(error);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (argv['--help']) {
|
||||
help();
|
||||
return 2;
|
||||
}
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
subcommand = argv._[0];
|
||||
const args = argv._.slice(1);
|
||||
const confirm = Boolean(argv['--confirm']);
|
||||
const { output } = client;
|
||||
|
||||
let paths = [process.cwd()];
|
||||
const pathValidation = await validatePaths(client, paths);
|
||||
if (!pathValidation.valid) {
|
||||
return pathValidation.exitCode;
|
||||
}
|
||||
const { path } = pathValidation;
|
||||
|
||||
const linkedProject = await ensureLink('git', client, path, confirm);
|
||||
if (typeof linkedProject === 'number') {
|
||||
return linkedProject;
|
||||
}
|
||||
|
||||
const { org, project } = linkedProject;
|
||||
|
||||
switch (subcommand) {
|
||||
case 'connect':
|
||||
return await connect(client, argv, args, project, org);
|
||||
case 'disconnect':
|
||||
return await disconnect(client, args, project, org);
|
||||
default:
|
||||
output.error(getInvalidSubcommand(COMMAND_CONFIG));
|
||||
help();
|
||||
return 2;
|
||||
}
|
||||
}
|
||||
@@ -14,7 +14,6 @@ export default new Map([
['domain', 'domains'],
['domains', 'domains'],
['env', 'env'],
['git', 'git'],
['help', 'help'],
['init', 'init'],
['inspect', 'inspect'],
@@ -26,8 +25,9 @@ export default new Map([
['logout', 'logout'],
['logs', 'logs'],
['ls', 'list'],
['project', 'project'],
['projects', 'project'],
['open', 'open'],
['project', 'projects'],
['projects', 'projects'],
['pull', 'pull'],
['remove', 'remove'],
['rm', 'remove'],
packages/cli/src/commands/open.ts
@@ -0,0 +1,319 @@
|
||||
import chalk from 'chalk';
|
||||
import Client from '../util/client';
|
||||
import getArgs from '../util/get-args';
|
||||
import getScope from '../util/get-scope';
|
||||
import handleError from '../util/handle-error';
|
||||
import logo from '../util/output/logo';
|
||||
import { getCommandName, getPkgName } from '../util/pkg-name';
|
||||
import validatePaths from '../util/validate-paths';
|
||||
import { ensureLink } from '../util/ensure-link';
|
||||
import list from '../util/input/list';
|
||||
import { Org, Project, Team } from '../types';
|
||||
import { stringify } from 'querystring';
|
||||
import openUrl from 'open';
|
||||
import link from '../util/output/link';
|
||||
import { getDeployment } from '../util/get-deployment';
|
||||
import { normalizeURL } from '../util/normalize-url';
|
||||
import { emoji, prependEmoji } from '../util/emoji';
|
||||
|
||||
const help = () => {
|
||||
console.log(`
|
||||
${chalk.bold(`${logo} ${getPkgName()} open`)} [options]
|
||||
|
||||
${chalk.dim('Options:')}
|
||||
|
||||
-h, --help Output usage information
|
||||
--confirm Skip confirmation prompts
|
||||
--prod Filter for production deployments
|
||||
dash Open the dashboard in a browser
|
||||
latest Open the latest preview deployment URL in a browser
|
||||
[url] Open the specified deployment URL in a browser
|
||||
-A ${chalk.bold.underline('FILE')}, --local-config=${chalk.bold.underline(
|
||||
'FILE'
|
||||
)} Path to the local ${'`vercel.json`'} file
|
||||
-Q ${chalk.bold.underline('DIR')}, --global-config=${chalk.bold.underline(
|
||||
'DIR'
|
||||
)} Path to the global ${'`.vercel`'} directory
|
||||
|
||||
${chalk.dim('Examples:')}
|
||||
|
||||
${chalk.gray('–')} View all options
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} open`)}
|
||||
|
||||
${chalk.gray('–')} Open the dashboard for the current project
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} open dash`)}
|
||||
|
||||
${chalk.gray('–')} Open the latest preview deployment URL
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} open latest`)}
|
||||
|
||||
${chalk.gray('–')} Open the latest production deployment URL
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} open latest --prod`)}
|
||||
|
||||
${chalk.gray('–')} Open the dashboard for the latest preview deployment
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} open dash latest`)}
|
||||
|
||||
${chalk.gray('–')} Open the dashboard for the latest production deployment
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} open dash latest --prod`)}
|
||||
|
||||
${chalk.gray('–')} Open the dashboard for a specific deployment URL
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} open dash [url]`)}
|
||||
`);
|
||||
};
|
||||
|
||||
export default async function open(
|
||||
client: Client,
|
||||
test: Boolean
|
||||
): Promise<number> {
|
||||
const { output } = client;
|
||||
let argv;
|
||||
let subcommand: string | string[];
|
||||
let narrow: string | string[];
|
||||
|
||||
try {
|
||||
argv = getArgs(client.argv.slice(2), {
|
||||
'--confirm': Boolean,
|
||||
'--prod': Boolean,
|
||||
});
|
||||
} catch (error) {
|
||||
handleError(error);
|
||||
return 1;
|
||||
}
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
subcommand = argv._[0];
|
||||
narrow = argv._[1];
|
||||
|
||||
if (argv['--help']) {
|
||||
help();
|
||||
return 2;
|
||||
}
|
||||
|
||||
const confirm = argv['--confirm'] || false;
|
||||
const prod = argv['--prod'] || false;
|
||||
|
||||
let scope = null;
|
||||
|
||||
try {
|
||||
scope = await getScope(client);
|
||||
} catch (err) {
|
||||
if (err.code === 'NOT_AUTHORIZED' || err.code === 'TEAM_DELETED') {
|
||||
output.error(err.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
|
||||
const { team, contextName } = scope;
|
||||
|
||||
let paths = [process.cwd()];
|
||||
|
||||
const validate = await validatePaths(client, paths);
|
||||
if (!validate.valid) {
|
||||
return validate.exitCode;
|
||||
}
|
||||
const { path } = validate;
|
||||
|
||||
const linkedProject = await ensureLink('open', client, path, confirm);
|
||||
if (typeof linkedProject === 'number') {
|
||||
return linkedProject;
|
||||
}
|
||||
|
||||
const { project, org } = linkedProject;
|
||||
client.config.currentTeam = org.type === 'team' ? org.id : undefined;
|
||||
|
||||
const choice = await getChoice(
|
||||
subcommand,
|
||||
narrow,
|
||||
contextName,
|
||||
client,
|
||||
project,
|
||||
org,
|
||||
team,
|
||||
prod
|
||||
);
|
||||
if (typeof choice === 'number') {
|
||||
return choice;
|
||||
}
|
||||
|
||||
if (choice === 'not_found') {
|
||||
output.log(
|
||||
`No deployments found. Run ${chalk.cyan(
|
||||
getCommandName('deploy')
|
||||
)} to create a deployment.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
if (choice === '') {
|
||||
// User aborted
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (!test) openUrl(choice);
|
||||
output.log(`🪄 Opened ${link(choice)}`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
async function getChoice(
|
||||
subcommand: string,
|
||||
narrow: string,
|
||||
contextName: string,
|
||||
client: Client,
|
||||
project: Project,
|
||||
org: Org,
|
||||
team: Team | null,
|
||||
prod: Boolean
|
||||
): Promise<string | number> {
|
||||
if (subcommand === 'dash' || subcommand === 'dashboard') {
|
||||
if (narrow === 'latest') {
|
||||
return await getInspectorUrl(client, project, org, team, prod);
|
||||
} else if (narrow) {
|
||||
// Assume they're trying to pass in a deployment URL
|
||||
const deployment = await verifyDeployment(client, narrow, contextName);
|
||||
if (typeof deployment === 'number') {
|
||||
return deployment;
|
||||
}
|
||||
|
||||
return deployment.inspectorUrl;
|
||||
} else {
|
||||
return getDashboardUrl(org, project);
|
||||
}
|
||||
} else if (subcommand === 'latest') {
|
||||
return await getLatestDeploymentUrl(client, project, team, prod);
|
||||
} else {
|
||||
if (subcommand) {
|
||||
client.output.print(
|
||||
prependEmoji('Unknown subcommand.\n', emoji('warning'))
|
||||
);
|
||||
}
|
||||
return await listOptions(client, project, org, team);
|
||||
}
|
||||
}
|
||||
|
||||
async function listOptions(
|
||||
client: Client,
|
||||
project: Project,
|
||||
org: Org,
|
||||
team: Team | null
|
||||
): Promise<string> {
|
||||
return await list(client, {
|
||||
message: 'What do you want to open?',
|
||||
choices: [
|
||||
{
|
||||
name: `Dashboard ${chalk.gray('(vc open dash)')}`,
|
||||
value: getDashboardUrl(org, project),
|
||||
short: 'Dashboard',
|
||||
},
|
||||
{
|
||||
name: `Latest Preview Deployment ${chalk.gray('(vc open latest)')}`,
|
||||
value: await getLatestDeploymentUrl(client, project, team),
|
||||
short: 'Latest Preview Deployment',
|
||||
},
|
||||
{
|
||||
name: `Inspect Latest Preview Deployment ${chalk.gray(
|
||||
'(vc open dash latest)'
|
||||
)}`,
|
||||
value: await getInspectorUrl(client, project, org, team),
|
||||
short: 'Deployment Inspector',
|
||||
},
|
||||
{
|
||||
name: `Latest Production Deployment ${chalk.gray(
|
||||
'(vc open latest --prod)'
|
||||
)}`,
|
||||
value: await getLatestDeploymentUrl(client, project, team, true),
|
||||
short: 'Latest Production Deployment',
|
||||
},
|
||||
{
|
||||
name: `Inspect Latest Production Deployment ${chalk.gray(
|
||||
'(vc open dash latest --prod)'
|
||||
)}`,
|
||||
value: await getInspectorUrl(client, project, org, team, true),
|
||||
short: 'Latest Production Deployment Inspector',
|
||||
},
|
||||
],
|
||||
});
|
||||
}
|
||||
|
||||
async function verifyDeployment(
|
||||
client: Client,
|
||||
url: string,
|
||||
contextName: string
|
||||
) {
|
||||
try {
|
||||
const deployment = await getDeployment(client, url);
|
||||
return {
|
||||
url: normalizeURL(deployment.url),
|
||||
inspectorUrl: deployment.inspectorUrl || '',
|
||||
};
|
||||
} catch (err) {
|
||||
if (err.status === 404) {
|
||||
client.output.error(
|
||||
`Could not find a deployment with URL ${link(url)} in ${contextName}.`
|
||||
);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
function getDashboardUrl(org: Org, project: Project): string {
|
||||
return `https://vercel.com/${org.slug}/${project.name}`;
|
||||
}
|
||||
async function getInspectorUrl(
|
||||
client: Client,
|
||||
project: Project,
|
||||
org: Org,
|
||||
team: Team | null,
|
||||
prod: Boolean = false
|
||||
): Promise<string> {
|
||||
const proj = await getProject(client, project, team);
|
||||
if (proj) {
|
||||
let latestDeploymentId = (
|
||||
prod ? proj?.targets?.production?.id : proj.latestDeployments?.[0]?.id
|
||||
)?.replace('dpl_', '');
|
||||
if (latestDeploymentId) {
|
||||
return `https://vercel.com/${org.slug}/${project.name}/${latestDeploymentId}`;
|
||||
}
|
||||
}
|
||||
|
||||
return 'not_found';
|
||||
}
|
||||
async function getLatestDeploymentUrl(
|
||||
client: Client,
|
||||
project: Project,
|
||||
team: Team | null,
|
||||
prod: Boolean = false
|
||||
): Promise<string> {
|
||||
const proj = await getProject(client, project, team);
|
||||
if (prod && proj?.targets?.production) {
|
||||
return `https://${proj.targets.production.url}`;
|
||||
} else if (proj?.latestDeployments?.[0]?.url) {
|
||||
return `https://${proj.latestDeployments[0].url}`;
|
||||
}
|
||||
|
||||
return 'not_found';
|
||||
}
|
||||
|
||||
async function getProject(
|
||||
client: Client,
|
||||
project: Project,
|
||||
team: Team | null
|
||||
): Promise<Project> {
|
||||
const proj = await client
|
||||
.fetch(
|
||||
`/v9/projects/${project.name}?${stringify({
|
||||
teamId: team?.id,
|
||||
})}`
|
||||
)
|
||||
.catch(err => {
|
||||
client.output.error(err.message);
|
||||
return;
|
||||
});
|
||||
return proj as Project;
|
||||
}
|
||||
@@ -6,6 +6,7 @@ import getScope from '../../util/get-scope';
import handleError from '../../util/handle-error';
import logo from '../../util/output/logo';
import { getPkgName } from '../../util/pkg-name';
import validatePaths from '../../util/validate-paths';
import add from './add';
import list from './list';
import rm from './rm';
@@ -47,6 +48,7 @@ const COMMAND_CONFIG = {
ls: ['ls', 'list'],
add: ['add'],
rm: ['rm', 'remove'],
connect: ['connect'],
};

export default async function main(client: Client) {
@@ -57,6 +59,7 @@ export default async function main(client: Client) {
argv = getArgs(client.argv.slice(2), {
'--next': Number,
'-N': '--next',
'--yes': Boolean,
});
} catch (error) {
handleError(error);
@@ -73,6 +76,12 @@ export default async function main(client: Client) {
const args = argv._.slice(1);
const { output } = client;

let paths = [process.cwd()];
const pathValidation = await validatePaths(client, paths);
if (!pathValidation.valid) {
return pathValidation.exitCode;
}

let contextName = '';

try {
@@ -173,7 +173,7 @@ const main = async () => {
const targetOrSubcommand = argv._[2];

// Currently no beta commands - add here as needed
const betaCommands: string[] = [];
const betaCommands: string[] = [''];
if (betaCommands.includes(targetOrSubcommand)) {
console.log(
`${chalk.grey(
@@ -632,9 +632,6 @@ const main = async () => {
case 'env':
func = require('./commands/env').default;
break;
case 'git':
func = require('./commands/git').default;
break;
case 'init':
func = require('./commands/init').default;
break;
@@ -656,7 +653,10 @@ const main = async () => {
case 'logout':
func = require('./commands/logout').default;
break;
case 'project':
case 'open':
func = require('./commands/open').default;
break;
case 'projects':
func = require('./commands/project').default;
break;
case 'pull':
@@ -117,6 +117,7 @@ export type Cert = {

export type Deployment = {
uid: string;
id?: string;
url: string;
name: string;
type: 'LAMBDAS';
@@ -248,36 +249,17 @@ export interface ProjectEnvVariable {
gitBranch?: string;
}

export interface DeployHook {
createdAt: number;
id: string;
name: string;
ref: string;
url: string;
}

export interface ProjectLinkData {
type: string;
repo: string;
repoId: number;
org?: string;
gitCredentialId: string;
productionBranch?: string | null;
sourceless: boolean;
createdAt: number;
updatedAt: number;
deployHooks?: DeployHook[];
}

export interface Project extends ProjectSettings {
id: string;
name: string;
accountId: string;
updatedAt: number;
createdAt: number;
link?: ProjectLinkData;
alias?: ProjectAliasTarget[];
latestDeployments?: Partial<Deployment>[];
targets?: {
production?: Partial<Deployment>;
};
}

export interface Org {
@@ -6,9 +6,11 @@ import { VERCEL_DIR } from '../projects/link';
import readJSONFile from '../read-json-file';

export async function initCorepack({
repoRootPath,
cwd,
rootPackageJsonPath,
}: {
repoRootPath: string;
cwd: string;
rootPackageJsonPath: string;
}): Promise<string | null> {
if (process.env.ENABLE_EXPERIMENTAL_COREPACK !== '1') {
// Since corepack is experimental, we need to exit early
@@ -16,7 +18,7 @@ export async function initCorepack({
return null;
}
const pkg = await readJSONFile<PackageJson>(
join(repoRootPath, 'package.json')
join(rootPackageJsonPath, 'package.json')
);
if (pkg instanceof CantParseJSONFile) {
console.warn(
@@ -30,13 +32,16 @@ export async function initCorepack({
console.log(
`Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "${pkg.packageManager}" in package.json`
);
const corepackRootDir = join(repoRootPath, VERCEL_DIR, 'cache', 'corepack');
const corepackRootDir = join(cwd, VERCEL_DIR, 'cache', 'corepack');
const corepackHomeDir = join(corepackRootDir, 'home');
const corepackShimDir = join(corepackRootDir, 'shim');
await fs.mkdirp(corepackHomeDir);
await fs.mkdirp(corepackShimDir);
process.env.COREPACK_HOME = corepackHomeDir;
process.env.PATH = `${corepackShimDir}${delimiter}${process.env.PATH}`;
process.env.DEBUG = process.env.DEBUG
? `corepack,${process.env.DEBUG}`
: 'corepack';
const pkgManagerName = pkg.packageManager.split('@')[0];
// We must explicitly call `corepack enable npm` since `corepack enable`
// doesn't work with npm. See https://github.com/nodejs/corepack/pull/24
@@ -67,4 +72,11 @@ export function cleanupCorepack(corepackShimDir: string) {
''
);
}
if (process.env.DEBUG) {
if (process.env.DEBUG === 'corepack') {
delete process.env.DEBUG;
} else {
process.env.DEBUG = process.env.DEBUG.replace('corepack,', '');
}
}
}
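As the hunk shows, Corepack support stays opt-in on both sides: it only runs when `ENABLE_EXPERIMENTAL_COREPACK=1` is set and the root `package.json` declares a `packageManager`. A small sketch of that gate; the `pnpm@7.5.0` value is illustrative:

```ts
// Stand-in for the package.json that initCorepack() reads.
const pkg = { packageManager: 'pnpm@7.5.0' };

if (process.env.ENABLE_EXPERIMENTAL_COREPACK === '1' && pkg.packageManager) {
  // Same derivation as in the hunk above: "pnpm@7.5.0" -> "pnpm".
  const pkgManagerName = pkg.packageManager.split('@')[0];
  console.log(
    `Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "${pkg.packageManager}" in package.json`
  );
  console.log(`Would run: corepack enable ${pkgManagerName}`);
}
```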
@@ -1,12 +0,0 @@
import frameworkList from '@vercel/frameworks';

export function sortBuilders<B extends { use: string }>(builds: B[]): B[] {
const frontendRuntimeSet = new Set(
frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
);
const toNumber = (build: B) => (frontendRuntimeSet.has(build.use) ? 0 : 1);

return builds.sort((build1, build2) => {
return toNumber(build1) - toNumber(build2);
});
}
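`sortBuilders` (present on only one side of this comparison) orders frontend-framework builds ahead of everything else, which is why the build loop earlier iterates `sortedBuilders`. A small usage sketch, assuming the function shown above is in scope; the build entries are illustrative:

```ts
const builds = [
  { use: '@vercel/node', src: 'api/index.ts' },
  { use: '@vercel/static-build', src: 'package.json' },
];

// Frontend runtimes (anything a framework maps to, or @vercel/static-build)
// sort to the front; other builders follow, keeping their relative order.
const ordered = sortBuilders(builds);
// => [{ use: '@vercel/static-build', ... }, { use: '@vercel/node', ... }]
```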
@@ -1,14 +1,6 @@
|
||||
import fs from 'fs-extra';
|
||||
import mimeTypes from 'mime-types';
|
||||
import {
|
||||
basename,
|
||||
dirname,
|
||||
extname,
|
||||
join,
|
||||
relative,
|
||||
resolve,
|
||||
posix,
|
||||
} from 'path';
|
||||
import { basename, dirname, extname, join, relative, resolve } from 'path';
|
||||
import {
|
||||
Builder,
|
||||
BuildResultV2,
|
||||
@@ -28,7 +20,6 @@ import pipe from 'promisepipe';
|
||||
import { unzip } from './unzip';
|
||||
import { VERCEL_DIR } from '../projects/link';
|
||||
|
||||
const { normalize } = posix;
|
||||
export const OUTPUT_DIR = join(VERCEL_DIR, 'output');
|
||||
|
||||
export async function writeBuildResult(
|
||||
@@ -76,13 +67,6 @@ export interface PathOverride {
|
||||
path?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove duplicate slashes as well as leading/trailing slashes.
|
||||
*/
|
||||
function stripDuplicateSlashes(path: string): string {
|
||||
return normalize(path).replace(/(^\/|\/$)/g, '');
|
||||
}
|
||||
|
||||
/**
|
||||
* Writes the output from the `build()` return value of a v2 Builder to
|
||||
* the filesystem.
|
||||
@@ -100,17 +84,16 @@ async function writeBuildResultV2(
|
||||
const lambdas = new Map<Lambda, string>();
|
||||
const overrides: Record<string, PathOverride> = {};
|
||||
for (const [path, output] of Object.entries(buildResult.output)) {
|
||||
const normalizedPath = stripDuplicateSlashes(path);
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(outputDir, output, normalizedPath, lambdas);
|
||||
await writeLambda(outputDir, output, path, lambdas);
|
||||
} else if (isPrerender(output)) {
|
||||
await writeLambda(outputDir, output.lambda, normalizedPath, lambdas);
|
||||
await writeLambda(outputDir, output.lambda, path, lambdas);
|
||||
|
||||
// Write the fallback file alongside the Lambda directory
|
||||
let fallback = output.fallback;
|
||||
if (fallback) {
|
||||
const ext = getFileExtension(fallback);
|
||||
const fallbackName = `${normalizedPath}.prerender-fallback${ext}`;
|
||||
const fallbackName = `${path}.prerender-fallback${ext}`;
|
||||
const fallbackPath = join(outputDir, 'functions', fallbackName);
|
||||
const stream = fallback.toStream();
|
||||
await pipe(
|
||||
@@ -126,7 +109,7 @@ async function writeBuildResultV2(
|
||||
const prerenderConfigPath = join(
|
||||
outputDir,
|
||||
'functions',
|
||||
`${normalizedPath}.prerender-config.json`
|
||||
`${path}.prerender-config.json`
|
||||
);
|
||||
const prerenderConfig = {
|
||||
...output,
|
||||
@@ -135,20 +118,12 @@ async function writeBuildResultV2(
|
||||
};
|
||||
await fs.writeJSON(prerenderConfigPath, prerenderConfig, { spaces: 2 });
|
||||
} else if (isFile(output)) {
|
||||
await writeStaticFile(
|
||||
outputDir,
|
||||
output,
|
||||
normalizedPath,
|
||||
overrides,
|
||||
cleanUrls
|
||||
);
|
||||
await writeStaticFile(outputDir, output, path, overrides, cleanUrls);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(outputDir, output, normalizedPath);
|
||||
await writeEdgeFunction(outputDir, output, path);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unsupported output type: "${
|
||||
(output as any).type
|
||||
}" for ${normalizedPath}`
|
||||
`Unsupported output type: "${(output as any).type}" for ${path}`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -170,9 +145,9 @@ async function writeBuildResultV3(
|
||||
throw new Error(`Expected "build.src" to be a string`);
|
||||
}
|
||||
const ext = extname(src);
|
||||
const path = stripDuplicateSlashes(
|
||||
build.config?.zeroConfig ? src.substring(0, src.length - ext.length) : src
|
||||
);
|
||||
const path = build.config?.zeroConfig
|
||||
? src.substring(0, src.length - ext.length)
|
||||
: src;
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(outputDir, output, path);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
|
||||
@@ -3,17 +3,17 @@ import { join } from 'path';
|
||||
import ini from 'ini';
|
||||
import git from 'git-last-commit';
|
||||
import { exec } from 'child_process';
|
||||
import { GitMetadata } from '../types';
|
||||
import { Output } from './output';
|
||||
import { GitMetadata } from '../../types';
|
||||
import { Output } from '../output';
|
||||
|
||||
export function isDirty(directory: string, output: Output): Promise<boolean> {
|
||||
return new Promise(resolve => {
|
||||
exec('git status -s', { cwd: directory }, function (err, stdout, stderr) {
|
||||
let debugMessage = `Failed to determine if Git repo has been modified:`;
|
||||
if (err || stderr) {
|
||||
if (err) debugMessage += `\n${err}`;
|
||||
if (stderr) debugMessage += `\n${stderr.trim()}`;
|
||||
output.debug(debugMessage);
|
||||
if (err) return resolve(false);
|
||||
if (stderr) {
|
||||
output.debug(
|
||||
`Failed to determine if git repo has been modified: ${stderr.trim()}`
|
||||
);
|
||||
return resolve(false);
|
||||
}
|
||||
resolve(stdout.trim().length > 0);
|
||||
@@ -33,31 +33,21 @@ function getLastCommit(directory: string): Promise<git.Commit> {
|
||||
});
|
||||
}
|
||||
|
||||
export async function parseGitConfig(configPath: string, output: Output) {
|
||||
try {
|
||||
return ini.parse(await fs.readFile(configPath, 'utf-8'));
|
||||
} catch (error) {
|
||||
output.debug(`Error while parsing repo data: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
export function pluckRemoteUrl(gitConfig: {
|
||||
[key: string]: any;
|
||||
}): string | undefined {
|
||||
// Assuming "origin" is the remote url that the user would want to use
|
||||
return gitConfig['remote "origin"']?.url;
|
||||
}
|
||||
|
||||
export async function getRemoteUrl(
|
||||
configPath: string,
|
||||
output: Output
|
||||
): Promise<string | null> {
|
||||
let gitConfig = await parseGitConfig(configPath, output);
|
||||
let gitConfig;
|
||||
try {
|
||||
gitConfig = ini.parse(await fs.readFile(configPath, 'utf-8'));
|
||||
} catch (error) {
|
||||
output.debug(`Error while parsing repo data: ${error.message}`);
|
||||
}
|
||||
if (!gitConfig) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const originUrl = pluckRemoteUrl(gitConfig);
|
||||
const originUrl: string = gitConfig['remote "origin"']?.url;
|
||||
if (originUrl) {
|
||||
return originUrl;
|
||||
}
|
||||
@@ -74,10 +64,7 @@ export async function createGitMeta(
|
||||
return;
|
||||
}
|
||||
const [commit, dirty] = await Promise.all([
|
||||
getLastCommit(directory).catch(err => {
|
||||
output.debug(
|
||||
`Failed to get last commit. The directory is likely not a Git repo, there are no latest commits, or it is corrupted.\n${err}`
|
||||
);
|
||||
getLastCommit(directory).catch(() => {
|
||||
return;
|
||||
}),
|
||||
isDirty(directory, output),
|
||||
@@ -558,8 +558,9 @@ export default class DevServer {
]);

await this.validateVercelConfig(vercelConfig);
const { error: routeError, routes: maybeRoutes } =
getTransformedRoutes(vercelConfig);
const { error: routeError, routes: maybeRoutes } = getTransformedRoutes({
nowConfig: vercelConfig,
});
if (routeError) {
this.output.prettyError(routeError);
await this.exit();
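The two sides of this hunk call `getTransformedRoutes` from `@vercel/routing-utils` with different shapes: one passes the parsed `vercel.json` directly, the other wraps it as `{ nowConfig }`. A hedged sketch of the direct call shape; the config literal is illustrative, and which signature applies depends on the routing-utils version in use:

```ts
import { getTransformedRoutes } from '@vercel/routing-utils';

const vercelConfig = {
  redirects: [{ source: '/old', destination: '/new', permanent: true }],
};

// One side of the diff passes the config object directly...
const result = getTransformedRoutes(vercelConfig);
// ...the other wraps it: getTransformedRoutes({ nowConfig: vercelConfig })

if (result.error) {
  console.error(result.error);
} else {
  console.log(result.routes);
}
```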
@@ -1,3 +1,4 @@
import inquirer from 'inquirer';
import Client from '../client';
import getUser from '../get-user';
import getTeams from '../teams/get-teams';
@@ -42,7 +43,7 @@ export default async function selectOrg(
return choices[defaultOrgIndex].value;
}

const answers = await client.prompt({
const answers = await inquirer.prompt({
type: 'list',
name: 'org',
message: question,
@@ -1,117 +0,0 @@
|
||||
import Client from '../client';
|
||||
import { stringify } from 'qs';
|
||||
import { Org } from '../../types';
|
||||
import chalk from 'chalk';
|
||||
import link from '../output/link';
|
||||
|
||||
export async function disconnectGitProvider(
|
||||
client: Client,
|
||||
org: Org,
|
||||
projectId: string
|
||||
) {
|
||||
const fetchUrl = `/v4/projects/${projectId}/link?${stringify({
|
||||
teamId: org.type === 'team' ? org.id : undefined,
|
||||
})}`;
|
||||
return client.fetch(fetchUrl, {
|
||||
method: 'DELETE',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
export async function connectGitProvider(
|
||||
client: Client,
|
||||
org: Org,
|
||||
projectId: string,
|
||||
type: string,
|
||||
repo: string
|
||||
) {
|
||||
const fetchUrl = `/v4/projects/${projectId}/link?${stringify({
|
||||
teamId: org.type === 'team' ? org.id : undefined,
|
||||
})}`;
|
||||
try {
|
||||
return await client.fetch(fetchUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({
|
||||
type,
|
||||
repo,
|
||||
}),
|
||||
});
|
||||
} catch (err) {
|
||||
if (
|
||||
err.meta?.action === 'Install GitHub App' ||
|
||||
err.code === 'repo_not_found'
|
||||
) {
|
||||
client.output.error(
|
||||
`Failed to link ${chalk.cyan(
|
||||
repo
|
||||
)}. Make sure there aren't any typos and that you have access to the repository if it's private.`
|
||||
);
|
||||
} else if (err.action === 'Add a Login Connection') {
|
||||
client.output.error(
|
||||
err.message.replace(repo, chalk.cyan(repo)) +
|
||||
`\nVisit ${link(err.link)} for more information.`
|
||||
);
|
||||
} else {
|
||||
client.output.error(
|
||||
`Failed to connect the ${formatProvider(
|
||||
type
|
||||
)} repository ${repo}.\n${err}`
|
||||
);
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
export function formatProvider(type: string): string {
|
||||
switch (type) {
|
||||
case 'github':
|
||||
return 'GitHub';
|
||||
case 'gitlab':
|
||||
return 'GitLab';
|
||||
case 'bitbucket':
|
||||
return 'Bitbucket';
|
||||
default:
|
||||
return type;
|
||||
}
|
||||
}
|
||||
|
||||
export function parseRepoUrl(originUrl: string): {
|
||||
provider: string;
|
||||
org: string;
|
||||
repo: string;
|
||||
} | null {
|
||||
const isSSH = originUrl.startsWith('git@');
|
||||
// Matches all characters between (// or @) and (.com or .org)
|
||||
// eslint-disable-next-line prefer-named-capture-group
|
||||
const provider = /(?<=(\/\/|@)).*(?=(\.com|\.org))/.exec(originUrl);
|
||||
if (!provider) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let org;
|
||||
let repo;
|
||||
|
||||
if (isSSH) {
|
||||
org = originUrl.split(':')[1].split('/')[0];
|
||||
repo = originUrl.split('/')[1]?.replace('.git', '');
|
||||
} else {
|
||||
// Assume https:// or git://
|
||||
org = originUrl.split('/')[3];
|
||||
repo = originUrl.split('/')[4]?.replace('.git', '');
|
||||
}
|
||||
|
||||
if (!org || !repo) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return {
|
||||
provider: provider[0],
|
||||
org,
|
||||
repo,
|
||||
};
|
||||
}
|
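A minimal usage sketch of the parseRepoUrl helper shown above; the expected results follow from the implementation and agree with the unit tests later in this compare:

// Illustrative only -- not part of the diff.
parseRepoUrl('https://github.com/vercel/vercel.git');
// => { provider: 'github', org: 'vercel', repo: 'vercel' }
parseRepoUrl('git@gitlab.com:gitlab-examples/knative-kotlin-app.git');
// => { provider: 'gitlab', org: 'gitlab-examples', repo: 'knative-kotlin-app' }
parseRepoUrl('bababooey');
// => null (no provider match), the case the CLI reports as an unparseable remote URL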
||||
@@ -5,7 +5,6 @@ import { join } from 'path';
|
||||
|
||||
export type ProjectLinkAndSettings = ProjectLink & {
|
||||
settings: {
|
||||
createdAt: Project['createdAt'];
|
||||
installCommand: Project['installCommand'];
|
||||
buildCommand: Project['buildCommand'];
|
||||
devCommand: Project['devCommand'];
|
||||
@@ -29,7 +28,6 @@ export async function writeProjectSettings(
|
||||
projectId: project.id,
|
||||
orgId: org.id,
|
||||
settings: {
|
||||
createdAt: project.createdAt,
|
||||
framework: project.framework,
|
||||
devCommand: project.devCommand,
|
||||
installCommand: project.installCommand,
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": null
|
||||
}
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
export default (req, res) => res.end('Vercel');
|
||||
@@ -1 +0,0 @@
|
||||
module.exports = (req, res) => res.end('Vercel');
|
||||
@@ -1 +0,0 @@
|
||||
export default (req, res) => res.end('Vercel');
|
||||
@@ -1,4 +0,0 @@
|
||||
import { IncomingMessage, ServerResponse } from 'http';
|
||||
|
||||
// Intentional syntax error to make the build fail
|
||||
export default (req: IncomingMessage, res: ServerResponse => res.end('Vercel');
|
||||
@@ -1,17 +1,9 @@
|
||||
const { FileBlob, Lambda } = require('@vercel/build-utils');
|
||||
const { FileBlob } = require('@vercel/build-utils');
|
||||
|
||||
exports.build = async () => {
|
||||
const file = new FileBlob({
|
||||
data: Buffer.from('file contents')
|
||||
});
|
||||
const lambda = new Lambda({
|
||||
files: {},
|
||||
runtime: 'provided',
|
||||
handler: 'example.js'
|
||||
})
|
||||
const output = {
|
||||
file,
|
||||
'withTrailingSlash/': lambda
|
||||
};
|
||||
const output = { file };
|
||||
return { output };
|
||||
};
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "bad-remote-url"
|
||||
}
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/bad-remote-url/git/config
@@ -1,10 +0,0 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = bababooey
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "existing-connection"
|
||||
}
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/HEAD
@@ -1 +0,0 @@
|
||||
ref: refs/heads/master
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/existing-connection/git/config
@@ -1,10 +0,0 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/user2/repo2
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
@@ -1 +0,0 @@
|
||||
Unnamed repository; edit this file 'description' to name the repository.
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "invalid-repo"
|
||||
}
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/invalid-repo/git/config
@@ -1,10 +0,0 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/laksfj/asdgklsadkl
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "new-connection"
|
||||
}
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/new-connection/git/config
@@ -1,10 +0,0 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/user/repo
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/no-git-config/.gitignore
@@ -1 +0,0 @@
|
||||
!.vercel
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "no-git-config"
|
||||
}
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/no-remote-url/.gitignore
@@ -1 +0,0 @@
|
||||
!.vercel
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "no-remote-url"
|
||||
}
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/no-remote-url/git/config
@@ -1,7 +0,0 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/same-repo-connection/.gitignore
@@ -1 +0,0 @@
|
||||
!.vercel
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "new-connection"
|
||||
}
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/same-repo-connection/git/config
@@ -1,10 +0,0 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/user/repo
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/unlinked/.gitignore
@@ -1,2 +0,0 @@
|
||||
!.vercel
|
||||
.vercel
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/unlinked/git/HEAD
@@ -1 +0,0 @@
|
||||
ref: refs/heads/master
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/unlinked/git/config
@@ -1,10 +0,0 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
[remote "origin"]
|
||||
url = https://github.com/user/repo.git
|
||||
fetch = +refs/heads/*:refs/remotes/origin/*
|
||||
packages/cli/test/fixtures/unit/commands/git/connect/unlinked/git/description
@@ -1 +0,0 @@
|
||||
Unnamed repository; edit this file 'description' to name the repository.
|
||||
packages/cli/test/fixtures/unit/commands/open/default/.vercel/project.json
@@ -0,0 +1 @@
|
||||
{ "projectId": "test-project", "orgId": "team_dashboard" }
|
||||
packages/cli/test/fixtures/unit/commands/open/default/index.html
@@ -0,0 +1 @@
|
||||
<h1>hi</h1>
|
||||
packages/cli/test/fixtures/unit/commands/open/no-deployments/.vercel/project.json
@@ -0,0 +1 @@
|
||||
{ "projectId": "no-deployments", "orgId": "team_dummy" }
|
||||
packages/cli/test/fixtures/unit/commands/open/no-deployments/index.html
@@ -0,0 +1 @@
|
||||
<h1>hi</h1>
|
||||
packages/cli/test/fixtures/unit/commands/open/preview/.vercel/project.json
@@ -0,0 +1 @@
|
||||
{ "projectId": "test-project", "orgId": "team_preview" }
|
||||
packages/cli/test/fixtures/unit/commands/open/preview/index.html
@@ -0,0 +1 @@
|
||||
<h1>hi</h1>
|
||||
packages/cli/test/fixtures/unit/commands/open/prod/.vercel/project.json
@@ -0,0 +1 @@
|
||||
{ "projectId": "test-project", "orgId": "team_prod" }
|
||||
packages/cli/test/fixtures/unit/commands/open/prod/index.html
@@ -0,0 +1 @@
|
||||
<h1>hi</h1>
|
||||
@@ -1,4 +1,4 @@
|
||||
export function pluckIdentifiersFromDeploymentList(output: string): {
|
||||
export function getDataFromIntro(output: string): {
|
||||
project: string | undefined;
|
||||
org: string | undefined;
|
||||
} {
|
||||
@@ -11,7 +11,7 @@ export function pluckIdentifiersFromDeploymentList(output: string): {
|
||||
};
|
||||
}
|
||||
|
||||
export function parseSpacedTableRow(output: string): string[] {
|
||||
export function parseTable(output: string): string[] {
|
||||
return output
|
||||
.trim()
|
||||
.replace(/ {1} +/g, ',')
|
||||
|
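Rough effect of the table-row helper shown above: runs of two or more spaces act as column separators. The hunk is truncated, so the final split is an assumption, as are the sample values:

// Illustrative only -- not part of the diff.
'prj_1   my-project   https://my-project.vercel.app'
  .trim()
  .replace(/ {1} +/g, ',') // a space followed by one or more spaces (i.e. 2+ spaces) becomes ','
  .split(',');
// => ['prj_1', 'my-project', 'https://my-project.vercel.app']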
||||
@@ -5,19 +5,17 @@ export function readOutputStream(
|
||||
length: number = 3
|
||||
): Promise<string> {
|
||||
return new Promise((resolve, reject) => {
|
||||
let output: string = '';
|
||||
let lines = 0;
|
||||
const chunks: Buffer[] = [];
|
||||
const timeout = setTimeout(() => {
|
||||
reject();
|
||||
}, 3000);
|
||||
|
||||
client.stderr.resume();
|
||||
client.stderr.on('data', chunk => {
|
||||
output += chunk.toString();
|
||||
lines++;
|
||||
if (lines === length) {
|
||||
chunks.push(chunk);
|
||||
if (chunks.length === length) {
|
||||
clearTimeout(timeout);
|
||||
resolve(output);
|
||||
resolve(chunks.toString().replace(/,/g, ''));
|
||||
}
|
||||
});
|
||||
client.stderr.on('error', reject);
|
||||
|
||||
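One subtlety in the chunks-based variant of readOutputStream shown above: chunks is a Buffer[], so chunks.toString() falls through to Array.prototype.toString and joins the buffers with commas, which the trailing .replace(/,/g, '') strips back out:

// Illustrative only -- not part of the diff.
const chunks = [Buffer.from('foo'), Buffer.from('bar')];
chunks.toString();                    // 'foo,bar'
chunks.toString().replace(/,/g, '');  // 'foobar'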
packages/cli/test/integration.js
@@ -1323,7 +1323,12 @@ test('[vc projects] should create a project successfully', async t => {
|
||||
Math.random().toString(36).split('.')[1]
|
||||
}`;
|
||||
|
||||
const vc = execa(binaryPath, ['project', 'add', projectName, ...defaultArgs]);
|
||||
const vc = execa(binaryPath, [
|
||||
'projects',
|
||||
'add',
|
||||
projectName,
|
||||
...defaultArgs,
|
||||
]);
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(`Success! Project ${projectName} added`)
|
||||
@@ -1334,7 +1339,7 @@ test('[vc projects] should create a project successfully', async t => {
|
||||
|
||||
// creating the same project again should succeed
|
||||
const vc2 = execa(binaryPath, [
|
||||
'project',
|
||||
'projects',
|
||||
'add',
|
||||
projectName,
|
||||
...defaultArgs,
|
||||
|
||||
@@ -24,6 +24,7 @@ export function useDeployment({
|
||||
plan: 'hobby',
|
||||
public: false,
|
||||
version: 2,
|
||||
buildingAt: Date.now(),
|
||||
createdAt,
|
||||
createdIn: 'sfo1',
|
||||
ownerId: creator.id,
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { client } from './client';
|
||||
import { Project } from '../../src/types';
|
||||
import { formatProvider } from '../../src/util/projects/connect-git-provider';
|
||||
|
||||
const envs = [
|
||||
{
|
||||
@@ -97,7 +96,7 @@ export const defaultProject = {
|
||||
requestedAt: 1571239348998,
|
||||
target: 'production',
|
||||
teamId: null,
|
||||
type: 'LAMBDAS',
|
||||
type: undefined,
|
||||
url: 'a-project-name-rjtr4pz3f.vercel.app',
|
||||
userId: 'K4amb7K9dAt5R2vBJWF32bmY',
|
||||
},
|
||||
@@ -132,6 +131,9 @@ export function useProject(project: Partial<Project> = defaultProject) {
|
||||
client.scenario.get(`/v8/projects/${project.id}`, (_req, res) => {
|
||||
res.json(project);
|
||||
});
|
||||
client.scenario.get(`/v9/projects/${project.id}`, (_req, res) => {
|
||||
res.json(project);
|
||||
});
|
||||
client.scenario.get(
|
||||
`/v6/projects/${project.id}/system-env-values`,
|
||||
(_req, res) => {
|
||||
|
||||
@@ -589,6 +589,8 @@ describe('build', () => {
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
client.stderr.pipe(process.stderr);
|
||||
client.setArgv('build');
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
@@ -612,40 +614,6 @@ describe('build', () => {
|
||||
expect(await fs.readFile(join(output, 'static/file'), 'utf8')).toEqual(
|
||||
'file contents'
|
||||
);
|
||||
|
||||
// "functions" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions'));
|
||||
expect(functions.sort()).toEqual(['withTrailingSlash.func']);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should store Builder error in `builds.json`', async () => {
|
||||
const cwd = fixture('node-error');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
|
||||
// `builds.json` contains "error" build
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds.builds).toHaveLength(4);
|
||||
|
||||
const errorBuilds = builds.builds.filter((b: any) => 'error' in b);
|
||||
expect(errorBuilds).toHaveLength(1);
|
||||
|
||||
expect(errorBuilds[0].error.name).toEqual('Error');
|
||||
expect(errorBuilds[0].error.message).toMatch(`TS1005`);
|
||||
expect(errorBuilds[0].error.message).toMatch(`',' expected.`);
|
||||
expect(errorBuilds[0].error.hideStackTrace).toEqual(true);
|
||||
expect(errorBuilds[0].error.code).toEqual('NODE_TYPESCRIPT_ERROR');
|
||||
|
||||
// `config.json`` contains `version`
|
||||
const configJson = await fs.readJSON(join(output, 'config.json'));
|
||||
expect(configJson.version).toBe(3);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
|
||||
@@ -1,380 +0,0 @@
|
||||
import { join } from 'path';
|
||||
import fs from 'fs-extra';
|
||||
import { useUser } from '../../mocks/user';
|
||||
import { useTeams } from '../../mocks/team';
|
||||
import { defaultProject, useProject } from '../../mocks/project';
|
||||
import { client } from '../../mocks/client';
|
||||
import git from '../../../src/commands/git';
|
||||
import { Project } from '../../../src/types';
|
||||
|
||||
describe('git', () => {
|
||||
describe('connect', () => {
|
||||
const originalCwd = process.cwd();
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/git/connect', name);
|
||||
|
||||
it('connects an unlinked project', async () => {
|
||||
const cwd = fixture('unlinked');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'unlinked',
|
||||
name: 'unlinked',
|
||||
});
|
||||
client.setArgv('projects', 'connect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Set up');
|
||||
client.stdin.write('y\n');
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'Which scope should contain your project?'
|
||||
);
|
||||
client.stdin.write('\r');
|
||||
|
||||
await expect(client.stderr).toOutput('Found project');
|
||||
client.stdin.write('y\n');
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo.git`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
await expect(client.stderr).toOutput(
|
||||
'Connected GitHub repository user/repo!'
|
||||
);
|
||||
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const project: Project = await client.fetch(`/v8/projects/unlinked`);
|
||||
expect(project.link).toMatchObject({
|
||||
type: 'github',
|
||||
repo: 'user/repo',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when there is no git config', async () => {
|
||||
const cwd = fixture('no-git-config');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'no-git-config',
|
||||
name: 'no-git-config',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! No local git repo found. Run \`git clone <url>\` to clone a remote Git repository first.\n`
|
||||
);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when there is no remote url', async () => {
|
||||
const cwd = fixture('no-remote-url');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'no-remote-url',
|
||||
name: 'no-remote-url',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! No remote origin URL found in your Git config. Make sure you've configured a remote repo in your local Git config. Run \`git remote --help\` for more details.`
|
||||
);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when the remote url is bad', async () => {
|
||||
const cwd = fixture('bad-remote-url');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'bad-remote-url',
|
||||
name: 'bad-remote-url',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const exitCode = await git(client);
|
||||
expect(exitCode).toEqual(1);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": bababooey`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error! Failed to parse Git repo data from the following remote URL in your Git config: bababooey\n`
|
||||
);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should connect a repo to a project that is not already connected', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> Connected GitHub repository user/repo!\n`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const project: Project = await client.fetch(
|
||||
`/v8/projects/new-connection`
|
||||
);
|
||||
expect(project.link).toMatchObject({
|
||||
type: 'github',
|
||||
repo: 'user/repo',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should replace an old connection with a new one', async () => {
|
||||
const cwd = fixture('existing-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'existing-connection',
|
||||
name: 'existing-connection',
|
||||
});
|
||||
project.project.link = {
|
||||
type: 'github',
|
||||
repo: 'repo',
|
||||
org: 'user',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user2/repo2`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> Connected GitHub repository user2/repo2!\n`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
const newProjectData: Project = await client.fetch(
|
||||
`/v8/projects/existing-connection`
|
||||
);
|
||||
expect(newProjectData.link).toMatchObject({
|
||||
type: 'github',
|
||||
repo: 'user2/repo2',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
});
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should exit when an already-connected repo is connected', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
project.project.link = {
|
||||
type: 'github',
|
||||
repo: 'repo',
|
||||
org: 'user',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/user/repo`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`> user/repo is already connected to your project.\n`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(1);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when it cannot find the repository', async () => {
|
||||
const cwd = fixture('invalid-repo');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'invalid-repo',
|
||||
name: 'invalid-repo',
|
||||
});
|
||||
|
||||
client.setArgv('projects', 'connect', '--confirm');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Identified Git remote "origin": https://github.com/laksfj/asdgklsadkl`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Failed to link laksfj/asdgklsadkl. Make sure there aren't any typos and that you have access to the repository if it's private.`
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(1);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
|
||||
describe('disconnect', () => {
|
||||
const originalCwd = process.cwd();
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/git/connect', name);
|
||||
|
||||
it('should disconnect a repository', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
project.project.link = {
|
||||
type: 'github',
|
||||
repo: 'repo',
|
||||
org: 'user',
|
||||
repoId: 1010,
|
||||
gitCredentialId: '',
|
||||
sourceless: true,
|
||||
createdAt: 1656109539791,
|
||||
updatedAt: 1656109539791,
|
||||
};
|
||||
client.setArgv('git', 'disconnect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
`Are you sure you want to disconnect user/repo from your project?`
|
||||
);
|
||||
client.stdin.write('y\n');
|
||||
await expect(client.stderr).toOutput('Disconnected user/repo.');
|
||||
|
||||
const newProjectData: Project = await client.fetch(
|
||||
`/v8/projects/new-connection`
|
||||
);
|
||||
expect(newProjectData.link).toBeUndefined();
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail if there is no repository to disconnect', async () => {
|
||||
const cwd = fixture('new-connection');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
await fs.rename(join(cwd, 'git'), join(cwd, '.git'));
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'new-connection',
|
||||
name: 'new-connection',
|
||||
});
|
||||
|
||||
client.setArgv('git', 'disconnect');
|
||||
const gitPromise = git(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'No Git repository connected. Run `vercel project connect` to connect one.'
|
||||
);
|
||||
|
||||
const exitCode = await gitPromise;
|
||||
expect(exitCode).toEqual(1);
|
||||
} finally {
|
||||
await fs.rename(join(cwd, '.git'), join(cwd, 'git'));
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -6,10 +6,7 @@ import { useTeams } from '../../mocks/team';
|
||||
import { defaultProject, useProject } from '../../mocks/project';
|
||||
import { useDeployment } from '../../mocks/deployment';
|
||||
import { readOutputStream } from '../../helpers/read-output-stream';
|
||||
import {
|
||||
parseSpacedTableRow,
|
||||
pluckIdentifiersFromDeploymentList,
|
||||
} from '../../helpers/parse-table';
|
||||
import { parseTable, getDataFromIntro } from '../../helpers/parse-table';
|
||||
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/list', name);
|
||||
@@ -37,9 +34,9 @@ describe('list', () => {
|
||||
|
||||
const output = await readOutputStream(client);
|
||||
|
||||
const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
|
||||
const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
|
||||
const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
|
||||
const { org } = getDataFromIntro(output.split('\n')[0]);
|
||||
const header: string[] = parseTable(output.split('\n')[2]);
|
||||
const data: string[] = parseTable(output.split('\n')[3]);
|
||||
data.splice(2, 1);
|
||||
|
||||
expect(org).toEqual(team[0].slug);
|
||||
@@ -79,9 +76,9 @@ describe('list', () => {
|
||||
|
||||
const output = await readOutputStream(client);
|
||||
|
||||
const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
|
||||
const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
|
||||
const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
|
||||
const { org } = getDataFromIntro(output.split('\n')[0]);
|
||||
const header: string[] = parseTable(output.split('\n')[2]);
|
||||
const data: string[] = parseTable(output.split('\n')[3]);
|
||||
data.splice(2, 1);
|
||||
|
||||
expect(org).toEqual(teamSlug);
|
||||
|
||||
packages/cli/test/unit/commands/open.test.ts
@@ -0,0 +1,208 @@
|
||||
import { join } from 'path';
|
||||
import { defaultProject, useProject } from '../../mocks/project';
|
||||
import { useTeams } from '../../mocks/team';
|
||||
import { useUser } from '../../mocks/user';
|
||||
import open from '../../../src/commands/open';
|
||||
import { client } from '../../mocks/client';
|
||||
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/open', name);
|
||||
|
||||
describe('open', () => {
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
it('should open the dashboard', async () => {
|
||||
const cwd = fixture('default');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
|
||||
useUser();
|
||||
const team = useTeams('team_dashboard');
|
||||
useProject({
|
||||
...defaultProject,
|
||||
id: 'test-project',
|
||||
name: 'test-project',
|
||||
});
|
||||
|
||||
client.setArgv('open', 'dash');
|
||||
const openPromise = open(client, true);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Opened https://vercel.com/${team[0].slug}/test-project`
|
||||
);
|
||||
|
||||
const exitCode = await openPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should open the preview inspect url', async () => {
|
||||
const cwd = fixture('preview');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
|
||||
useUser();
|
||||
const team = useTeams('team_preview');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'test-project',
|
||||
name: 'test-project',
|
||||
});
|
||||
const deploymentId = project?.project?.latestDeployments?.[0].id?.replace(
|
||||
'dpl_',
|
||||
''
|
||||
);
|
||||
|
||||
client.setArgv('open', 'dash', 'latest');
|
||||
const openPromise = open(client, true);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Opened https://vercel.com/${team[0].slug}/test-project/${deploymentId}`
|
||||
);
|
||||
|
||||
const exitCode = await openPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should open the production inspect url', async () => {
|
||||
const cwd = fixture('prod');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
|
||||
useUser();
|
||||
const team = useTeams('team_prod');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'test-project',
|
||||
name: 'test-project',
|
||||
});
|
||||
const deploymentId = project?.project?.targets?.production?.id?.replace(
|
||||
'dpl_',
|
||||
''
|
||||
);
|
||||
|
||||
client.setArgv('open', 'dash', 'latest', '--prod');
|
||||
const openPromise = open(client, true);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Opened https://vercel.com/${team[0].slug}/test-project/${deploymentId}`
|
||||
);
|
||||
|
||||
const exitCode = await openPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should open the preview deploy url', async () => {
|
||||
const cwd = fixture('default');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'test-project',
|
||||
name: 'test-project',
|
||||
});
|
||||
const url = project?.project?.latestDeployments?.[0]?.url;
|
||||
|
||||
client.setArgv('open', 'latest');
|
||||
const openPromise = open(client, true);
|
||||
await expect(client.stderr).toOutput(`Opened https://${url}`);
|
||||
|
||||
const exitCode = await openPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should open the production deploy url', async () => {
|
||||
const cwd = fixture('default');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'test-project',
|
||||
name: 'test-project',
|
||||
});
|
||||
const url = project?.project?.targets?.production?.url;
|
||||
|
||||
client.setArgv('open', 'latest', '--prod');
|
||||
const openPromise = open(client, true);
|
||||
await expect(client.stderr).toOutput(`Opened https://${url}`);
|
||||
|
||||
const exitCode = await openPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should open the latest preview deploy url from dropdown', async () => {
|
||||
const cwd = fixture('default');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'test-project',
|
||||
name: 'test-project',
|
||||
});
|
||||
const url = project?.project?.latestDeployments?.[0]?.url;
|
||||
|
||||
client.setArgv('open');
|
||||
const openPromise = open(client, true);
|
||||
|
||||
await expect(client.stderr).toOutput('What do you want to open?');
|
||||
client.stdin.write('\x1B[B'); // down arrow
|
||||
client.stdin.write('\r'); // return
|
||||
|
||||
await expect(client.stderr).toOutput(`Opened https://${url}`);
|
||||
const exitCode = await openPromise;
|
||||
expect(exitCode).toEqual(0);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
it('should fail when there are no deployments', async () => {
|
||||
const cwd = fixture('no-deployments');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
|
||||
useUser();
|
||||
useTeams('team_dummy');
|
||||
const project = useProject({
|
||||
...defaultProject,
|
||||
id: 'no-deployments',
|
||||
name: 'no-deployments',
|
||||
});
|
||||
project.project.latestDeployments = undefined;
|
||||
|
||||
client.setArgv('open', 'dash', 'latest');
|
||||
const openPromiseInspect = open(client, true);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'No deployments found. Run `vercel deploy` to create a deployment.'
|
||||
);
|
||||
const exitCodeInspect = await openPromiseInspect;
|
||||
expect(exitCodeInspect).toEqual(1);
|
||||
|
||||
client.setArgv('open', 'latest');
|
||||
const openPromiseDeploy = open(client, true);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'No deployments found. Run `vercel deploy` to create a deployment.'
|
||||
);
|
||||
const exitCodeDeploy = await openPromiseDeploy;
|
||||
expect(exitCodeDeploy).toEqual(1);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -5,12 +5,9 @@ import { defaultProject, useProject } from '../../mocks/project';
|
||||
import { client } from '../../mocks/client';
|
||||
import { Project } from '../../../src/types';
|
||||
import { readOutputStream } from '../../helpers/read-output-stream';
|
||||
import {
|
||||
pluckIdentifiersFromDeploymentList,
|
||||
parseSpacedTableRow,
|
||||
} from '../../helpers/parse-table';
|
||||
import { getDataFromIntro, parseTable } from '../../helpers/parse-table';
|
||||
|
||||
describe('project', () => {
|
||||
describe('projects', () => {
|
||||
describe('list', () => {
|
||||
it('should list deployments under a user', async () => {
|
||||
const user = useUser();
|
||||
@@ -22,9 +19,9 @@ describe('project', () => {
|
||||
await projects(client);
|
||||
|
||||
const output = await readOutputStream(client, 2);
|
||||
const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
|
||||
const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
|
||||
const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
|
||||
const { org } = getDataFromIntro(output.split('\n')[0]);
|
||||
const header: string[] = parseTable(output.split('\n')[2]);
|
||||
const data: string[] = parseTable(output.split('\n')[3]);
|
||||
data.pop();
|
||||
|
||||
expect(org).toEqual(user.username);
|
||||
@@ -43,9 +40,9 @@ describe('project', () => {
|
||||
await projects(client);
|
||||
|
||||
const output = await readOutputStream(client, 2);
|
||||
const { org } = pluckIdentifiersFromDeploymentList(output.split('\n')[0]);
|
||||
const header: string[] = parseSpacedTableRow(output.split('\n')[2]);
|
||||
const data: string[] = parseSpacedTableRow(output.split('\n')[3]);
|
||||
const { org } = getDataFromIntro(output.split('\n')[0]);
|
||||
const header: string[] = parseTable(output.split('\n')[2]);
|
||||
const data: string[] = parseTable(output.split('\n')[3]);
|
||||
data.pop();
|
||||
|
||||
expect(org).toEqual(team[0].slug);
|
||||
|
||||
@@ -101,9 +101,7 @@ describe('pull', () => {
|
||||
Object {
|
||||
"orgId": "team_dummy",
|
||||
"projectId": "vercel-pull-next",
|
||||
"settings": Object {
|
||||
"createdAt": 1555413045188,
|
||||
},
|
||||
"settings": Object {},
|
||||
}
|
||||
`);
|
||||
} finally {
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { normalizeURL } from '../../../../src/util/bisect/normalize-url';
|
||||
import { normalizeURL } from '../../../../src/util/normalize-url';
|
||||
|
||||
describe('normalize-url', () => {
|
||||
it('should add https to url without scheme', () => {
|
||||
|
||||
@@ -1,51 +0,0 @@
|
||||
import { sortBuilders } from '../../../../src/util/build/sort-builders';
|
||||
|
||||
describe('sortBuilders()', () => {
|
||||
test.each([
|
||||
{
|
||||
name: 'should sort @vercel/next from middle to beginning',
|
||||
input: ['@vercel/node', '@vercel/next', '@vercel/python'],
|
||||
output: ['@vercel/next', '@vercel/node', '@vercel/python'],
|
||||
},
|
||||
{
|
||||
name: 'should sort @vercel/static-build from middle to beginning',
|
||||
input: ['@vercel/node', '@vercel/static-build', '@vercel/python'],
|
||||
output: ['@vercel/static-build', '@vercel/node', '@vercel/python'],
|
||||
},
|
||||
{
|
||||
name: 'should sort @vercel/remix from end to beginning',
|
||||
input: ['@vercel/python', '@vercel/node', '@vercel/remix'],
|
||||
output: ['@vercel/remix', '@vercel/python', '@vercel/node'],
|
||||
},
|
||||
{
|
||||
name: 'should sort @vercel/redwood from beginning to beginning',
|
||||
input: ['@vercel/redwood', '@vercel/python', '@vercel/ruby'],
|
||||
output: ['@vercel/redwood', '@vercel/python', '@vercel/ruby'],
|
||||
},
|
||||
{
|
||||
name: 'should sort @vercel/hydrogen from end to beginning',
|
||||
input: ['@vercel/python', '@vercel/hydrogen'],
|
||||
output: ['@vercel/hydrogen', '@vercel/python'],
|
||||
},
|
||||
{
|
||||
name: 'should sort @vercel/static-build to beginning with many @vercel/node',
|
||||
input: [
|
||||
'@vercel/node',
|
||||
'@vercel/node',
|
||||
'@vercel/node',
|
||||
'@vercel/static-build',
|
||||
'@vercel/node',
|
||||
],
|
||||
output: [
|
||||
'@vercel/static-build',
|
||||
'@vercel/node',
|
||||
'@vercel/node',
|
||||
'@vercel/node',
|
||||
'@vercel/node',
|
||||
],
|
||||
},
|
||||
])('$name', ({ input, output }) => {
|
||||
const builders = sortBuilders(input.map(use => ({ use })));
|
||||
expect(builders.map(b => b.use)).toEqual(output);
|
||||
});
|
||||
});
|
||||
@@ -6,10 +6,8 @@ import {
|
||||
createGitMeta,
|
||||
getRemoteUrl,
|
||||
isDirty,
|
||||
} from '../../../../src/util/create-git-meta';
|
||||
} from '../../../../src/util/deploy/create-git-meta';
|
||||
import { client } from '../../../mocks/client';
|
||||
import { parseRepoUrl } from '../../../../src/util/projects/connect-git-provider';
|
||||
import { readOutputStream } from '../../../helpers/read-output-stream';
|
||||
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../../fixtures/unit/create-git-meta', name);
|
||||
@@ -29,97 +27,6 @@ describe('getRemoteUrl', () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe('parseRepoUrl', () => {
|
||||
it('should be null when a url does not match the regex', () => {
|
||||
const parsedUrl = parseRepoUrl('https://examplecom/foo');
|
||||
expect(parsedUrl).toBeNull();
|
||||
});
|
||||
it('should be null when a url does not contain org and repo data', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/borked');
|
||||
expect(parsedUrl).toBeNull();
|
||||
});
|
||||
it('should parse url with a period in the repo name', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/next.js');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('next.js');
|
||||
});
|
||||
it('should parse url that ends with .git', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/next.js.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('next.js');
|
||||
});
|
||||
it('should parse github https url', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/vercel.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
it('should parse github https url without the .git suffix', () => {
|
||||
const parsedUrl = parseRepoUrl('https://github.com/vercel/vercel');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
it('should parse github git url', () => {
|
||||
const parsedUrl = parseRepoUrl('git://github.com/vercel/vercel.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
it('should parse github ssh url', () => {
|
||||
const parsedUrl = parseRepoUrl('git@github.com:vercel/vercel.git');
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('github');
|
||||
expect(parsedUrl?.org).toEqual('vercel');
|
||||
expect(parsedUrl?.repo).toEqual('vercel');
|
||||
});
|
||||
|
||||
it('should parse gitlab https url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'https://gitlab.com/gitlab-examples/knative-kotlin-app.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('gitlab');
|
||||
expect(parsedUrl?.org).toEqual('gitlab-examples');
|
||||
expect(parsedUrl?.repo).toEqual('knative-kotlin-app');
|
||||
});
|
||||
it('should parse gitlab ssh url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'git@gitlab.com:gitlab-examples/knative-kotlin-app.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('gitlab');
|
||||
expect(parsedUrl?.org).toEqual('gitlab-examples');
|
||||
expect(parsedUrl?.repo).toEqual('knative-kotlin-app');
|
||||
});
|
||||
|
||||
it('should parse bitbucket https url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'https://bitbucket.org/atlassianlabs/maven-project-example.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('bitbucket');
|
||||
expect(parsedUrl?.org).toEqual('atlassianlabs');
|
||||
expect(parsedUrl?.repo).toEqual('maven-project-example');
|
||||
});
|
||||
it('should parse bitbucket ssh url', () => {
|
||||
const parsedUrl = parseRepoUrl(
|
||||
'git@bitbucket.org:atlassianlabs/maven-project-example.git'
|
||||
);
|
||||
expect(parsedUrl).toBeDefined();
|
||||
expect(parsedUrl?.provider).toEqual('bitbucket');
|
||||
expect(parsedUrl?.org).toEqual('atlassianlabs');
|
||||
expect(parsedUrl?.repo).toEqual('maven-project-example');
|
||||
});
|
||||
});
|
||||
|
||||
describe('createGitMeta', () => {
|
||||
it('is undefined when it does not receive a remote url', async () => {
|
||||
const directory = fixture('no-origin');
|
||||
@@ -228,17 +135,7 @@ describe('createGitMeta', () => {
|
||||
await fs.copy(directory, tmpDir);
|
||||
await fs.rename(join(tmpDir, 'git'), join(tmpDir, '.git'));
|
||||
|
||||
client.output.debugEnabled = true;
|
||||
const data = await createGitMeta(tmpDir, client.output);
|
||||
|
||||
const output = await readOutputStream(client, 2);
|
||||
|
||||
expect(output).toContain(
|
||||
`Failed to get last commit. The directory is likely not a Git repo, there are no latest commits, or it is corrupted.`
|
||||
);
|
||||
expect(output).toContain(
|
||||
`Failed to determine if Git repo has been modified:`
|
||||
);
|
||||
expect(data).toBeUndefined();
|
||||
} finally {
|
||||
await fs.remove(tmpDir);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "12.1.2",
|
||||
"version": "12.1.0",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -42,8 +42,8 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "3.0.0",
|
||||
|
||||
@@ -63,6 +63,7 @@ export interface Deployment {
|
||||
| 'ERROR';
|
||||
createdAt: number;
|
||||
createdIn: string;
|
||||
inspectorUrl?: string;
|
||||
buildingAt?: number;
|
||||
creator?: {
|
||||
uid?: string;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "1.1.1",
|
||||
"version": "1.1.0",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/js-yaml": "3.12.1",
|
||||
"@types/node": "12.0.4",
|
||||
"@types/node-fetch": "2.5.8",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"ajv": "6.12.2",
|
||||
"typescript": "4.3.4"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/fs-detectors",
|
||||
"version": "2.0.1",
|
||||
"version": "1.0.1",
|
||||
"description": "Vercel filesystem detectors",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
@@ -20,8 +20,8 @@
|
||||
"test-unit": "yarn test"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/frameworks": "1.1.1",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"@vercel/frameworks": "1.1.0",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"glob": "8.0.3",
|
||||
"js-yaml": "4.1.0",
|
||||
"minimatch": "3.0.4",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import minimatch from 'minimatch';
|
||||
import { valid as validSemver } from 'semver';
|
||||
import { parse as parsePath, extname } from 'path';
|
||||
import type { Route, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import type { Route, Source } from '@vercel/routing-utils';
|
||||
import frameworkList, { Framework } from '@vercel/frameworks';
|
||||
import type {
|
||||
PackageJson,
|
||||
@@ -155,8 +155,8 @@ export async function detectBuilders(
|
||||
|
||||
let fallbackEntrypoint: string | null = null;
|
||||
|
||||
const apiRoutes: RouteWithSrc[] = [];
|
||||
const dynamicRoutes: RouteWithSrc[] = [];
|
||||
const apiRoutes: Source[] = [];
|
||||
const dynamicRoutes: Source[] = [];
|
||||
|
||||
// API
|
||||
for (const fileName of sortedFiles) {
|
||||
@@ -692,7 +692,7 @@ function getApiRoute(
|
||||
options: Options,
|
||||
absolutePathCache: Map<string, string>
|
||||
): {
|
||||
apiRoute: RouteWithSrc | null;
|
||||
apiRoute: Source | null;
|
||||
isDynamic: boolean;
|
||||
routeError: ErrorResponse | null;
|
||||
} {
|
||||
@@ -886,7 +886,7 @@ function createRouteFromPath(
|
||||
filePath: string,
|
||||
featHandleMiss: boolean,
|
||||
cleanUrls: boolean
|
||||
): { route: RouteWithSrc; isDynamic: boolean } {
|
||||
): { route: Source; isDynamic: boolean } {
|
||||
const parts = filePath.split('/');
|
||||
|
||||
let counter = 1;
|
||||
@@ -932,7 +932,7 @@ function createRouteFromPath(
|
||||
? `^/${srcParts.slice(0, -1).join('/')}${srcParts.slice(-1)[0]}$`
|
||||
: `^/${srcParts.join('/')}$`;
|
||||
|
||||
let route: RouteWithSrc;
|
||||
let route: Source;
|
||||
|
||||
if (featHandleMiss) {
|
||||
const extensionless = ext ? filePath.slice(0, -ext.length) : filePath;
|
||||
@@ -959,8 +959,8 @@ interface LimitedRoutes {
|
||||
|
||||
function getRouteResult(
|
||||
pkg: PackageJson | undefined | null,
|
||||
apiRoutes: RouteWithSrc[],
|
||||
dynamicRoutes: RouteWithSrc[],
|
||||
apiRoutes: Source[],
|
||||
dynamicRoutes: Source[],
|
||||
outputDirectory: string,
|
||||
apiBuilders: Builder[],
|
||||
frontendBuilder: Builder | null,
|
||||
|
||||
@@ -13,6 +13,8 @@ interface Metadata {
|
||||
hasMiddleware: boolean;
|
||||
}
|
||||
|
||||
const enableFileSystemApiFrameworks = new Set(['solidstart']);
|
||||
|
||||
/**
|
||||
* If the Deployment can be built with the new File System API,
|
||||
* return the new Builder. Otherwise an "Exclusion Condition"
|
||||
@@ -59,7 +61,11 @@ export async function detectFileSystemAPI({
|
||||
hasMiddleware,
|
||||
};
|
||||
|
||||
const isEnabled = enableFlag || hasMiddleware || hasDotOutput;
|
||||
const isEnabled =
|
||||
enableFlag ||
|
||||
hasMiddleware ||
|
||||
hasDotOutput ||
|
||||
enableFileSystemApiFrameworks.has(framework);
|
||||
if (!isEnabled) {
|
||||
return { metadata, fsApiBuilder: null, reason: 'Flag not enabled.' };
|
||||
}
|
||||
|
||||
@@ -80,13 +80,11 @@ export async function detectFramework({
|
||||
fs,
|
||||
frameworkList,
|
||||
}: DetectFrameworkOptions): Promise<string | null> {
|
||||
const result = await Promise.all(
|
||||
frameworkList.map(async frameworkMatch => {
|
||||
if (await matches(fs, frameworkMatch)) {
|
||||
return frameworkMatch.slug;
|
||||
}
|
||||
return null;
|
||||
})
|
||||
);
|
||||
return result.find(res => res !== null) ?? null;
|
||||
for (const framework of frameworkList) {
|
||||
if (await matches(fs, framework)) {
|
||||
return framework.slug;
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
import type {
|
||||
Route,
|
||||
RouteWithHandle as Handler,
|
||||
RouteWithSrc as Source,
|
||||
} from '@vercel/routing-utils';
|
||||
import type { Source, Route, Handler } from '@vercel/routing-utils';
|
||||
import {
|
||||
detectBuilders,
|
||||
detectOutputDirectory,
|
||||
|
||||
@@ -252,19 +252,6 @@ describe('DetectorFilesystem', () => {
|
||||
expect(await detectFramework({ fs, frameworkList })).toBe('nextjs');
|
||||
});
|
||||
|
||||
it('Detect frameworks based on ascending order in framework list', async () => {
|
||||
const fs = new VirtualFilesystem({
|
||||
'package.json': JSON.stringify({
|
||||
dependencies: {
|
||||
next: '9.0.0',
|
||||
gatsby: '4.18.0',
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
expect(await detectFramework({ fs, frameworkList })).toBe('nextjs');
|
||||
});
|
||||
|
||||
it('Detect Nuxt.js', async () => {
|
||||
const fs = new VirtualFilesystem({
|
||||
'package.json': JSON.stringify({
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/go",
|
||||
"version": "2.0.7",
|
||||
"version": "2.0.5",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
|
||||
@@ -25,7 +25,7 @@
|
||||
"@types/fs-extra": "^5.0.5",
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "^4.0.0",
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"async-retry": "1.3.1",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/hydrogen",
|
||||
"version": "0.0.4",
|
||||
"version": "0.0.2",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
@@ -22,7 +22,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "*",
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"typescript": "4.6.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "3.1.7",
|
||||
"version": "3.1.4",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -45,9 +45,9 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "5.0.3",
|
||||
"@vercel/build-utils": "5.0.1",
|
||||
"@vercel/nft": "0.20.1",
|
||||
"@vercel/routing-utils": "2.0.0",
|
||||
"@vercel/routing-utils": "1.13.5",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
"cheerio": "1.0.0-rc.10",
|
||||
|
||||
@@ -24,7 +24,7 @@ import {
|
||||
NodejsLambda,
|
||||
BuildResultV2Typical as BuildResult,
|
||||
} from '@vercel/build-utils';
|
||||
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
|
||||
import { Handler, Route, Source } from '@vercel/routing-utils';
|
||||
import {
|
||||
convertHeaders,
|
||||
convertRedirects,
|
||||
@@ -399,7 +399,6 @@ export const build: BuildV2 = async ({
|
||||
const env: typeof process.env = { ...spawnOpts.env };
|
||||
const memoryToConsume = Math.floor(os.totalmem() / 1024 ** 2) - 128;
|
||||
env.NODE_OPTIONS = `--max_old_space_size=${memoryToConsume}`;
|
||||
env.NEXT_EDGE_RUNTIME_PROVIDER = 'vercel';
|
||||
|
||||
if (target) {
|
||||
// Since version v10.0.8-canary.15 of Next.js the NEXT_PRIVATE_TARGET env
|
||||
@@ -896,7 +895,7 @@ export const build: BuildV2 = async ({
...(output[path.join('./', entryDirectory, '404')] ||
output[path.join('./', entryDirectory, '404/index')]
? [
{ handle: 'error' } as RouteWithHandle,
{ handle: 'error' } as Handler,

{
status: 404,
@@ -928,7 +927,7 @@ export const build: BuildV2 = async ({
let trailingSlash = false;

redirects = redirects.filter(_redir => {
const redir = _redir as RouteWithSrc;
const redir = _redir as Source;
// detect the trailing slash redirect and make sure it's
// kept above the wildcard mapping to prevent erroneous redirects
// since non-continue routes come after continue the $wildcard
@@ -1146,7 +1145,7 @@ export const build: BuildV2 = async ({
continue;
}

const route: RouteWithSrc & { dest: string } = {
const route: Source & { dest: string } = {
src: (
dataRoute.namedDataRouteRegex || dataRoute.dataRouteRegex
).replace(/^\^/, `^${appMountPrefixNoTrailingSlash}`),
@@ -1175,7 +1174,7 @@ export const build: BuildV2 = async ({
if (isOmittedRoute && isServerMode) {
// only match this route when in preview mode so
// preview works for non-prerender fallback: false pages
(route as RouteWithSrc).has = [
(route as Source).has = [
{
type: 'cookie',
key: '__prerender_bypass',
@@ -2454,7 +2453,7 @@ export const build: BuildV2 = async ({
? []
: [
// Custom Next.js 404 page
{ handle: 'error' } as RouteWithHandle,
{ handle: 'error' } as Handler,

...(i18n && (static404Page || hasIsr404Page)
? [
@@ -14,7 +14,7 @@ import {
Files,
BuildResultV2Typical as BuildResult,
} from '@vercel/build-utils';
import { Route, RouteWithHandle, RouteWithSrc } from '@vercel/routing-utils';
import { Handler, Route, Source } from '@vercel/routing-utils';
import { MAX_AGE_ONE_YEAR } from '.';
import {
NextRequiredServerFilesManifest,
@@ -56,7 +56,6 @@ import prettyBytes from 'pretty-bytes';
const CORRECT_NOT_FOUND_ROUTES_VERSION = 'v12.0.1';
const CORRECT_MIDDLEWARE_ORDER_VERSION = 'v12.1.7-canary.29';
const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';
const EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION = 'v12.2.0';

export async function serverBuild({
dynamicPages,
@@ -134,10 +133,6 @@ export async function serverBuild({
const lambdaPageKeys = Object.keys(lambdaPages);
const internalPages = ['_app.js', '_error.js', '_document.js'];
const pageBuildTraces = await glob('**/*.js.nft.json', pagesDir);
const isEmptyAllowQueryForPrendered = semver.gte(
nextVersion,
EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION
);
const isCorrectNotFoundRoutes = semver.gte(
nextVersion,
CORRECT_NOT_FOUND_ROUTES_VERSION
@@ -761,7 +756,6 @@ export async function serverBuild({
static404Page,
hasPages404: routesManifest.pages404,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
});

Object.keys(prerenderManifest.staticRoutes).forEach(route =>
@@ -828,7 +822,7 @@ export async function serverBuild({
const { staticFiles, publicDirectoryFiles, staticDirectoryFiles } =
await getStaticFiles(entryPath, entryDirectory, outputDirectory);

const notFoundPreviewRoutes: RouteWithSrc[] = [];
const notFoundPreviewRoutes: Source[] = [];

if (prerenderManifest.notFoundRoutes?.length > 0 && canUsePreviewMode) {
// we combine routes into one src here to reduce the number of needed
@@ -1384,7 +1378,7 @@ export async function serverBuild({
},

// error handling
{ handle: 'error' } as RouteWithHandle,
{ handle: 'error' } as Handler,

// Custom Next.js 404 page
...(i18n && (static404Page || hasIsr404Page || lambdaPages['404.js'])
@@ -16,7 +16,7 @@ import {
EdgeFunction,
} from '@vercel/build-utils';
import { NodeFileTraceReasons } from '@vercel/nft';
import { Header, Rewrite, Route, RouteWithSrc } from '@vercel/routing-utils';
import { Header, Rewrite, Route, Source } from '@vercel/routing-utils';
import { Sema } from 'async-sema';
import crc32 from 'buffer-crc32';
import fs, { lstat, stat } from 'fs-extra';
@@ -273,8 +273,8 @@ export async function getDynamicRoutes(
canUsePreviewMode?: boolean,
bypassToken?: string,
isServerMode?: boolean,
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>
): Promise<RouteWithSrc[]> {
dynamicMiddlewareRouteMap?: Map<string, Source>
): Promise<Source[]> {
if (routesManifest) {
switch (routesManifest.version) {
case 1:
@@ -307,7 +307,7 @@ export async function getDynamicRoutes(
}

const { page, namedRegex, regex, routeKeys } = params;
const route: RouteWithSrc = {
const route: Source = {
src: namedRegex || regex,
dest: `${!isDev ? path.join('/', entryDirectory, page) : page}${
routeKeys
@@ -400,7 +400,7 @@ export async function getDynamicRoutes(
matcher: getRouteRegex && getRouteRegex(pageName).re,
}));

const routes: RouteWithSrc[] = [];
const routes: Source[] = [];
pageMatchers.forEach(pageMatcher => {
// in `vercel dev` we don't need to prefix the destination
const dest = !isDev
@@ -419,7 +419,7 @@ export async function getDynamicRoutes(
}

export function localizeDynamicRoutes(
dynamicRoutes: RouteWithSrc[],
dynamicRoutes: Source[],
dynamicPrefix: string,
entryDirectory: string,
staticPages: Files,
@@ -427,8 +427,8 @@ export function localizeDynamicRoutes(
routesManifest?: RoutesManifest,
isServerMode?: boolean,
isCorrectLocaleAPIRoutes?: boolean
): RouteWithSrc[] {
return dynamicRoutes.map((route: RouteWithSrc) => {
): Source[] {
return dynamicRoutes.map((route: Source) => {
// i18n is already handled for middleware
if (route.middleware !== undefined || route.middlewarePath !== undefined)
return route;
@@ -1665,7 +1665,6 @@ type OnPrerenderRouteArgs = {
pageLambdaMap: { [key: string]: string };
routesManifest?: RoutesManifest;
isCorrectNotFoundRoutes?: boolean;
isEmptyAllowQueryForPrendered?: boolean;
};
let prerenderGroup = 1;

@@ -1699,7 +1698,6 @@ export const onPrerenderRoute =
pageLambdaMap,
routesManifest,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
} = prerenderRouteArgs;

if (isBlocking && isFallback) {
@@ -1903,6 +1901,7 @@ export const onPrerenderRoute =
// a given path. All other query keys will be striped. We can automatically
// detect this for prerender (ISR) pages by reading the routes manifest file.
const pageKey = srcRoute || routeKey;
const isDynamic = isDynamicRoute(pageKey);
const route = routesManifest?.dynamicRoutes.find(
(r): r is RoutesManifestRoute =>
r.page === pageKey && !('isMiddleware' in r)
@@ -1912,33 +1911,14 @@ export const onPrerenderRoute =
// we have sufficient information to set it
let allowQuery: string[] | undefined;

if (isEmptyAllowQueryForPrendered) {
const isDynamic = isDynamicRoute(routeKey);

if (!isDynamic) {
// for non-dynamic routes we use an empty array since
// no query values bust the cache for non-dynamic prerenders
// prerendered paths also do not pass allowQuery as they match
// during handle: 'filesystem' so should not cache differently
// by query values
allowQuery = [];
} else if (routeKeys) {
// if we have routeKeys in the routes-manifest we use those
// for allowQuery for dynamic routes
allowQuery = Object.values(routeKeys);
}
} else {
const isDynamic = isDynamicRoute(pageKey);

if (routeKeys) {
// if we have routeKeys in the routes-manifest we use those
// for allowQuery for dynamic routes
allowQuery = Object.values(routeKeys);
} else if (!isDynamic) {
// for non-dynamic routes we use an empty array since
// no query values bust the cache for non-dynamic prerenders
allowQuery = [];
}
if (routeKeys) {
// if we have routeKeys in the routes-manifest we use those
// for allowQuery for dynamic routes
allowQuery = Object.values(routeKeys);
} else if (!isDynamic) {
// for non-dynamic routes we use an empty array since
// no query values bust the cache for non-dynamic prerenders
allowQuery = [];
}

prerenders[outputPathPage] = new Prerender({
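The hunk above is the heart of this change: how `allowQuery` (the query keys allowed to vary a prerender's cache entry) is chosen, with the version-gated branch being removed. A standalone sketch of that decision, using simplified stand-in names rather than the builder's real signatures (the real code derives `isDynamic` from `routeKey` or `pageKey` depending on the branch):

```ts
type RouteKeys = Record<string, string> | undefined;

function resolveAllowQuery(
  isDynamic: boolean,
  routeKeys: RouteKeys,
  emptyAllowQueryForPrerendered: boolean
): string[] | undefined {
  if (emptyAllowQueryForPrerendered) {
    // Newer Next.js: non-dynamic prerenders never vary by query, so the
    // cache key ignores all query values.
    if (!isDynamic) return [];
    // Dynamic prerenders only vary by their route parameters.
    if (routeKeys) return Object.values(routeKeys);
    return undefined;
  }
  // Older behavior: prefer routeKeys when present, otherwise only
  // non-dynamic routes get the empty allow-list.
  if (routeKeys) return Object.values(routeKeys);
  return isDynamic ? undefined : [];
}

// Example: a dynamic route like /fallback-blocking/[slug]
resolveAllowQuery(true, { slug: 'slug' }, true); // ['slug']
// Example: a fully static page
resolveAllowQuery(false, undefined, true); // []
```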
@@ -2168,7 +2148,6 @@ interface EdgeFunctionInfo {
page: string;
regexp: string;
wasm?: { filePath: string; name: string }[];
assets?: { filePath: string; name: string }[];
}

export async function getMiddlewareBundle({
@@ -2255,23 +2234,6 @@ export async function getMiddlewareBundle({
{}
);

const assetFiles = (edgeFunction.assets ?? []).reduce(
(acc: Files, { filePath, name }) => {
const fullFilePath = path.join(
entryPath,
outputDirectory,
filePath
);
acc[`assets/${name}`] = new FileFsRef({
mode: 0o644,
contentType: 'application/octet-stream',
fsPath: fullFilePath,
});
return acc;
},
{}
);

return new EdgeFunction({
deploymentTarget: 'v8-worker',
name: edgeFunction.name,
@@ -2289,16 +2251,9 @@ export async function getMiddlewareBundle({
}),
}),
...wasmFiles,
...assetFiles,
},
entrypoint: 'index.js',
envVarsInUse: edgeFunction.env,
assets: (edgeFunction.assets ?? []).map(({ name }) => {
return {
name,
path: `assets/${name}`,
};
}),
});
})(),
routeSrc: getRouteSrc(edgeFunction, routesManifest),
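The removed block above folds each declared edge-function asset into the function's `Files` map under an `assets/` prefix. A condensed, self-contained sketch of that pattern; the function name and parameter values here are illustrative, not part of the builder's API:

```ts
import path from 'path';
import { FileFsRef } from '@vercel/build-utils';
import type { Files } from '@vercel/build-utils';

// Map declared assets to FileFsRef entries keyed by `assets/<name>`,
// mirroring the reduce shown in the removed hunk.
function collectAssetFiles(
  assets: { filePath: string; name: string }[],
  entryPath: string,
  outputDirectory: string
): Files {
  return assets.reduce((acc: Files, { filePath, name }) => {
    acc[`assets/${name}`] = new FileFsRef({
      mode: 0o644,
      contentType: 'application/octet-stream',
      fsPath: path.join(entryPath, outputDirectory, filePath),
    });
    return acc;
  }, {} as Files);
}
```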
@@ -2312,7 +2267,7 @@ export async function getMiddlewareBundle({

const source: {
staticRoutes: Route[];
dynamicRouteMap: Map<string, RouteWithSrc>;
dynamicRouteMap: Map<string, Source>;
edgeFunctions: Record<string, EdgeFunction>;
} = {
staticRoutes: [],
@@ -1,144 +1,8 @@
/* eslint-env jest */
const path = require('path');
const cheerio = require('cheerio');
const { deployAndTest, check, waitFor } = require('../../utils');
const fetch = require('../../../../../test/lib/deployment/fetch-retry');

async function checkForChange(url, initialValue, getNewValue) {
return check(async () => {
const res = await fetch(url);

if (res.status !== 200) {
throw new Error(`Invalid status code ${res.status}`);
}
const newValue = await getNewValue(res);

return initialValue !== newValue
? 'success'
: JSON.stringify({ initialValue, newValue });
}, 'success');
}

const ctx = {};
const { deployAndTest } = require('../../utils');

describe(`${__dirname.split(path.sep).pop()}`, () => {
it('should deploy and pass probe checks', async () => {
const info = await deployAndTest(__dirname);
Object.assign(ctx, info);
});

it.each([
{
title: 'should update content for prerendered path correctly',
pathsToCheck: [
{ urlPath: '/fallback-blocking/first' },
{ urlPath: '/fallback-blocking/first', query: '?slug=first' },
{ urlPath: '/fallback-blocking/first', query: '?slug=random' },
{ urlPath: '/fallback-blocking/first', query: '?another=value' },
],
},
{
title: 'should update content for non-prerendered path correctly',
pathsToCheck: [
{ urlPath: '/fallback-blocking/on-demand-2' },
{
urlPath: '/fallback-blocking/on-demand-2',
query: '?slug=on-demand-2',
},
{ urlPath: '/fallback-blocking/on-demand-2', query: '?slug=random' },
{ urlPath: '/fallback-blocking/on-demand-2', query: '?another=value' },
],
},
])('$title', async ({ pathsToCheck }) => {
let initialRandom;
let initialRandomData;
let preRevalidateRandom;
let preRevalidateRandomData;

const checkPaths = async pathsToCheck => {
for (const { urlPath, query } of pathsToCheck) {
console.log('checking', {
urlPath,
query,
initialRandom,
preRevalidateRandom,
});

if (preRevalidateRandom) {
// wait for change as cache may take a little to propagate
const initialUrl = `${ctx.deploymentUrl}${urlPath}${query || ''}`;
await checkForChange(initialUrl, preRevalidateRandom, async () => {
const res = await fetch(initialUrl);
const $ = cheerio.load(await res.text());
return JSON.parse($('#props').text()).random;
});
}

const res = await fetch(`${ctx.deploymentUrl}${urlPath}${query || ''}`);
expect(res.status).toBe(200);

const $ = await cheerio.load(await res.text());
const props = JSON.parse($('#props').text());

if (initialRandom) {
// for fallback paths the initial value is generated
// in the foreground and then a revalidation is kicked off
// in the background so the initial value will be replaced
if (initialRandom !== props.random && urlPath.includes('on-demand')) {
initialRandom = props.random;
} else {
expect(initialRandom).toBe(props.random);
}
} else {
initialRandom = props.random;
}
expect(isNaN(initialRandom)).toBe(false);

const dataRes = await fetch(
`${ctx.deploymentUrl}/_next/data/testing-build-id${urlPath}.json${
query || ''
}`
);
expect(dataRes.status).toBe(200);

const { pageProps: dataProps } = await dataRes.json();

if (initialRandomData) {
// for fallback paths the initial value is generated
// in the foreground and then a revalidation is kicked off
// in the background so the initial value will be replaced
if (
initialRandomData !== dataProps.random &&
urlPath.includes('on-demand-2')
) {
initialRandomData = dataProps.random;
} else {
expect(initialRandomData).toBe(dataProps.random);
}
} else {
initialRandomData = dataProps.random;
}
expect(isNaN(initialRandomData)).toBe(false);
}
};

await checkPaths(pathsToCheck);

preRevalidateRandom = initialRandom;
preRevalidateRandomData = initialRandomData;

initialRandom = undefined;
initialRandomData = undefined;

const revalidateRes = await fetch(
`${ctx.deploymentUrl}/api/revalidate?urlPath=${pathsToCheck[0].urlPath}`
);
expect(revalidateRes.status).toBe(200);
expect((await revalidateRes.json()).revalidated).toBe(true);

await checkPaths(pathsToCheck);

expect(preRevalidateRandom).toBeDefined();
expect(preRevalidateRandomData).toBeDefined();
await deployAndTest(__dirname);
});
});
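The deleted test above relies on `checkForChange()` to poll a URL until the random value embedded in the page changes after revalidation. A generic sketch of that polling idea; the helper name, timings, and signature are illustrative, not the test suite's `check()` utility:

```ts
// Poll an async probe until it returns something different from `initialValue`,
// or give up after `timeoutMs`. Mirrors the intent of the removed checkForChange().
async function waitForChange<T>(
  getValue: () => Promise<T>,
  initialValue: T,
  timeoutMs = 30_000,
  intervalMs = 1_000
): Promise<T> {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const value = await getValue();
    if (value !== initialValue) return value;
    await new Promise(resolve => setTimeout(resolve, intervalMs));
  }
  throw new Error(`value did not change from ${String(initialValue)} within ${timeoutMs}ms`);
}
```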
@@ -1,10 +0,0 @@
export default async function handler(req, res) {
try {
console.log('revalidating', req.query.urlPath);
await res.revalidate(req.query.urlPath);
return res.json({ revalidated: true });
} catch (err) {
console.error(err);
return res.json({ revalidated: false });
}
}
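The deleted API route above exposes Next.js on-demand ISR through `res.revalidate()`. A hedged sketch of how a client (such as the deleted test) would exercise it; the deployment URL is a placeholder and a global `fetch` (Node 18+) is assumed:

```ts
const deploymentUrl = 'https://example.vercel.app'; // placeholder

// Ask the deployment to regenerate one prerendered path, then re-fetch it.
async function revalidateAndRefetch(urlPath: string): Promise<string> {
  const revalidateRes = await fetch(
    `${deploymentUrl}/api/revalidate?urlPath=${encodeURIComponent(urlPath)}`
  );
  if (!revalidateRes.ok) {
    throw new Error(`revalidate failed: ${revalidateRes.status}`);
  }
  const { revalidated } = (await revalidateRes.json()) as { revalidated: boolean };
  if (!revalidated) {
    throw new Error('revalidation was rejected');
  }
  // The regenerated HTML may take a moment to propagate through the cache.
  const pageRes = await fetch(`${deploymentUrl}${urlPath}`);
  return pageRes.text();
}
```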
@@ -16,7 +16,7 @@ export const getStaticProps = ({ params }) => {

export const getStaticPaths = () => {
return {
paths: ['/fallback-blocking/first', '/fallback-blocking/on-demand-1'],
paths: ['/fallback-blocking/first'],
fallback: 'blocking',
};
};
Some files were not shown because too many files have changed in this diff