Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: @vercel/py ... @vercel/py (9 commits)

| Author | SHA1 | Date |
|---|---|---|
| | b37ac5f798 | |
| | 01ad4c4c8e | |
| | 577fd3e979 | |
| | d649a3c931 | |
| | a036b03398 | |
| | 1a6a030df5 | |
| | fc8b68eda2 | |
| | 9ecc89a3c7 | |
| | 2a4e066163 | |

@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "5.6.0",
"version": "5.7.0",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",

@@ -425,6 +425,9 @@ export interface BuildResultV2Typical {
domain: string;
value: string;
}>;
framework?: {
version: string;
};
}

export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;

@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "28.6.0",
"version": "28.7.1",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -41,17 +41,16 @@
"node": ">= 14"
},
"dependencies": {
"@vercel/build-utils": "5.6.0",
"@vercel/go": "2.2.18",
"@vercel/hydrogen": "0.0.32",
"@vercel/next": "3.3.0",
"@vercel/node": "2.7.0",
"@vercel/python": "3.1.28",
"@vercel/redwood": "1.0.38",
"@vercel/remix": "1.1.0",
"@vercel/ruby": "1.3.44",
"@vercel/static-build": "1.0.40",
"update-notifier": "5.1.0"
"@vercel/build-utils": "5.7.0",
"@vercel/go": "2.2.19",
"@vercel/hydrogen": "0.0.33",
"@vercel/next": "3.3.2",
"@vercel/node": "2.7.1",
"@vercel/python": "3.1.29",
"@vercel/redwood": "1.0.39",
"@vercel/remix": "1.1.1",
"@vercel/ruby": "1.3.45",
"@vercel/static-build": "1.0.42"
},
"devDependencies": {
"@alex_neo/jest-expect-message": "1.0.5",
@@ -95,10 +94,10 @@
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@types/yauzl-promise": "2.1.0",
"@vercel/client": "12.2.20",
"@vercel/client": "12.2.21",
"@vercel/error-utils": "1.0.3",
"@vercel/frameworks": "1.1.13",
"@vercel/fs-detectors": "3.5.3",
"@vercel/frameworks": "1.1.14",
"@vercel/fs-detectors": "3.5.4",
"@vercel/fun": "1.0.4",
"@vercel/ncc": "0.24.0",
"@zeit/source-map-support": "0.6.2",

@@ -1,7 +1,7 @@
import cpy from 'cpy';
import execa from 'execa';
import { join } from 'path';
import { remove, writeFile } from 'fs-extra';
import { remove, readJSON, writeFile } from 'fs-extra';

const dirRoot = join(__dirname, '..');
const distRoot = join(dirRoot, 'dist');
@@ -43,15 +43,15 @@ async function main() {
stdio: 'inherit',
});

const pkg = await readJSON(join(dirRoot, 'package.json'));
const dependencies = Object.keys(pkg?.dependencies ?? {});
// Do the initial `ncc` build
console.log();
const args = [
'ncc',
'build',
'--external',
'update-notifier',
'src/index.ts',
];
console.log('Dependencies:', dependencies);
const externs = [];
for (const dep of dependencies) {
externs.push('--external', dep);
}
const args = ['ncc', 'build', 'src/index.ts', ...externs];
await execa('yarn', args, { stdio: 'inherit', cwd: dirRoot });

// `ncc` has some issues with `@vercel/fun`'s runtime files:
@@ -78,6 +78,10 @@ async function main() {
// Band-aid to bundle stuff that `ncc` neglects to bundle
await cpy(join(dirRoot, 'src/util/projects/VERCEL_DIR_README.txt'), distRoot);
await cpy(join(dirRoot, 'src/util/dev/builder-worker.js'), distRoot);
await cpy(
join(dirRoot, 'src/util/get-latest-version/get-latest-worker.js'),
distRoot
);

console.log('Finished building Vercel CLI');
}

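The build-script hunk above stops hard-coding a single `--external update-notifier` flag and instead externalizes every runtime dependency read from `package.json`. A minimal sketch of just that flag expansion (the dependency names below are hypothetical placeholders, not read from the real manifest):

```ts
// Sketch of the `--external` expansion performed by the build script above.
// The dependency names are illustrative placeholders.
function nccArgs(dependencies: string[]): string[] {
  const externs: string[] = [];
  for (const dep of dependencies) {
    externs.push('--external', dep);
  }
  return ['ncc', 'build', 'src/index.ts', ...externs];
}

console.log(nccArgs(['@vercel/build-utils', 'chalk']));
// -> ['ncc', 'build', 'src/index.ts', '--external', '@vercel/build-utils', '--external', 'chalk']
```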
@@ -23,6 +23,7 @@ export const help = () => `
login [email] Logs into your account or creates a new one
logout Logs out of your account
pull [path] Pull your Project Settings from the cloud
rollback [url|id] Quickly revert back to a previous deployment [beta]
switch [scope] Switches between teams and your personal account

${chalk.dim('Advanced')}

@@ -33,6 +33,7 @@ export default async function dev(
if (link.status === 'not_linked' && !process.env.__VERCEL_SKIP_DEV_CMD) {
link = await setupAndLink(client, cwd, {
autoConfirm: opts['--yes'],
link,
successEmoji: 'link',
setupMsg: 'Set up and develop',
});

@@ -29,6 +29,7 @@ export default new Map([
['pull', 'pull'],
['remove', 'remove'],
['rm', 'remove'],
['rollback', 'rollback'],
['secret', 'secrets'],
['secrets', 'secrets'],
['switch', 'teams'],

@@ -147,6 +147,7 @@ export default async function main(client: Client) {
if (status === 'not_linked' && !app) {
const linkedProject = await ensureLink('list', client, path, {
autoConfirm,
link,
});
if (typeof linkedProject === 'number') {
return linkedProject;

@@ -18,7 +18,7 @@ import getDeploymentsByProjectId, {
import { getPkgName, getCommandName } from '../util/pkg-name';
import getArgs from '../util/get-args';
import handleError from '../util/handle-error';
import Client from '../util/client';
import type Client from '../util/client';
import { Output } from '../util/output';
import { Alias, Project } from '../types';
import { NowError } from '../util/now-error';

packages/cli/src/commands/rollback.ts (new file, 122 lines)
@@ -0,0 +1,122 @@
import chalk from 'chalk';
import type Client from '../util/client';
import { ensureLink } from '../util/link/ensure-link';
import getArgs from '../util/get-args';
import { getPkgName } from '../util/pkg-name';
import handleError from '../util/handle-error';
import logo from '../util/output/logo';
import ms from 'ms';
import requestRollback from '../util/rollback/request-rollback';
import rollbackStatus from '../util/rollback/status';
import validatePaths from '../util/validate-paths';

const help = () => {
console.log(`
${chalk.bold(
`${logo} ${getPkgName()} rollback`
)} [deploymentId|deploymentName]

Quickly revert back to a previous deployment.

${chalk.dim('Options:')}

-h, --help Output usage information
-A ${chalk.bold.underline('FILE')}, --local-config=${chalk.bold.underline(
'FILE'
)} Path to the local ${'`vercel.json`'} file
-Q ${chalk.bold.underline('DIR')}, --global-config=${chalk.bold.underline(
'DIR'
)} Path to the global ${'`.vercel`'} directory
-d, --debug Debug mode [off]
-t ${chalk.bold.underline('TOKEN')}, --token=${chalk.bold.underline(
'TOKEN'
)} Login token
--timeout=${chalk.bold.underline(
'TIME'
)} Time to wait for rollback completion [3m]
-y, --yes Skip questions when setting up new project using default scope and settings

${chalk.dim('Examples:')}

${chalk.gray('–')} Show the status of any current pending rollbacks

${chalk.cyan(`$ ${getPkgName()} rollback`)}
${chalk.cyan(`$ ${getPkgName()} rollback status`)}
${chalk.cyan(`$ ${getPkgName()} rollback status --timeout 30s`)}

${chalk.gray('–')} Rollback a deployment using id or url

${chalk.cyan(`$ ${getPkgName()} rollback <deployment id/url>`)}
`);
};

/**
* `vc rollback` command
* @param {Client} client
* @returns {Promise<number>} Resolves an exit code; 0 on success
*/
export default async (client: Client): Promise<number> => {
let argv;
try {
argv = getArgs(client.argv.slice(2), {
'--debug': Boolean,
'-d': '--debug',
'--timeout': String,
'--yes': Boolean,
'-y': '--yes',
});
} catch (err) {
handleError(err);
return 1;
}

if (argv['--help'] || argv._[0] === 'help') {
help();
return 2;
}

// ensure the current directory is good
const cwd = argv['--cwd'] || process.cwd();
const pathValidation = await validatePaths(client, [cwd]);
if (!pathValidation.valid) {
return pathValidation.exitCode;
}

// ensure the current directory is a linked project
const linkedProject = await ensureLink(
'rollback',
client,
pathValidation.path,
{
autoConfirm: Boolean(argv['--yes']),
}
);
if (typeof linkedProject === 'number') {
return linkedProject;
}

// validate the timeout
let timeout = argv['--timeout'];
if (timeout && ms(timeout) === undefined) {
client.output.error(`Invalid timeout "${timeout}"`);
return 1;
}

const { project } = linkedProject;
const actionOrDeployId = argv._[1] || 'status';

if (actionOrDeployId === 'status') {
return await rollbackStatus({
client,
project,
timeout,
});
}

return await requestRollback({
client,
deployId: actionOrDeployId,
project,
timeout,
});
};

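The `--timeout` handling above leans on the `ms` package: strings `ms` cannot parse come back as `undefined` and are rejected, while `--timeout 0` parses to `0`, which `requestRollback` later treats as "request the rollback, then exit without polling". A small sketch of that guard, assuming `ms` typings that accept an arbitrary string (as the CLI's pinned version does):

```ts
import ms from 'ms';

// Mirrors the validation in the command above.
function parseTimeout(timeout: string): number {
  const parsed: number | undefined = ms(timeout);
  if (parsed === undefined) {
    throw new Error(`Invalid timeout "${timeout}"`);
  }
  return parsed;
}

console.log(parseTimeout('3m'));  // 180000, the documented default wait
console.log(parseTimeout('30s')); // 30000
console.log(parseTimeout('0'));   // 0, meaning "do not wait for completion"
// parseTimeout('foo') throws: Invalid timeout "foo"
```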
@@ -18,7 +18,7 @@ import sourceMap from '@zeit/source-map-support';
import { mkdirp } from 'fs-extra';
import chalk from 'chalk';
import epipebomb from 'epipebomb';
import updateNotifier from 'update-notifier';
import getLatestVersion from './util/get-latest-version';
import { URL } from 'url';
import * as Sentry from '@sentry/node';
import hp from './util/humanize-path';
@@ -55,13 +55,6 @@ import { VercelConfig } from '@vercel/client';

const isCanary = pkg.version.includes('canary');

// Checks for available update and returns an instance
const notifier = updateNotifier({
pkg,
distTag: isCanary ? 'canary' : 'latest',
updateCheckInterval: 1000 * 60 * 60 * 24 * 7, // 1 week
});

const VERCEL_DIR = getGlobalPathConfig();
const VERCEL_CONFIG_PATH = configFiles.getConfigFilePath();
const VERCEL_AUTH_CONFIG_PATH = configFiles.getAuthConfigFilePath();
@@ -149,8 +142,15 @@ const main = async () => {
}

// Print update information, if available
if (notifier.update && notifier.update.latest !== pkg.version && isTTY) {
const { latest } = notifier.update;
if (isTTY && !process.env.NO_UPDATE_NOTIFIER) {
// Check if an update is available. If so, `latest` will contain a string
// of the latest version, otherwise `undefined`.
const latest = getLatestVersion({
distTag: isCanary ? 'canary' : 'latest',
output,
pkg,
});
if (latest) {
console.log(
info(
`${chalk.black.bgCyan('UPDATE AVAILABLE')} ` +
@@ -161,11 +161,12 @@ const main = async () => {
);

console.log(
info(
`${info(
`Changelog: https://github.com/vercel/vercel/releases/tag/vercel@${latest}`
)
)}\n`
);
}
}

// The second argument to the command can be:
//
@@ -174,7 +175,7 @@ const main = async () => {
const targetOrSubcommand = argv._[2];

// Currently no beta commands - add here as needed
const betaCommands: string[] = [];
const betaCommands: string[] = ['rollback'];
if (betaCommands.includes(targetOrSubcommand)) {
console.log(
`${chalk.grey(
@@ -555,6 +556,9 @@ const main = async () => {
case 'remove':
func = require('./commands/remove').default;
break;
case 'rollback':
func = require('./commands/rollback').default;
break;
case 'secrets':
func = require('./commands/secrets').default;
break;

@@ -287,6 +287,7 @@ export interface Project extends ProjectSettings {
link?: ProjectLinkData;
alias?: ProjectAliasTarget[];
latestDeployments?: Partial<Deployment>[];
lastRollbackTarget: RollbackTarget | null;
}

export interface Org {
@@ -321,6 +322,20 @@ export type ProjectLinkResult =
| 'MISSING_PROJECT_SETTINGS';
};

export type RollbackJobStatus =
| 'pending'
| 'in-progress'
| 'succeeded'
| 'failed'
| 'skipped';

export interface RollbackTarget {
fromDeploymentId: string;
jobStatus: RollbackJobStatus;
requestedAt: number;
toDeploymentId: string;
}

export interface Token {
id: string;
name: string;

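The `RollbackJobStatus` union added here is what the status-polling code later in this change keys off: `'pending'` and `'in-progress'` mean keep waiting, anything else is terminal. A self-contained sketch (types re-declared locally for illustration):

```ts
// Local re-declarations of the new types so the sketch stands alone.
type RollbackJobStatus =
  | 'pending'
  | 'in-progress'
  | 'succeeded'
  | 'failed'
  | 'skipped';

interface RollbackTarget {
  fromDeploymentId: string;
  jobStatus: RollbackJobStatus;
  requestedAt: number;
  toDeploymentId: string;
}

// 'pending' and 'in-progress' mean "keep polling"; every other status is terminal.
function isSettled(status: RollbackJobStatus): boolean {
  return status !== 'pending' && status !== 'in-progress';
}

const target: RollbackTarget = {
  fromDeploymentId: 'dpl_old', // hypothetical ids, for illustration only
  jobStatus: 'succeeded',
  requestedAt: Date.now(),
  toDeploymentId: 'dpl_new',
};

console.log(isSettled(target.jobStatus)); // true
```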
@@ -1,4 +1,4 @@
import Client from '../client';
import type Client from '../client';
import toHost from '../to-host';
import { Deployment } from '../../types';
import {

@@ -571,7 +571,7 @@ export class DeploymentNotFound extends NowError<
super({
code: 'DEPLOYMENT_NOT_FOUND',
meta: { id, context },
message: `Can't find the deployment ${id} under the context ${context}`,
message: `Can't find the deployment "${id}" under the context "${context}"`,
});
}
}

packages/cli/src/util/get-latest-version/get-latest-worker.js (new file, 186 lines)
@@ -0,0 +1,186 @@
|
||||
/**
|
||||
* This file is spawned in the background and checks npm for the latest version
|
||||
* of the CLI, then writes the version to the cache file.
|
||||
*
|
||||
* NOTE: Since this file runs asynchronously in the background, it's possible
|
||||
* for multiple instances of this file to be running at the same time leading
|
||||
* to a race condition where the most recent instance will overwrite the
|
||||
* previous cache file resetting the `notified` flag and cause the update
|
||||
* notification to appear for multiple consecutive commands. Not the end of
|
||||
* the world, but something to be aware of.
|
||||
*/
|
||||
|
||||
const fetch = require('node-fetch');
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const { Agent: HttpsAgent } = require('https');
|
||||
const { bold, gray, red } = require('chalk');
|
||||
const { format, inspect } = require('util');
|
||||
|
||||
/**
|
||||
* A simple output helper which accumulates error and debug log messages in
|
||||
* memory for potential persistence to disk while immediately outputting errors
|
||||
* and debug messages, when the `--debug` flag is set, to `stderr`.
|
||||
*/
|
||||
class WorkerOutput {
|
||||
debugLog = [];
|
||||
logFile = null;
|
||||
|
||||
constructor({ debug = true }) {
|
||||
this.debugOutputEnabled = debug;
|
||||
}
|
||||
|
||||
debug(...args) {
|
||||
this.print('debug', args);
|
||||
}
|
||||
|
||||
error(...args) {
|
||||
this.print('error', args);
|
||||
}
|
||||
|
||||
print(type, args) {
|
||||
const str = format(
|
||||
...args.map(s => (typeof s === 'string' ? s : inspect(s)))
|
||||
);
|
||||
this.debugLog.push(`[${new Date().toISOString()}] [${type}] ${str}`);
|
||||
if (type === 'debug' && this.debugOutputEnabled) {
|
||||
console.error(
|
||||
`${gray('>')} ${bold('[debug]')} ${gray(
|
||||
`[${new Date().toISOString()}]`
|
||||
)} ${str}`
|
||||
);
|
||||
} else if (type === 'error') {
|
||||
console.error(`${red(`Error:`)} ${str}`);
|
||||
}
|
||||
}
|
||||
|
||||
setLogFile(file) {
|
||||
// wire up the exit handler the first time the log file is set
|
||||
if (this.logFile === null) {
|
||||
process.on('exit', () => {
|
||||
if (this.debugLog.length) {
|
||||
fs.outputFileSync(this.logFile, this.debugLog.join('\n'));
|
||||
}
|
||||
});
|
||||
}
|
||||
this.logFile = file;
|
||||
}
|
||||
}
|
||||
|
||||
const output = new WorkerOutput({
|
||||
// enable the debug logging if the `--debug` is set or if this worker script
|
||||
// was directly executed
|
||||
debug: process.argv.includes('--debug') || !process.connected,
|
||||
});
|
||||
|
||||
process.on('unhandledRejection', err => {
|
||||
output.error('Exiting worker due to unhandled rejection:', err);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
// this timer will prevent this worker process from running longer than 10s
|
||||
const timer = setTimeout(() => {
|
||||
output.error('Worker timed out after 10 seconds');
|
||||
process.exit(1);
|
||||
}, 10000);
|
||||
|
||||
// wait for the parent to give us the work payload
|
||||
process.once('message', async msg => {
|
||||
output.debug('Received message from parent:', msg);
|
||||
|
||||
output.debug('Disconnecting from parent');
|
||||
process.disconnect();
|
||||
|
||||
const { cacheFile, distTag, name, updateCheckInterval } = msg;
|
||||
const cacheFileParsed = path.parse(cacheFile);
|
||||
await fs.mkdirp(cacheFileParsed.dir);
|
||||
|
||||
output.setLogFile(
|
||||
path.join(cacheFileParsed.dir, `${cacheFileParsed.name}.log`)
|
||||
);
|
||||
|
||||
const lockFile = path.join(
|
||||
cacheFileParsed.dir,
|
||||
`${cacheFileParsed.name}.lock`
|
||||
);
|
||||
|
||||
try {
|
||||
// check for a lock file and either bail if running or write our pid and continue
|
||||
output.debug(`Checking lock file: ${lockFile}`);
|
||||
if (await isRunning(lockFile)) {
|
||||
output.debug('Worker already running, exiting');
|
||||
process.exit(1);
|
||||
}
|
||||
output.debug(`Initializing lock file with pid ${process.pid}`);
|
||||
await fs.writeFile(lockFile, String(process.pid), 'utf-8');
|
||||
|
||||
// fetch the latest version from npm
|
||||
const agent = new HttpsAgent({
|
||||
keepAlive: true,
|
||||
maxSockets: 15, // See: `npm config get maxsockets`
|
||||
});
|
||||
const headers = {
|
||||
accept:
|
||||
'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*',
|
||||
};
|
||||
const url = `https://registry.npmjs.org/${name}`;
|
||||
output.debug(`Fetching ${url}`);
|
||||
const res = await fetch(url, { agent, headers });
|
||||
const json = await res.json();
|
||||
const tags = json['dist-tags'];
|
||||
const version = tags[distTag];
|
||||
|
||||
if (version) {
|
||||
output.debug(`Found dist tag "${distTag}" with version "${version}"`);
|
||||
} else {
|
||||
output.error(`Dist tag "${distTag}" not found`);
|
||||
output.debug('Available dist tags:', Object.keys(tags));
|
||||
}
|
||||
|
||||
output.debug(`Writing cache file: ${cacheFile}`);
|
||||
await fs.outputJSON(cacheFile, {
|
||||
expireAt: Date.now() + updateCheckInterval,
|
||||
notified: false,
|
||||
version,
|
||||
});
|
||||
} catch (err) {
|
||||
output.error(`Failed to get package info:`, err);
|
||||
} finally {
|
||||
clearTimeout(timer);
|
||||
|
||||
output.debug(`Releasing lock file: ${lockFile}`);
|
||||
await fs.remove(lockFile);
|
||||
|
||||
output.debug(`Worker finished successfully!`);
|
||||
|
||||
// force the worker to exit
|
||||
process.exit(0);
|
||||
}
|
||||
});
|
||||
|
||||
// signal the parent process we're ready
|
||||
if (process.connected) {
|
||||
output.debug("Notifying parent we're ready");
|
||||
process.send({ type: 'ready' });
|
||||
} else {
|
||||
console.error('No IPC bridge detected, exiting');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
async function isRunning(lockFile) {
|
||||
try {
|
||||
const pid = parseInt(await fs.readFile(lockFile, 'utf-8'));
|
||||
output.debug(`Found lock file with pid: ${pid}`);
|
||||
|
||||
// checks for existence of a process; throws if not found
|
||||
process.kill(pid, 0);
|
||||
|
||||
// process is still running
|
||||
return true;
|
||||
} catch (err) {
|
||||
// lock file does not exist or process is not running and pid is stale
|
||||
output.debug(`Resetting lock file: ${err.toString()}`);
|
||||
await fs.remove(lockFile);
|
||||
return false;
|
||||
}
|
||||
}
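The worker above serializes itself with a pid lock file; the trick is that `process.kill(pid, 0)` sends no signal and only throws when the target process no longer exists. A condensed, self-contained sketch of that pattern (paths and helpers simplified, not copied from the worker verbatim):

```ts
import { promises as fs } from 'fs';

// True when the pid recorded in lockFile still belongs to a live process.
// process.kill(pid, 0) delivers no signal; it throws if the process is gone.
async function isRunning(lockFile: string): Promise<boolean> {
  try {
    const pid = parseInt(await fs.readFile(lockFile, 'utf-8'), 10);
    process.kill(pid, 0);
    return true;
  } catch {
    // lock file missing, unreadable, or pointing at a dead process
    await fs.rm(lockFile, { force: true });
    return false;
  }
}

// Bail out if another worker holds the lock, otherwise claim it with our pid.
async function claimLock(lockFile: string): Promise<boolean> {
  if (await isRunning(lockFile)) return false;
  await fs.writeFile(lockFile, String(process.pid), 'utf-8');
  return true;
}
```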
|
||||
packages/cli/src/util/get-latest-version/index.ts (new file, 151 lines)
@@ -0,0 +1,151 @@
|
||||
import semver from 'semver';
|
||||
import XDGAppPaths from 'xdg-app-paths';
|
||||
import { dirname, parse as parsePath, resolve as resolvePath } from 'path';
|
||||
import type { Output } from '../output';
|
||||
import { existsSync, outputJSONSync, readJSONSync } from 'fs-extra';
|
||||
import type { PackageJson } from '@vercel/build-utils';
|
||||
import { spawn } from 'child_process';
|
||||
|
||||
interface GetLatestVersionOptions {
|
||||
cacheDir?: string;
|
||||
distTag?: string;
|
||||
output?: Output;
|
||||
pkg: PackageJson;
|
||||
updateCheckInterval?: number;
|
||||
}
|
||||
|
||||
interface PackageInfoCache {
|
||||
version: string;
|
||||
expireAt: number;
|
||||
notified: boolean;
|
||||
}
|
||||
|
||||
interface GetLatestWorkerPayload {
|
||||
cacheFile?: string;
|
||||
distTag?: string;
|
||||
updateCheckInterval?: number;
|
||||
name?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Determines if it needs to check for a newer CLI version and returns the last
|
||||
* detected version. The version could be stale, but still newer than the
|
||||
* current version.
|
||||
*
|
||||
* @returns {String|undefined} If a newer version is found, then the latest
|
||||
* version, otherwise `undefined`.
|
||||
*/
|
||||
export default function getLatestVersion({
|
||||
cacheDir = XDGAppPaths('com.vercel.cli').cache(),
|
||||
distTag = 'latest',
|
||||
output,
|
||||
pkg,
|
||||
updateCheckInterval = 1000 * 60 * 60 * 24 * 7, // 1 week
|
||||
}: GetLatestVersionOptions): string | undefined {
|
||||
if (
|
||||
!pkg ||
|
||||
typeof pkg !== 'object' ||
|
||||
!pkg.name ||
|
||||
typeof pkg.name !== 'string'
|
||||
) {
|
||||
throw new TypeError('Expected package to be an object with a package name');
|
||||
}
|
||||
|
||||
const cacheFile = resolvePath(
|
||||
cacheDir,
|
||||
'package-updates',
|
||||
`${pkg.name}-${distTag}.json`
|
||||
);
|
||||
|
||||
let cache: PackageInfoCache | undefined;
|
||||
try {
|
||||
cache = readJSONSync(cacheFile);
|
||||
} catch (err: any) {
|
||||
// cache does not exist or malformed
|
||||
if (err.code !== 'ENOENT') {
|
||||
output?.debug(`Error reading latest package cache file: ${err}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (!cache || cache.expireAt < Date.now()) {
|
||||
spawnWorker(
|
||||
{
|
||||
cacheFile,
|
||||
distTag,
|
||||
updateCheckInterval,
|
||||
name: pkg.name,
|
||||
},
|
||||
output
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
cache &&
|
||||
!cache.notified &&
|
||||
pkg.version &&
|
||||
semver.lt(pkg.version, cache.version)
|
||||
) {
|
||||
cache.notified = true;
|
||||
outputJSONSync(cacheFile, cache);
|
||||
return cache.version;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Spawn the worker, wait for the worker to report it's ready, then signal the
|
||||
* worker to fetch the latest version.
|
||||
*/
|
||||
function spawnWorker(
|
||||
payload: GetLatestWorkerPayload,
|
||||
output: Output | undefined
|
||||
) {
|
||||
// we need to find the update worker script since the location is
|
||||
// different based on production vs tests
|
||||
let dir = dirname(__filename);
|
||||
let script = resolvePath(dir, 'dist', 'get-latest-worker.js');
|
||||
const { root } = parsePath(dir);
|
||||
while (!existsSync(script)) {
|
||||
dir = dirname(dir);
|
||||
if (dir === root) {
|
||||
// didn't find it, bail
|
||||
output?.debug('Failed to find the get latest worker script!');
|
||||
return;
|
||||
}
|
||||
script = resolvePath(dir, 'dist', 'get-latest-worker.js');
|
||||
}
|
||||
|
||||
// spawn the worker with an IPC channel
|
||||
output?.debug(`Spawning ${script}`);
|
||||
const args = [script];
|
||||
if (output?.debugEnabled) {
|
||||
args.push('--debug');
|
||||
}
|
||||
const worker = spawn(process.execPath, args, {
|
||||
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
|
||||
windowsHide: true,
|
||||
});
|
||||
|
||||
// we allow the child 2 seconds to let us know it's ready before we give up
|
||||
const workerReadyTimer = setTimeout(() => worker.kill(), 2000);
|
||||
|
||||
// listen for an early on close error, but then we remove it when unref
|
||||
const onClose = (code: number) => {
|
||||
output?.debug(`Get latest worker exited (code ${code})`);
|
||||
};
|
||||
worker.on('close', onClose);
|
||||
|
||||
// generally, the parent won't be around long enough to handle a non-zero
|
||||
// worker process exit code
|
||||
worker.on('error', err => {
|
||||
output?.log(`Failed to spawn get latest worker: ${err.stack}`);
|
||||
});
|
||||
|
||||
// wait for the worker to start and notify us it is ready
|
||||
worker.once('message', () => {
|
||||
clearTimeout(workerReadyTimer);
|
||||
|
||||
worker.removeListener('close', onClose);
|
||||
worker.send(payload);
|
||||
worker.unref();
|
||||
});
|
||||
}
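`getLatestVersion` above never blocks the CLI: it reads the cache file synchronously, spawns the background worker only when the cache is missing or expired, and reports a newer version at most once per cache entry via the `notified` flag. A compact sketch of that read-side decision (the cache shape matches the `PackageInfoCache` interface above; `refreshCache` stands in for spawning the worker):

```ts
import semver from 'semver';
import { outputJSONSync, readJSONSync } from 'fs-extra';

interface PackageInfoCache {
  version: string;
  expireAt: number;
  notified: boolean;
}

// Return the newer version exactly once, flipping `notified` so
// subsequent invocations stay quiet until the worker refreshes the cache.
function checkCache(
  cacheFile: string,
  currentVersion: string,
  refreshCache: () => void
): string | undefined {
  let cache: PackageInfoCache | undefined;
  try {
    cache = readJSONSync(cacheFile);
  } catch {
    // missing or malformed cache file: treat as "no information yet"
  }

  if (!cache || cache.expireAt < Date.now()) {
    refreshCache(); // the real code spawns get-latest-worker.js here
  }

  if (cache && !cache.notified && semver.lt(currentVersion, cache.version)) {
    cache.notified = true;
    outputJSONSync(cacheFile, cache);
    return cache.version;
  }
  return undefined;
}
```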
|
||||
@@ -32,7 +32,11 @@ export async function ensureLink(
|
||||
cwd: string,
|
||||
opts: SetupAndLinkOptions
|
||||
): Promise<LinkResult | number> {
|
||||
let link = await getLinkedProject(client, cwd);
|
||||
let { link } = opts;
|
||||
if (!link) {
|
||||
link = await getLinkedProject(client, cwd);
|
||||
opts.link = link;
|
||||
}
|
||||
|
||||
if (
|
||||
(link.status === 'linked' && opts.forceDelete) ||
|
||||
|
||||
@@ -30,8 +30,9 @@ import Now, { CreateOptions } from '../index';
|
||||
import { isAPIError } from '../errors-ts';
|
||||
|
||||
export interface SetupAndLinkOptions {
|
||||
forceDelete?: boolean;
|
||||
autoConfirm?: boolean;
|
||||
forceDelete?: boolean;
|
||||
link?: ProjectLinkResult;
|
||||
successEmoji?: EmojiLabel;
|
||||
setupMsg?: string;
|
||||
projectName?: string;
|
||||
@@ -41,8 +42,9 @@ export default async function setupAndLink(
|
||||
client: Client,
|
||||
path: string,
|
||||
{
|
||||
forceDelete = false,
|
||||
autoConfirm = false,
|
||||
forceDelete = false,
|
||||
link,
|
||||
successEmoji = 'link',
|
||||
setupMsg = 'Set up',
|
||||
projectName,
|
||||
@@ -56,7 +58,9 @@ export default async function setupAndLink(
|
||||
output.error(`Expected directory but found file: ${path}`);
|
||||
return { status: 'error', exitCode: 1, reason: 'PATH_IS_FILE' };
|
||||
}
|
||||
const link = await getLinkedProject(client, path);
|
||||
if (!link) {
|
||||
link = await getLinkedProject(client, path);
|
||||
}
|
||||
const isTTY = client.stdin.isTTY;
|
||||
const quiet = !isTTY;
|
||||
let rootDirectory: string | null = null;
|
||||
|
||||
@@ -186,15 +186,14 @@ export async function getLinkedProject(
|
||||
})})\n`
|
||||
);
|
||||
return { status: 'error', exitCode: 1 };
|
||||
} else {
|
||||
}
|
||||
|
||||
output.print(
|
||||
prependEmoji(
|
||||
'Your Project was either deleted, transferred to a new Team, or you don’t have access to it anymore.\n',
|
||||
emoji('warning')
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
return { status: 'not_linked', org: null, project: null };
|
||||
}
|
||||
|
||||
|
||||
packages/cli/src/util/rollback/get-deployment-info.ts (new file, 38 lines)
@@ -0,0 +1,38 @@
import type Client from '../client';
import type { Deployment } from '../../types';
import getDeploymentByIdOrHost from '../deploy/get-deployment-by-id-or-host';
import handleCertError from '../certs/handle-cert-error';

/**
* Attempts to find the deployment by name or id.
* @param {Client} client - The Vercel client instance
* @param {string} contextName - The scope name
* @param {string} deployId - The deployment name or id to rollback
* @returns {Promise<Deployment>} Resolves an exit code or deployment info
*/
export default async function getDeploymentInfo(
client: Client,
contextName: string,
deployId: string
): Promise<Deployment> {
const deployment = handleCertError(
client.output,
await getDeploymentByIdOrHost(client, contextName, deployId)
);

if (deployment === 1) {
throw new Error(
`Failed to get deployment "${deployId}" in scope "${contextName}"`
);
}

if (deployment instanceof Error) {
throw deployment;
}

if (!deployment) {
throw new Error(`Couldn't find the deployment "${deployId}"`);
}

return deployment;
}

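Because `getDeploymentInfo` normalizes every failure into a thrown error, callers collapse their error handling to a single catch that prints the message and returns a non-zero exit code; that is how the rollback utilities later in this change consume it. A hedged sketch of that calling convention (import paths assume the util's location shown above):

```ts
import type Client from '../client';
import type { Deployment } from '../../types';
import getDeploymentInfo from './get-deployment-info';

// Resolve a deployment or turn any failure into an exit code for the command.
async function resolveDeploymentOrExit(
  client: Client,
  contextName: string,
  deployId: string
): Promise<Deployment | number> {
  try {
    return await getDeploymentInfo(client, contextName, deployId);
  } catch (err: any) {
    client.output.error(err?.toString() || err);
    return 1; // exit code, mirroring the command's error handling
  }
}
```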
packages/cli/src/util/rollback/render-alias-status.ts (new file, 19 lines)
@@ -0,0 +1,19 @@
import chalk from 'chalk';

/**
* Stylize the alias status label.
* @param {AliasStatus} status - The status label
* @returns {string}
*/
export default function renderAliasStatus(status: string): string {
if (status === 'completed') {
return chalk.green(status);
}
if (status === 'failed') {
return chalk.red(status);
}
if (status === 'skipped') {
return chalk.gray(status);
}
return chalk.yellow(status);
}

packages/cli/src/util/rollback/request-rollback.ts (new file, 85 lines)
@@ -0,0 +1,85 @@
|
||||
import chalk from 'chalk';
|
||||
import type Client from '../client';
|
||||
import { getCommandName } from '../pkg-name';
|
||||
import getDeploymentInfo from './get-deployment-info';
|
||||
import getScope from '../get-scope';
|
||||
import { isValidName } from '../is-valid-name';
|
||||
import ms from 'ms';
|
||||
import type { Project } from '../../types';
|
||||
import rollbackStatus from './status';
|
||||
|
||||
/**
|
||||
* Requests a rollback and waits for it complete.
|
||||
* @param {Client} client - The Vercel client instance
|
||||
* @param {string} deployId - The deployment name or id to rollback
|
||||
* @param {Project} project - Project info instance
|
||||
* @param {string} [timeout] - Time to poll for succeeded/failed state
|
||||
* @returns {Promise<number>} Resolves an exit code; 0 on success
|
||||
*/
|
||||
export default async function requestRollback({
|
||||
client,
|
||||
deployId,
|
||||
project,
|
||||
timeout,
|
||||
}: {
|
||||
client: Client;
|
||||
deployId: string;
|
||||
project: Project;
|
||||
timeout?: string;
|
||||
}): Promise<number> {
|
||||
const { output } = client;
|
||||
const { contextName } = await getScope(client);
|
||||
|
||||
if (!isValidName(deployId)) {
|
||||
output.error(
|
||||
`The provided argument "${deployId}" is not a valid deployment or project`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
output.spinner(
|
||||
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}…`
|
||||
);
|
||||
|
||||
let deployment;
|
||||
try {
|
||||
deployment = await getDeploymentInfo(client, contextName, deployId);
|
||||
} catch (err: any) {
|
||||
output.error(err?.toString() || err);
|
||||
return 1;
|
||||
} finally {
|
||||
output.stopSpinner();
|
||||
// re-render the spinner text because it goes so fast
|
||||
output.log(
|
||||
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}…`
|
||||
);
|
||||
}
|
||||
|
||||
// create the rollback
|
||||
await client.fetch<any>(
|
||||
`/v9/projects/${project.id}/rollback/${deployment.uid}`,
|
||||
{
|
||||
body: {}, // required
|
||||
method: 'POST',
|
||||
}
|
||||
);
|
||||
|
||||
if (timeout !== undefined && ms(timeout) === 0) {
|
||||
output.log(
|
||||
`Successfully requested rollback of ${chalk.bold(project.name)} to ${
|
||||
deployment.url
|
||||
} (${deployment.uid})`
|
||||
);
|
||||
output.log(`To check rollback status, run ${getCommandName('rollback')}.`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
// check the status
|
||||
return await rollbackStatus({
|
||||
client,
|
||||
contextName,
|
||||
deployment,
|
||||
project,
|
||||
timeout,
|
||||
});
|
||||
}
|
||||
packages/cli/src/util/rollback/status.ts (new file, 255 lines)
@@ -0,0 +1,255 @@
|
||||
import chalk from 'chalk';
|
||||
import type Client from '../client';
|
||||
import type {
|
||||
Deployment,
|
||||
PaginationOptions,
|
||||
Project,
|
||||
RollbackTarget,
|
||||
} from '../../types';
|
||||
import elapsed from '../output/elapsed';
|
||||
import formatDate from '../format-date';
|
||||
import getDeploymentInfo from './get-deployment-info';
|
||||
import getScope from '../get-scope';
|
||||
import ms from 'ms';
|
||||
import renderAliasStatus from './render-alias-status';
|
||||
import sleep from '../sleep';
|
||||
|
||||
interface RollbackAlias {
|
||||
alias: {
|
||||
alias: string;
|
||||
deploymentId: string;
|
||||
};
|
||||
id: string;
|
||||
status: 'completed' | 'in-progress' | 'pending' | 'failed';
|
||||
}
|
||||
|
||||
interface RollbackAliasesResponse {
|
||||
aliases: RollbackAlias[];
|
||||
pagination: PaginationOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Continuously checks a deployment status until it has succeeded, failed, or
|
||||
* taken longer than the timeout (default 3 minutes).
|
||||
* @param {Client} client - The Vercel client instance
|
||||
* @param {string} [contextName] - The scope name; if not specified, it will be
|
||||
* extracted from the `client`
|
||||
* @param {Deployment} [deployment] - Info about the deployment which is used
|
||||
* to display different output following a rollback request
|
||||
* @param {Project} project - Project info instance
|
||||
* @param {string} [timeout] - Milliseconds to poll for succeeded/failed state
|
||||
* @returns {Promise<number>} Resolves an exit code; 0 on success
|
||||
*/
|
||||
export default async function rollbackStatus({
|
||||
client,
|
||||
contextName,
|
||||
deployment,
|
||||
project,
|
||||
timeout = '3m',
|
||||
}: {
|
||||
client: Client;
|
||||
contextName?: string;
|
||||
deployment?: Deployment;
|
||||
project: Project;
|
||||
timeout?: string;
|
||||
}): Promise<number> {
|
||||
const { output } = client;
|
||||
const recentThreshold = Date.now() - ms('3m');
|
||||
const rollbackTimeout = Date.now() + ms(timeout);
|
||||
let counter = 0;
|
||||
let spinnerMessage = deployment
|
||||
? 'Rollback in progress'
|
||||
: `Checking rollback status of ${project.name}`;
|
||||
|
||||
const check = async () => {
|
||||
const { lastRollbackTarget } = await client.fetch<any>(
|
||||
`/v9/projects/${project.id}?rollbackInfo=true`
|
||||
);
|
||||
return lastRollbackTarget;
|
||||
};
|
||||
|
||||
if (!contextName) {
|
||||
({ contextName } = await getScope(client));
|
||||
}
|
||||
|
||||
try {
|
||||
output.spinner(`${spinnerMessage}…`);
|
||||
|
||||
// continuously loop until the rollback has explicitly succeeded, failed,
|
||||
// or timed out
|
||||
for (;;) {
|
||||
const { jobStatus, requestedAt, toDeploymentId }: RollbackTarget =
|
||||
(await check()) ?? {};
|
||||
|
||||
if (
|
||||
!jobStatus ||
|
||||
(jobStatus !== 'in-progress' && jobStatus !== 'pending')
|
||||
) {
|
||||
output.stopSpinner();
|
||||
output.log(`${spinnerMessage}…`);
|
||||
}
|
||||
|
||||
if (!jobStatus || requestedAt < recentThreshold) {
|
||||
output.log('No deployment rollback in progress');
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (jobStatus === 'skipped') {
|
||||
output.log('Rollback was skipped');
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (jobStatus === 'succeeded') {
|
||||
return await renderJobSucceeded({
|
||||
client,
|
||||
contextName,
|
||||
performingRollback: !!deployment,
|
||||
requestedAt,
|
||||
project,
|
||||
toDeploymentId,
|
||||
});
|
||||
}
|
||||
|
||||
if (jobStatus === 'failed') {
|
||||
return await renderJobFailed({
|
||||
client,
|
||||
contextName,
|
||||
deployment,
|
||||
project,
|
||||
toDeploymentId,
|
||||
});
|
||||
}
|
||||
|
||||
// lastly, if we're not pending/in-progress, then we don't know what
|
||||
// the status is, so bail
|
||||
if (jobStatus !== 'pending' && jobStatus !== 'in-progress') {
|
||||
output.log(`Unknown rollback status "${jobStatus}"`);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// check if we have been running for too long
|
||||
if (requestedAt < recentThreshold || Date.now() >= rollbackTimeout) {
|
||||
output.log(
|
||||
`The rollback exceeded its deadline - rerun ${chalk.bold(
|
||||
`vercel rollback ${toDeploymentId}`
|
||||
)} to try again`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
// if we've done our first poll and not rolling back, then print the
|
||||
// requested at date/time
|
||||
if (counter++ === 0 && !deployment) {
|
||||
spinnerMessage += ` requested at ${formatDate(requestedAt)}`;
|
||||
}
|
||||
output.spinner(`${spinnerMessage}…`);
|
||||
|
||||
await sleep(250);
|
||||
}
|
||||
} finally {
|
||||
output.stopSpinner();
|
||||
}
|
||||
}
|
||||
|
||||
async function renderJobFailed({
|
||||
client,
|
||||
contextName,
|
||||
deployment,
|
||||
project,
|
||||
toDeploymentId,
|
||||
}: {
|
||||
client: Client;
|
||||
contextName: string;
|
||||
deployment?: Deployment;
|
||||
project: Project;
|
||||
toDeploymentId: string;
|
||||
}) {
|
||||
const { output } = client;
|
||||
|
||||
try {
|
||||
const name = (
|
||||
deployment ||
|
||||
(await getDeploymentInfo(client, contextName, toDeploymentId))
|
||||
)?.url;
|
||||
output.error(
|
||||
`Failed to remap all aliases to the requested deployment ${name} (${toDeploymentId})`
|
||||
);
|
||||
} catch (e) {
|
||||
output.error(
|
||||
`Failed to remap all aliases to the requested deployment ${toDeploymentId}`
|
||||
);
|
||||
}
|
||||
|
||||
// aliases are paginated, so continuously loop until all of them have been
|
||||
// fetched
|
||||
let nextTimestamp;
|
||||
for (;;) {
|
||||
let url = `/v9/projects/${project.id}/rollback/aliases?failedOnly=true&limit=20`;
|
||||
if (nextTimestamp) {
|
||||
url += `&until=${nextTimestamp}`;
|
||||
}
|
||||
|
||||
const { aliases, pagination } = await client.fetch<RollbackAliasesResponse>(
|
||||
url
|
||||
);
|
||||
|
||||
for (const { alias, status } of aliases) {
|
||||
output.log(
|
||||
` ${renderAliasStatus(status).padEnd(11)} ${alias.alias} (${
|
||||
alias.deploymentId
|
||||
})`
|
||||
);
|
||||
}
|
||||
|
||||
if (pagination?.next) {
|
||||
nextTimestamp = pagination.next;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
async function renderJobSucceeded({
|
||||
client,
|
||||
contextName,
|
||||
performingRollback,
|
||||
project,
|
||||
requestedAt,
|
||||
toDeploymentId,
|
||||
}: {
|
||||
client: Client;
|
||||
contextName: string;
|
||||
performingRollback: boolean;
|
||||
project: Project;
|
||||
requestedAt: number;
|
||||
toDeploymentId: string;
|
||||
}) {
|
||||
const { output } = client;
|
||||
|
||||
let deploymentInfo = '';
|
||||
try {
|
||||
const deployment = await getDeploymentInfo(
|
||||
client,
|
||||
contextName,
|
||||
toDeploymentId
|
||||
);
|
||||
deploymentInfo = `${chalk.bold(deployment.url)} (${toDeploymentId})`;
|
||||
} catch (err: any) {
|
||||
output.debug(
|
||||
`Failed to get deployment url for ${toDeploymentId}: ${
|
||||
err?.toString() || err
|
||||
}`
|
||||
);
|
||||
deploymentInfo = chalk.bold(toDeploymentId);
|
||||
}
|
||||
|
||||
const duration = performingRollback ? elapsed(Date.now() - requestedAt) : '';
|
||||
output.log(
|
||||
`Success! ${chalk.bold(
|
||||
project.name
|
||||
)} was rolled back to ${deploymentInfo} ${duration}`
|
||||
);
|
||||
return 0;
|
||||
}
|
||||
@@ -143,6 +143,7 @@ async function runNpmInstall(fixturePath) {
|
||||
await execa('yarn', ['install'], {
|
||||
cwd: fixturePath,
|
||||
shell: true,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
packages/cli/test/fixtures/unit/create-git-meta/not-dirty/git/index (binary, generated, vendored; file not shown)
packages/cli/test/fixtures/unit/vercel-rollback/.gitignore (new file, 3 lines, vendored)
@@ -0,0 +1,3 @@
.next
yarn.lock
!.vercel

packages/cli/test/fixtures/unit/vercel-rollback/.vercel/project.json (new file, 4 lines, vendored)
@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "vercel-rollback"
}

packages/cli/test/fixtures/unit/vercel-rollback/package.json (new file, 12 lines, vendored)
@@ -0,0 +1,12 @@
{
"scripts": {
"build": "next build",
"dev": "next",
"now-build": "next build"
},
"dependencies": {
"next": "^8.0.0",
"react": "^16.7.0",
"react-dom": "^16.7.0"
}
}

packages/cli/test/fixtures/unit/vercel-rollback/pages/index.js (new file, 11 lines, vendored)
@@ -0,0 +1,11 @@
import { withRouter } from 'next/router';

function Index({ router }) {
const data = {
pathname: router.pathname,
query: router.query,
};
return <div>{JSON.stringify(data)}</div>;
}

export default withRouter(Index);

packages/cli/test/fixtures/unit/vercel-rollback/vercel.json (new file, 10 lines, vendored)
@@ -0,0 +1,10 @@
{
"version": 2,
"name": "vercel-rollback",
"routes": [
{
"src": "/(.*)",
"dest": "/index?route-param=b"
}
]
}

@@ -140,6 +140,51 @@ function setupDeploymentEndpoints() {
|
||||
res.json(deployment);
|
||||
});
|
||||
|
||||
client.scenario.get('/v5/now/deployments/:id', (req, res) => {
|
||||
const { id } = req.params;
|
||||
const { url } = req.query;
|
||||
let deployment;
|
||||
if (id === 'get') {
|
||||
if (typeof url !== 'string') {
|
||||
res.statusCode = 400;
|
||||
return res.json({ error: { code: 'bad_request' } });
|
||||
}
|
||||
deployment = Array.from(deployments.values()).find(d => {
|
||||
return d.url === url;
|
||||
});
|
||||
} else {
|
||||
// lookup by ID
|
||||
deployment = deployments.get(id);
|
||||
}
|
||||
if (!deployment) {
|
||||
res.statusCode = 404;
|
||||
return res.json({
|
||||
error: { code: 'not_found', message: 'Deployment not found', id },
|
||||
});
|
||||
}
|
||||
res.json({
|
||||
uid: deployment.id,
|
||||
url: deployment.url,
|
||||
name: '',
|
||||
type: 'LAMBDAS',
|
||||
state: 'READY',
|
||||
version: deployment.version,
|
||||
created: deployment.createdAt,
|
||||
ready: deployment.ready,
|
||||
buildingAt: deployment.buildingAt,
|
||||
creator: {
|
||||
uid: deployment.creator?.uid,
|
||||
username: deployment.creator?.username,
|
||||
},
|
||||
target: deployment.target,
|
||||
ownerId: undefined, // ?
|
||||
projectId: undefined, // ?
|
||||
inspectorUrl: deployment.inspectorUrl,
|
||||
meta: {},
|
||||
alias: deployment.alias,
|
||||
});
|
||||
});
|
||||
|
||||
client.scenario.get('/:version/deployments/:id/builds', (req, res) => {
|
||||
const { id } = req.params;
|
||||
const deployment = deployments.get(id);
|
||||
|
||||
@@ -23,13 +23,19 @@ export async function toOutput(
|
||||
return new Promise(resolve => {
|
||||
let output = '';
|
||||
let timeoutId = setTimeout(onTimeout, timeout);
|
||||
const hint =
|
||||
matcherHint(matcherName, 'stream', 'test', matcherHintOptions) + '\n\n';
|
||||
|
||||
const message = () => {
|
||||
function onData(data: string) {
|
||||
output += data;
|
||||
if (output.includes(test)) {
|
||||
cleanup();
|
||||
resolve({
|
||||
pass: true,
|
||||
message() {
|
||||
const labelExpected = 'Expected output';
|
||||
const labelReceived = 'Received output';
|
||||
const printLabel = getLabelPrinter(labelExpected, labelReceived);
|
||||
const hint =
|
||||
matcherHint(matcherName, 'stream', 'test', matcherHintOptions) + '\n\n';
|
||||
return (
|
||||
hint +
|
||||
printLabel(labelExpected) +
|
||||
@@ -40,19 +46,19 @@ export async function toOutput(
|
||||
(isNot ? ' ' : '') +
|
||||
printReceived(output)
|
||||
);
|
||||
};
|
||||
|
||||
function onData(data: string) {
|
||||
output += data;
|
||||
if (output.includes(test)) {
|
||||
cleanup();
|
||||
resolve({ pass: true, message });
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function onTimeout() {
|
||||
cleanup();
|
||||
resolve({ pass: false, message });
|
||||
resolve({
|
||||
pass: false,
|
||||
message() {
|
||||
return `${hint}Timed out waiting ${timeout} ms for output`;
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
function cleanup() {
|
||||
|
||||
@@ -130,6 +130,7 @@ export const defaultProject = {
|
||||
userId: 'K4amb7K9dAt5R2vBJWF32bmY',
|
||||
},
|
||||
],
|
||||
lastRollbackTarget: null,
|
||||
alias: [
|
||||
{
|
||||
domain: 'foobar.com',
|
||||
|
||||
packages/cli/test/unit/commands/rollback.test.ts (new file, 372 lines)
@@ -0,0 +1,372 @@
|
||||
import chalk from 'chalk';
|
||||
import { client } from '../../mocks/client';
|
||||
import { defaultProject, useProject } from '../../mocks/project';
|
||||
import { Request, Response } from 'express';
|
||||
import rollback from '../../../src/commands/rollback';
|
||||
import { RollbackJobStatus, RollbackTarget } from '../../../src/types';
|
||||
import { setupFixture } from '../../helpers/setup-fixture';
|
||||
import { useDeployment } from '../../mocks/deployment';
|
||||
import { useTeams } from '../../mocks/team';
|
||||
import { useUser } from '../../mocks/user';
|
||||
import sleep from '../../../src/util/sleep';
|
||||
|
||||
jest.setTimeout(60000);
|
||||
|
||||
describe('rollback', () => {
|
||||
it('should error if cwd is invalid', async () => {
|
||||
client.setArgv('rollback', '--cwd', __filename);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput(
|
||||
'Error: Support for single file deployments has been removed.'
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should error if timeout is invalid', async () => {
|
||||
const { cwd } = initRollbackTest();
|
||||
client.setArgv('rollback', '--yes', '--cwd', cwd, '--timeout', 'foo');
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Error: Invalid timeout "foo"');
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should error if invalid deployment name', async () => {
|
||||
const { cwd } = initRollbackTest();
|
||||
client.setArgv('rollback', '????', '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
'Error: The provided argument "????" is not a valid deployment or project'
|
||||
);
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should error if deployment not found', async () => {
|
||||
const { cwd } = initRollbackTest();
|
||||
client.setArgv('rollback', 'foo', '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput('Fetching deployment "foo" in ');
|
||||
await expect(client.stderr).toOutput(
|
||||
'Error: Error: Can\'t find the deployment "foo" under the context'
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should show status when not rolling back', async () => {
|
||||
const { cwd } = initRollbackTest();
|
||||
client.setArgv('rollback', '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
'Checking rollback status of vercel-rollback'
|
||||
);
|
||||
await expect(client.stderr).toOutput('No deployment rollback in progress');
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(0);
|
||||
});
|
||||
|
||||
it('should rollback by deployment id', async () => {
|
||||
const { cwd, previousDeployment } = initRollbackTest();
|
||||
client.setArgv('rollback', previousDeployment.id, '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
|
||||
);
|
||||
await expect(client.stderr).toOutput('Rollback in progress');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Success! ${chalk.bold('vercel-rollback')} was rolled back to ${
|
||||
previousDeployment.url
|
||||
} (${previousDeployment.id})`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(0);
|
||||
});
|
||||
|
||||
it('should rollback by deployment url', async () => {
|
||||
const { cwd, previousDeployment } = initRollbackTest();
|
||||
client.setArgv('rollback', previousDeployment.url, '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Fetching deployment "${previousDeployment.url}" in ${previousDeployment.creator?.username}`
|
||||
);
|
||||
await expect(client.stderr).toOutput('Rollback in progress');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Success! ${chalk.bold('vercel-rollback')} was rolled back to ${
|
||||
previousDeployment.url
|
||||
} (${previousDeployment.id})`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(0);
|
||||
});
|
||||
|
||||
it('should get status while rolling back', async () => {
|
||||
const { cwd, previousDeployment, project } = initRollbackTest({
|
||||
rollbackPollCount: 10,
|
||||
});
|
||||
|
||||
// start the rollback
|
||||
client.setArgv('rollback', previousDeployment.id, '--yes', '--cwd', cwd);
|
||||
rollback(client);
|
||||
|
||||
// need to wait for the rollback request to be accepted
|
||||
await sleep(500);
|
||||
|
||||
// get the status
|
||||
client.setArgv('rollback', '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Checking rollback status of ${project.name}`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Success! ${chalk.bold('vercel-rollback')} was rolled back to ${
|
||||
previousDeployment.url
|
||||
} (${previousDeployment.id})`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(0);
|
||||
});
|
||||
|
||||
it('should error if rollback request fails', async () => {
|
||||
const { cwd, previousDeployment } = initRollbackTest({
|
||||
rollbackPollCount: 10,
|
||||
rollbackStatusCode: 500,
|
||||
});
|
||||
|
||||
client.setArgv('rollback', previousDeployment.id, '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).rejects.toThrow('Response Error (500)');
|
||||
});
|
||||
|
||||
it('should error if rollback fails (no aliases)', async () => {
|
||||
const { cwd, previousDeployment } = initRollbackTest({
|
||||
rollbackJobStatus: 'failed',
|
||||
});
|
||||
client.setArgv('rollback', previousDeployment.id, '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
|
||||
);
|
||||
await expect(client.stderr).toOutput('Rollback in progress');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error: Failed to remap all aliases to the requested deployment ${previousDeployment.url} (${previousDeployment.id})`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should error if rollback fails (with aliases)', async () => {
|
||||
const { cwd, previousDeployment } = initRollbackTest({
|
||||
rollbackAliases: [
|
||||
{
|
||||
alias: { alias: 'foo', deploymentId: 'foo_123' },
|
||||
status: 'completed',
|
||||
},
|
||||
{
|
||||
alias: { alias: 'bar', deploymentId: 'bar_123' },
|
||||
status: 'failed',
|
||||
},
|
||||
],
|
||||
rollbackJobStatus: 'failed',
|
||||
});
|
||||
client.setArgv('rollback', previousDeployment.id, '--yes', '--cwd', cwd);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
|
||||
);
|
||||
await expect(client.stderr).toOutput('Rollback in progress');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Error: Failed to remap all aliases to the requested deployment ${previousDeployment.url} (${previousDeployment.id})`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
` ${chalk.green('completed')} foo (foo_123)`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
` ${chalk.red('failed')} bar (bar_123)`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should error if deployment times out', async () => {
|
||||
const { cwd, previousDeployment } = initRollbackTest({
|
||||
rollbackPollCount: 10,
|
||||
});
|
||||
client.setArgv(
|
||||
'rollback',
|
||||
previousDeployment.id,
|
||||
'--yes',
|
||||
'--cwd',
|
||||
cwd,
|
||||
'--timeout',
|
||||
'2s'
|
||||
);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
|
||||
);
|
||||
await expect(client.stderr).toOutput('Rollback in progress');
|
||||
await expect(client.stderr).toOutput(
|
||||
`The rollback exceeded its deadline - rerun ${chalk.bold(
|
||||
`vercel rollback ${previousDeployment.id}`
|
||||
)} to try again`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(1);
|
||||
});
|
||||
|
||||
it('should immediately exit after requesting rollback', async () => {
|
||||
const { cwd, previousDeployment } = initRollbackTest();
|
||||
client.setArgv(
|
||||
'rollback',
|
||||
previousDeployment.id,
|
||||
'--yes',
|
||||
'--cwd',
|
||||
cwd,
|
||||
'--timeout',
|
||||
'0'
|
||||
);
|
||||
const exitCodePromise = rollback(client);
|
||||
|
||||
await expect(client.stderr).toOutput('Retrieving project…');
|
||||
await expect(client.stderr).toOutput(
|
||||
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
|
||||
);
|
||||
await expect(client.stderr).toOutput(
|
||||
`Successfully requested rollback of ${chalk.bold('vercel-rollback')} to ${
|
||||
previousDeployment.url
|
||||
} (${previousDeployment.id})`
|
||||
);
|
||||
|
||||
await expect(exitCodePromise).resolves.toEqual(0);
|
||||
});
|
||||
});
|
||||
|
||||
type RollbackAlias = {
|
||||
alias: {
|
||||
alias: string;
|
||||
deploymentId: string;
|
||||
};
|
||||
status: string;
|
||||
};
|
||||
|
||||
function initRollbackTest({
|
||||
rollbackAliases = [],
|
||||
rollbackJobStatus = 'succeeded',
|
||||
rollbackPollCount = 2,
|
||||
rollbackStatusCode,
|
||||
}: {
|
||||
rollbackAliases?: RollbackAlias[];
|
||||
rollbackJobStatus?: RollbackJobStatus;
|
||||
rollbackPollCount?: number;
|
||||
rollbackStatusCode?: number;
|
||||
} = {}) {
|
||||
const cwd = setupFixture('vercel-rollback');
|
||||
const user = useUser();
|
||||
useTeams('team_dummy');
|
||||
const { project } = useProject({
|
||||
...defaultProject,
|
||||
id: 'vercel-rollback',
|
||||
name: 'vercel-rollback',
|
||||
});
|
||||
|
||||
const currentDeployment = useDeployment({ creator: user });
|
||||
const previousDeployment = useDeployment({ creator: user });
|
||||
let lastRollbackTarget: RollbackTarget | null = null;
|
||||
|
||||
client.scenario.post(
|
||||
'/:version/projects/:project/rollback/:id',
|
||||
(req: Request, res: Response) => {
|
||||
const { id } = req.params;
|
||||
if (previousDeployment.id !== id) {
|
||||
res.statusCode = 404;
|
||||
res.json({
|
||||
error: { code: 'not_found', message: 'Deployment not found', id },
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
if (rollbackStatusCode === 500) {
|
||||
res.statusCode = 500;
|
||||
res.end('Server error');
|
||||
return;
|
||||
}
|
||||
|
||||
lastRollbackTarget = {
|
||||
fromDeploymentId: currentDeployment.id,
|
||||
jobStatus: 'in-progress',
|
||||
requestedAt: Date.now(),
|
||||
toDeploymentId: id,
|
||||
};
|
||||
res.statusCode = 201;
|
||||
res.end();
|
||||
}
|
||||
);
|
||||
|
||||
let counter = 0;
|
||||
|
||||
client.scenario.get(`/v9/projects/${project.id}`, (req, res) => {
|
||||
const data = { ...project };
|
||||
if (req.query?.rollbackInfo === 'true') {
|
||||
if (lastRollbackTarget && counter++ > rollbackPollCount) {
|
||||
lastRollbackTarget.jobStatus = rollbackJobStatus;
|
||||
}
|
||||
data.lastRollbackTarget = lastRollbackTarget;
|
||||
}
|
||||
res.json(data);
|
||||
});
|
||||
|
||||
client.scenario.get(`/:version/now/deployments/get`, (req, res) => {
|
||||
const { url } = req.query;
|
||||
if (url === previousDeployment.url) {
|
||||
res.json({ id: previousDeployment.id });
|
||||
} else {
|
||||
res.statusCode = 404;
|
||||
res.json({
|
||||
error: { code: 'not_found', message: 'Deployment not found' },
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
client.scenario.get(
|
||||
'/:version/projects/:project/rollback/aliases',
|
||||
(req, res) => {
|
||||
res.json({
|
||||
aliases: rollbackAliases,
|
||||
pagination: null,
|
||||
});
|
||||
}
|
||||
);
|
||||
|
||||
return {
|
||||
cwd,
|
||||
project,
|
||||
currentDeployment,
|
||||
previousDeployment,
|
||||
};
|
||||
}
|
||||
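The rollback tests above all exercise the same loop: the CLI requests the rollback, then polls the project's lastRollbackTarget until jobStatus leaves 'in-progress', gives up once the --timeout deadline passes, and skips polling entirely for --timeout 0. A minimal sketch of that loop, assuming a simplified status union and a hypothetical getStatus callback (not the CLI's actual implementation):

type JobStatus = 'in-progress' | 'succeeded' | 'failed';

async function pollRollback(
  getStatus: () => Promise<JobStatus>, // e.g. GET /v9/projects/:id?rollbackInfo=true
  timeoutMs: number,
  intervalMs = 250
): Promise<JobStatus | 'timed-out'> {
  if (timeoutMs === 0) return 'in-progress'; // `--timeout 0`: request only, exit 0 immediately
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const status = await getStatus();
    if (status !== 'in-progress') return status; // 'succeeded' → exit 0, 'failed' → exit 1
    await new Promise(resolve => setTimeout(resolve, intervalMs));
  }
  return 'timed-out'; // "The rollback exceeded its deadline" → exit 1
}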
140
packages/cli/test/unit/util/get-latest-version.test.ts
Normal file
@@ -0,0 +1,140 @@
|
||||
import fs from 'fs-extra';
|
||||
import sleep from '../../../src/util/sleep';
|
||||
import tmp from 'tmp-promise';
|
||||
import getLatestVersion from '../../../src/util/get-latest-version';
|
||||
import { join } from 'path';
|
||||
|
||||
tmp.setGracefulCleanup();
|
||||
|
||||
const cacheDir = tmp.tmpNameSync({
|
||||
prefix: 'test-vercel-cli-get-latest-version-',
|
||||
});
|
||||
|
||||
const cacheFile = join(cacheDir, 'package-updates', 'vercel-latest.json');
|
||||
|
||||
const pkg = {
|
||||
name: 'vercel',
|
||||
version: '27.3.0',
|
||||
};
|
||||
|
||||
const versionRE = /^\d+\.\d+\.\d+$/;
|
||||
|
||||
describe('get latest version', () => {
|
||||
afterEach(() => fs.remove(cacheDir));
|
||||
|
||||
it('should find newer version async', async () => {
|
||||
// 1. first call, no cache file
|
||||
let latest = getLatestVersion({
|
||||
cacheDir,
|
||||
pkg,
|
||||
});
|
||||
expect(latest).toEqual(undefined);
|
||||
|
||||
await waitForCacheFile();
|
||||
|
||||
let cache = await fs.readJSON(cacheFile);
|
||||
expect(typeof cache).toEqual('object');
|
||||
expect(typeof cache.expireAt).toEqual('number');
|
||||
expect(cache.expireAt).toBeGreaterThan(Date.now());
|
||||
expect(typeof cache.version).toEqual('string');
|
||||
expect(cache.version).toEqual(expect.stringMatching(versionRE));
|
||||
expect(cache.notified).toEqual(false);
|
||||
|
||||
// 2. call again and this time it'll return the version from the cache
|
||||
latest = getLatestVersion({
|
||||
cacheDir,
|
||||
pkg,
|
||||
});
|
||||
expect(typeof latest).toBe('string');
|
||||
expect(latest).toEqual(expect.stringMatching(versionRE));
|
||||
|
||||
cache = await fs.readJSON(cacheFile);
|
||||
expect(cache.version).toEqual(expect.stringMatching(versionRE));
|
||||
expect(cache.notified).toEqual(true);
|
||||
|
||||
// 3. notification already done, should skip
|
||||
latest = getLatestVersion({
|
||||
cacheDir,
|
||||
pkg,
|
||||
});
|
||||
expect(latest).toEqual(undefined);
|
||||
});
|
||||
|
||||
it('should not find a newer version', async () => {
|
||||
// 1. first call, no cache file
|
||||
let latest = getLatestVersion({
|
||||
cacheDir,
|
||||
updateCheckInterval: 1,
|
||||
pkg: {
|
||||
...pkg,
|
||||
version: '999.0.0',
|
||||
},
|
||||
});
|
||||
expect(latest).toEqual(undefined);
|
||||
|
||||
await waitForCacheFile();
|
||||
|
||||
// 2. call again and should recheck and still not find a new version
|
||||
latest = getLatestVersion({
|
||||
cacheDir,
|
||||
updateCheckInterval: 1,
|
||||
pkg: {
|
||||
...pkg,
|
||||
version: '999.0.0',
|
||||
},
|
||||
});
|
||||
expect(latest).toEqual(undefined);
|
||||
});
|
||||
|
||||
it('should not check twice', async () => {
|
||||
// 1. first call, no cache file
|
||||
let latest = getLatestVersion({
|
||||
cacheDir,
|
||||
updateCheckInterval: 1,
|
||||
pkg,
|
||||
});
|
||||
expect(latest).toEqual(undefined);
|
||||
|
||||
// 2. immediately call again, but should hopefully still be undefined
|
||||
latest = getLatestVersion({
|
||||
cacheDir,
|
||||
updateCheckInterval: 1,
|
||||
pkg,
|
||||
});
|
||||
expect(latest).toEqual(undefined);
|
||||
|
||||
await waitForCacheFile();
|
||||
|
||||
// 3. call again and should recheck and find a new version
|
||||
latest = getLatestVersion({
|
||||
cacheDir,
|
||||
updateCheckInterval: 1,
|
||||
pkg,
|
||||
});
|
||||
expect(typeof latest).toBe('string');
|
||||
expect(latest).toEqual(expect.stringMatching(versionRE));
|
||||
});
|
||||
|
||||
it('should error if no arguments are passed in', () => {
|
||||
expect(() => getLatestVersion(undefined as any)).toThrow(TypeError);
|
||||
});
|
||||
|
||||
it('should error if package is invalid', () => {
|
||||
expect(() => getLatestVersion({} as any)).toThrow(TypeError);
|
||||
expect(() => getLatestVersion({ pkg: null as any })).toThrow(TypeError);
|
||||
expect(() => getLatestVersion({ pkg: {} })).toThrow(TypeError);
|
||||
expect(() => getLatestVersion({ pkg: { name: null as any } })).toThrow(
|
||||
TypeError
|
||||
);
|
||||
expect(() => getLatestVersion({ pkg: { name: '' } })).toThrow(TypeError);
|
||||
});
|
||||
});
|
||||
|
||||
async function waitForCacheFile() {
|
||||
for (let i = 0; i < 20; i++) {
|
||||
await sleep(100);
|
||||
if (await fs.pathExists(cacheFile)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
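The new test file above pins down a small cache contract for getLatestVersion: the first call returns undefined while a background worker fetches the registry and writes package-updates/vercel-latest.json with { expireAt, version, notified }; a later call returns the cached version exactly once and flips notified so the user is only told once. A rough synchronous sketch of the read side under those assumptions (the helper name is made up, and the background fetch plus the semver comparison against pkg.version are omitted):

import fs from 'fs-extra';
import { join } from 'path';

interface VersionCache {
  expireAt: number;
  version: string;
  notified: boolean;
}

function readCachedLatestVersion(cacheDir: string): string | undefined {
  const cacheFile = join(cacheDir, 'package-updates', 'vercel-latest.json');
  if (!fs.pathExistsSync(cacheFile)) {
    return undefined; // first call: the background check has not finished yet
  }
  const cache: VersionCache = fs.readJSONSync(cacheFile);
  // expireAt only decides whether a fresh background check should be spawned; omitted here.
  if (cache.notified) {
    return undefined; // the user has already been told about this version
  }
  fs.writeJSONSync(cacheFile, { ...cache, notified: true }); // notify at most once
  return cache.version;
}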
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "12.2.20",
|
||||
"version": "12.2.21",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -43,7 +43,7 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/routing-utils": "2.1.3",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "1.1.13",
|
||||
"version": "1.1.14",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
|
||||
@@ -267,17 +267,10 @@ export const frameworks = [
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
{
|
||||
src: '^/dist/(.*)$',
|
||||
src: '^/assets/(.*)$',
|
||||
headers: { 'cache-control': 'public, max-age=31536000, immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
handle: 'filesystem',
|
||||
},
|
||||
{
|
||||
src: '/(.*)',
|
||||
dest: '/index.html',
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/fs-detectors",
|
||||
"version": "3.5.3",
|
||||
"version": "3.5.4",
|
||||
"description": "Vercel filesystem detectors",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
@@ -20,7 +20,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/error-utils": "1.0.3",
|
||||
"@vercel/frameworks": "1.1.13",
|
||||
"@vercel/frameworks": "1.1.14",
|
||||
"@vercel/routing-utils": "2.1.3",
|
||||
"glob": "8.0.3",
|
||||
"js-yaml": "4.1.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/go",
|
||||
"version": "2.2.18",
|
||||
"version": "2.2.19",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
|
||||
@@ -35,7 +35,7 @@
|
||||
"@types/jest": "28.1.6",
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "^4.0.0",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"async-retry": "1.3.1",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/hydrogen",
|
||||
"version": "0.0.32",
|
||||
"version": "0.0.33",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
@@ -21,7 +21,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "14.18.33",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/static-config": "2.0.6",
|
||||
"typescript": "4.6.4"
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "3.3.0",
|
||||
"version": "3.3.2",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -34,6 +34,7 @@
|
||||
"devDependencies": {
|
||||
"@types/aws-lambda": "8.10.19",
|
||||
"@types/buffer-crc32": "0.2.0",
|
||||
"@types/bytes": "3.1.1",
|
||||
"@types/convert-source-map": "1.5.2",
|
||||
"@types/find-up": "4.0.0",
|
||||
"@types/fs-extra": "8.0.0",
|
||||
@@ -44,11 +45,12 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/nft": "0.22.1",
|
||||
"@vercel/routing-utils": "2.1.3",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
"bytes": "3.1.2",
|
||||
"cheerio": "1.0.0-rc.10",
|
||||
"convert-source-map": "1.8.0",
|
||||
"esbuild": "0.12.22",
|
||||
|
||||
@@ -4,4 +4,4 @@ const MIB = 1024 * KIB;
|
||||
/**
|
||||
* The maximum size of a *compressed* edge function.
|
||||
*/
|
||||
export const EDGE_FUNCTION_SIZE_LIMIT = MIB;
|
||||
export const EDGE_FUNCTION_SIZE_LIMIT = 4 * MIB;
|
||||
|
||||
@@ -6,7 +6,7 @@ import { join } from 'path';
|
||||
import { EDGE_FUNCTION_SIZE_LIMIT } from './constants';
|
||||
import zlib from 'zlib';
|
||||
import { promisify } from 'util';
|
||||
import bytes from 'pretty-bytes';
|
||||
import { prettyBytes } from '../utils';
|
||||
|
||||
// @ts-expect-error this is a prebuilt file, based on `../../scripts/build-edge-function-template.js`
|
||||
import template from '../../dist/___get-nextjs-edge-function.js';
|
||||
@@ -44,7 +44,9 @@ export async function getNextjsEdgeFunctionSource(
|
||||
* We validate at this point because we want to verify against user code.
|
||||
* It should not count the Worker wrapper nor the Next.js wrapper.
|
||||
*/
|
||||
const wasmFiles = (wasm ?? []).map(({ filePath }) => join(outputDir, filePath));
|
||||
const wasmFiles = (wasm ?? []).map(({ filePath }) =>
|
||||
join(outputDir, filePath)
|
||||
);
|
||||
await validateSize(text, wasmFiles);
|
||||
|
||||
// Wrap to fake module.exports
|
||||
@@ -83,9 +85,9 @@ async function validateSize(script: string, wasmFiles: string[]) {
|
||||
const gzipped = await gzip(content);
|
||||
if (gzipped.length > EDGE_FUNCTION_SIZE_LIMIT) {
|
||||
throw new Error(
|
||||
`Exceeds maximum edge function size: ${bytes(
|
||||
`Exceeds maximum edge function size: ${prettyBytes(
|
||||
gzipped.length
|
||||
)} / ${bytes(EDGE_FUNCTION_SIZE_LIMIT)}`
|
||||
)} / ${prettyBytes(EDGE_FUNCTION_SIZE_LIMIT)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
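The two hunks above raise the compressed edge-function budget from 1 MiB to 4 MiB and switch the error message to the space-separated prettyBytes wrapper; the test hunks that follow bump their payloads to ~4200 KiB so they still overflow the new limit. The check gzips the user code plus any referenced WASM before comparing against the limit, roughly as follows (a sketch, not the exact validateSize body; exceedsEdgeLimit is a made-up name):

import zlib from 'zlib';
import { promisify } from 'util';

const gzip = promisify(zlib.gzip);
const EDGE_FUNCTION_SIZE_LIMIT = 4 * 1024 * 1024; // 4 MiB, per the new constants.ts

async function exceedsEdgeLimit(script: string, wasmAssets: Buffer[]): Promise<boolean> {
  // The limit applies to the *compressed* user code plus its WASM assets,
  // not to the Worker or Next.js wrapper added around it.
  const content = Buffer.concat([Buffer.from(script, 'utf8'), ...wasmAssets]);
  const gzipped = await gzip(content);
  return gzipped.length > EDGE_FUNCTION_SIZE_LIMIT;
}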
@@ -39,9 +39,13 @@ import { getNextjsEdgeFunctionSource } from './edge-function-source/get-edge-fun
|
||||
import type { LambdaOptionsWithFiles } from '@vercel/build-utils/dist/lambda';
|
||||
import { stringifySourceMap } from './sourcemapped';
|
||||
import type { RawSourceMap } from 'source-map';
|
||||
import bytes from 'bytes';
|
||||
|
||||
type stringMap = { [key: string]: string };
|
||||
|
||||
const _prettyBytes = (n: number) => bytes(n, { unitSeparator: ' ' });
|
||||
export { _prettyBytes as prettyBytes };
|
||||
|
||||
// Identify /[param]/ in route string
|
||||
// eslint-disable-next-line no-useless-escape
|
||||
const TEST_DYNAMIC_ROUTE = /\/\[[^\/]+?\](?=\/|$)/;
|
||||
|
||||
@@ -14,7 +14,7 @@ it('should throw an error when exceeds the script size limit', async () => {
|
||||
`
|
||||
module.exports.middleware = function () {
|
||||
return Response(${JSON.stringify({
|
||||
text: randomBytes(1200000).toString('base64'),
|
||||
text: randomBytes(4200 * 1024).toString('base64'),
|
||||
})})
|
||||
}
|
||||
`
|
||||
@@ -51,7 +51,7 @@ it('throws an error if it contains too big WASM file', async () => {
|
||||
);
|
||||
|
||||
const wasmPath = join(dir, 'big.wasm');
|
||||
await writeFile(wasmPath, randomBytes(1200 * 1024));
|
||||
await writeFile(wasmPath, randomBytes(4200 * 1024));
|
||||
|
||||
expect(async () => {
|
||||
await getNextjsEdgeFunctionSource(
|
||||
@@ -107,5 +107,7 @@ it('uses the template', async () => {
|
||||
);
|
||||
const source = edgeFunctionSource.source();
|
||||
expect(source).toMatch(/nextConfig/);
|
||||
expect(source).toContain(`const wasm_small = require("/wasm/wasm_small.wasm")`);
|
||||
expect(source).toContain(
|
||||
`const wasm_small = require("/wasm/wasm_small.wasm")`
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/node",
|
||||
"version": "2.7.0",
|
||||
"version": "2.7.1",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
|
||||
@@ -31,7 +31,7 @@
|
||||
"dependencies": {
|
||||
"@edge-runtime/vm": "2.0.0",
|
||||
"@types/node": "14.18.33",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/node-bridge": "3.1.2",
|
||||
"@vercel/static-config": "2.0.6",
|
||||
"edge-runtime": "2.0.0",
|
||||
|
||||
@@ -116,7 +116,7 @@ function cachedLookup<T>(fn: (arg: string) => T): (arg: string) => T {
|
||||
/**
|
||||
* Maps the config path to a build func
|
||||
*/
|
||||
const configFileToBuildMap = new Map<string, Build>();
|
||||
const configFileToBuildMap = new Map<string, GetOutputFunction>();
|
||||
|
||||
/**
|
||||
* Register TypeScript compiler.
|
||||
@@ -189,16 +189,30 @@ export function register(opts: Options = {}): Register {
|
||||
}
|
||||
}
|
||||
|
||||
function getBuild(configFileName = ''): Build {
|
||||
let build = configFileToBuildMap.get(configFileName);
|
||||
if (build) return build;
|
||||
function getBuild(
|
||||
configFileName = '',
|
||||
skipTypeCheck?: boolean
|
||||
): GetOutputFunction {
|
||||
const cachedGetOutput = configFileToBuildMap.get(configFileName);
|
||||
|
||||
if (cachedGetOutput) {
|
||||
return cachedGetOutput;
|
||||
}
|
||||
|
||||
const outFiles = new Map<string, SourceOutput>();
|
||||
const config = readConfig(configFileName);
|
||||
|
||||
/**
|
||||
* Create the basic required function using transpile mode.
|
||||
* Create the basic function for transpile only (ts-node --transpileOnly)
|
||||
*/
|
||||
const getOutput = function (code: string, fileName: string): SourceOutput {
|
||||
const getOutputTranspile: GetOutputFunction = (
|
||||
code: string,
|
||||
fileName: string
|
||||
) => {
|
||||
const outFile = outFiles.get(fileName);
|
||||
if (outFile) {
|
||||
return outFile;
|
||||
}
|
||||
const result = ts.transpileModule(code, {
|
||||
fileName,
|
||||
transformers,
|
||||
@@ -212,14 +226,16 @@ export function register(opts: Options = {}): Register {
|
||||
|
||||
reportTSError(diagnosticList, config.options.noEmitOnError);
|
||||
|
||||
return { code: result.outputText, map: result.sourceMapText as string };
|
||||
const file = {
|
||||
code: result.outputText,
|
||||
map: result.sourceMapText as string,
|
||||
};
|
||||
outFiles.set(fileName, file);
|
||||
return file;
|
||||
};
|
||||
|
||||
// Use full language services when the fast option is disabled.
|
||||
let getOutputTypeCheck: (code: string, fileName: string) => SourceOutput;
|
||||
{
|
||||
const memoryCache = new MemoryCache(config.fileNames);
|
||||
const cachedReadFile = cachedLookup(debugFn('readFile', readFile));
|
||||
const cachedReadFile = cachedLookup(readFile);
|
||||
|
||||
// Create the compiler host for type checking.
|
||||
const serviceHost: _ts.LanguageServiceHost = {
|
||||
@@ -278,7 +294,17 @@ export function register(opts: Options = {}): Register {
|
||||
memoryCache.fileContents.set(fileName, contents);
|
||||
};
|
||||
|
||||
getOutputTypeCheck = function (code: string, fileName: string) {
|
||||
/**
|
||||
* Create complete function with full language services (normal behavior for `tsc`)
|
||||
*/
|
||||
const getOutputTypeCheck: GetOutputFunction = (
|
||||
code: string,
|
||||
fileName: string
|
||||
) => {
|
||||
const outFile = outFiles.get(fileName);
|
||||
if (outFile) {
|
||||
return outFile;
|
||||
}
|
||||
updateMemoryCache(code, fileName);
|
||||
|
||||
const output = service.getEmitOutput(fileName);
|
||||
@@ -288,10 +314,7 @@ export function register(opts: Options = {}): Register {
|
||||
.getSemanticDiagnostics(fileName)
|
||||
.concat(service.getSyntacticDiagnostics(fileName));
|
||||
|
||||
const diagnosticList = filterDiagnostics(
|
||||
diagnostics,
|
||||
ignoreDiagnostics
|
||||
);
|
||||
const diagnosticList = filterDiagnostics(diagnostics, ignoreDiagnostics);
|
||||
|
||||
reportTSError(diagnosticList, config.options.noEmitOnError);
|
||||
|
||||
@@ -310,16 +333,18 @@ export function register(opts: Options = {}): Register {
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
const file = {
|
||||
code: output.outputFiles[1].text,
|
||||
map: output.outputFiles[0].text,
|
||||
};
|
||||
outFiles.set(fileName, file);
|
||||
return file;
|
||||
};
|
||||
}
|
||||
|
||||
build = { getOutput, getOutputTypeCheck };
|
||||
configFileToBuildMap.set(configFileName, build);
|
||||
return build;
|
||||
const getOutput = skipTypeCheck ? getOutputTranspile : getOutputTypeCheck;
|
||||
configFileToBuildMap.set(configFileName, getOutput);
|
||||
|
||||
return getOutput;
|
||||
}
|
||||
|
||||
// determine the tsconfig.json path for a given folder
|
||||
@@ -407,10 +432,8 @@ export function register(opts: Options = {}): Register {
|
||||
skipTypeCheck?: boolean
|
||||
): SourceOutput {
|
||||
const configFileName = detectConfig();
|
||||
const build = getBuild(configFileName);
|
||||
const { code: value, map: sourceMap } = (
|
||||
skipTypeCheck ? build.getOutput : build.getOutputTypeCheck
|
||||
)(code, fileName);
|
||||
const buildOutput = getBuild(configFileName, skipTypeCheck);
|
||||
const { code: value, map: sourceMap } = buildOutput(code, fileName);
|
||||
const output = {
|
||||
code: value,
|
||||
map: Object.assign(JSON.parse(sourceMap), {
|
||||
@@ -425,10 +448,7 @@ export function register(opts: Options = {}): Register {
|
||||
return compile;
|
||||
}
|
||||
|
||||
interface Build {
|
||||
getOutput(code: string, fileName: string): SourceOutput;
|
||||
getOutputTypeCheck(code: string, fileName: string): SourceOutput;
|
||||
}
|
||||
type GetOutputFunction = (code: string, fileName: string) => SourceOutput;
|
||||
|
||||
/**
|
||||
* Do post-processing on config options to support `ts-node`.
|
||||
|
||||
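The register() changes above replace the two-method Build record with a single memoized GetOutputFunction per tsconfig: the first compile under a given config file decides between transpile-only and full language-service type checking (via skipTypeCheck), and every later file under that config reuses the same function and its outFiles cache. The shape of that memoization, reduced to a sketch (makeTranspile and makeTypeCheck are stand-ins for the real ts.transpileModule and LanguageService paths):

type SourceOutput = { code: string; map: string };
type GetOutputFunction = (code: string, fileName: string) => SourceOutput;

const buildCache = new Map<string, GetOutputFunction>(); // mirrors configFileToBuildMap

function getBuildSketch(
  configFileName: string,
  skipTypeCheck: boolean,
  makeTranspile: () => GetOutputFunction, // hypothetical factory: transpile-only output
  makeTypeCheck: () => GetOutputFunction //  hypothetical factory: type-checked output
): GetOutputFunction {
  const cached = buildCache.get(configFileName);
  if (cached) return cached;
  const getOutput = skipTypeCheck ? makeTranspile() : makeTypeCheck();
  buildCache.set(configFileName, getOutput);
  return getOutput;
}

Note that the cache is keyed only by the config path, so whichever mode is chosen first for a given tsconfig is the one reused afterwards.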
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/python",
|
||||
"version": "3.1.28",
|
||||
"version": "3.1.29",
|
||||
"main": "./dist/index.js",
|
||||
"license": "MIT",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",
|
||||
@@ -22,7 +22,7 @@
|
||||
"devDependencies": {
|
||||
"@types/execa": "^0.9.0",
|
||||
"@types/jest": "27.4.1",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"execa": "^1.0.0",
|
||||
"typescript": "4.3.4"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/redwood",
|
||||
"version": "1.0.38",
|
||||
"version": "1.0.39",
|
||||
"main": "./dist/index.js",
|
||||
"license": "MIT",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
@@ -27,6 +27,6 @@
|
||||
"@types/aws-lambda": "8.10.19",
|
||||
"@types/node": "14.18.33",
|
||||
"@types/semver": "6.0.0",
|
||||
"@vercel/build-utils": "5.6.0"
|
||||
"@vercel/build-utils": "5.7.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/remix",
|
||||
"version": "1.1.0",
|
||||
"version": "1.1.1",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
@@ -25,7 +25,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "14.18.33",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"typescript": "4.6.4"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@vercel/ruby",
|
||||
"author": "Nathan Cahill <nathan@nathancahill.com>",
|
||||
"version": "1.3.44",
|
||||
"version": "1.3.45",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/ruby",
|
||||
@@ -22,7 +22,7 @@
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "8.0.0",
|
||||
"@types/semver": "6.0.0",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"execa": "2.0.4",
|
||||
"fs-extra": "^7.0.1",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/static-build",
|
||||
"version": "1.0.40",
|
||||
"version": "1.0.42",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/build-step",
|
||||
@@ -36,8 +36,8 @@
|
||||
"@types/ms": "0.7.31",
|
||||
"@types/node-fetch": "2.5.4",
|
||||
"@types/promise-timeout": "1.3.0",
|
||||
"@vercel/build-utils": "5.6.0",
|
||||
"@vercel/frameworks": "1.1.13",
|
||||
"@vercel/build-utils": "5.7.0",
|
||||
"@vercel/frameworks": "1.1.14",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"@vercel/routing-utils": "2.1.3",
|
||||
"@vercel/static-config": "2.0.6",
|
||||
|
||||
141
yarn.lock
@@ -2626,6 +2626,11 @@
|
||||
resolved "https://registry.yarnpkg.com/@types/bytes/-/bytes-3.0.0.tgz#549eeacd0a8fecfaa459334583a4edcee738e6db"
|
||||
integrity sha512-ZF43+CIIlzngQe8/Zo7L1kpY9W8O6rO006VDz3c5iM21ddtXWxCEyOXyft+q4pVF2tGqvrVuVrEDH1+gJEi1fQ==
|
||||
|
||||
"@types/bytes@3.1.1":
|
||||
version "3.1.1"
|
||||
resolved "https://registry.yarnpkg.com/@types/bytes/-/bytes-3.1.1.tgz#67a876422e660dc4c10a27f3e5bcfbd5455f01d0"
|
||||
integrity sha512-lOGyCnw+2JVPKU3wIV0srU0NyALwTBJlVSx5DfMQOFuuohA8y9S8orImpuIQikZ0uIQ8gehrRjxgQC1rLRi11w==
|
||||
|
||||
"@types/chance@1.1.3":
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/@types/chance/-/chance-1.1.3.tgz#d19fe9391288d60fdccd87632bfc9ab2b4523fea"
|
||||
@@ -4248,20 +4253,6 @@ boxen@^3.0.0:
|
||||
type-fest "^0.3.0"
|
||||
widest-line "^2.0.0"
|
||||
|
||||
boxen@^5.0.0:
|
||||
version "5.1.2"
|
||||
resolved "https://registry.yarnpkg.com/boxen/-/boxen-5.1.2.tgz#788cb686fc83c1f486dfa8a40c68fc2b831d2b50"
|
||||
integrity sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==
|
||||
dependencies:
|
||||
ansi-align "^3.0.0"
|
||||
camelcase "^6.2.0"
|
||||
chalk "^4.1.0"
|
||||
cli-boxes "^2.2.1"
|
||||
string-width "^4.2.2"
|
||||
type-fest "^0.20.2"
|
||||
widest-line "^3.1.0"
|
||||
wrap-ansi "^7.0.0"
|
||||
|
||||
brace-expansion@^1.1.7:
|
||||
version "1.1.11"
|
||||
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
|
||||
@@ -4395,6 +4386,11 @@ bytes@3.1.0:
|
||||
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6"
|
||||
integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==
|
||||
|
||||
bytes@3.1.2:
|
||||
version "3.1.2"
|
||||
resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5"
|
||||
integrity sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==
|
||||
|
||||
cac@^6.7.12:
|
||||
version "6.7.12"
|
||||
resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.12.tgz#6fb5ea2ff50bd01490dbda497f4ae75a99415193"
|
||||
@@ -4607,14 +4603,6 @@ chalk@^4.0.0:
|
||||
ansi-styles "^4.1.0"
|
||||
supports-color "^7.1.0"
|
||||
|
||||
chalk@^4.1.0:
|
||||
version "4.1.2"
|
||||
resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
|
||||
integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
|
||||
dependencies:
|
||||
ansi-styles "^4.1.0"
|
||||
supports-color "^7.1.0"
|
||||
|
||||
chance@1.1.7:
|
||||
version "1.1.7"
|
||||
resolved "https://registry.yarnpkg.com/chance/-/chance-1.1.7.tgz#e99dde5ac16681af787b5ba94c8277c090d6cfe8"
|
||||
@@ -4759,11 +4747,6 @@ cli-boxes@^2.2.0:
|
||||
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.0.tgz#538ecae8f9c6ca508e3c3c95b453fe93cb4c168d"
|
||||
integrity sha512-gpaBrMAizVEANOpfZp/EEUixTXDyGt7DFzdK5hU+UbWt/J0lB0w20ncZj59Z9a93xHb9u12zF5BS6i9RKbtg4w==
|
||||
|
||||
cli-boxes@^2.2.1:
|
||||
version "2.2.1"
|
||||
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f"
|
||||
integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==
|
||||
|
||||
cli-cursor@^2.0.0, cli-cursor@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5"
|
||||
@@ -5017,18 +5000,6 @@ configstore@^4.0.0:
|
||||
write-file-atomic "^2.0.0"
|
||||
xdg-basedir "^3.0.0"
|
||||
|
||||
configstore@^5.0.1:
|
||||
version "5.0.1"
|
||||
resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96"
|
||||
integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==
|
||||
dependencies:
|
||||
dot-prop "^5.2.0"
|
||||
graceful-fs "^4.1.2"
|
||||
make-dir "^3.0.0"
|
||||
unique-string "^2.0.0"
|
||||
write-file-atomic "^3.0.0"
|
||||
xdg-basedir "^4.0.0"
|
||||
|
||||
console-control-strings@^1.0.0, console-control-strings@~1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
|
||||
@@ -5270,11 +5241,6 @@ crypto-random-string@^1.0.0:
|
||||
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e"
|
||||
integrity sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=
|
||||
|
||||
crypto-random-string@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
|
||||
integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
|
||||
|
||||
css-select@^4.3.0:
|
||||
version "4.3.0"
|
||||
resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b"
|
||||
@@ -5662,7 +5628,7 @@ dot-prop@^4.1.0, dot-prop@^4.2.0:
|
||||
dependencies:
|
||||
is-obj "^1.0.0"
|
||||
|
||||
dot-prop@^5.1.0, dot-prop@^5.2.0:
|
||||
dot-prop@^5.1.0:
|
||||
version "5.2.0"
|
||||
resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.2.0.tgz#c34ecc29556dc45f1f4c22697b6f4904e0cc4fcb"
|
||||
integrity sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==
|
||||
@@ -6037,11 +6003,6 @@ escalade@^3.1.1:
|
||||
resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
|
||||
integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
|
||||
|
||||
escape-goat@^2.0.0:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675"
|
||||
integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==
|
||||
|
||||
escape-html@1.0.3, escape-html@~1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
|
||||
@@ -7126,13 +7087,6 @@ global-dirs@^0.1.0:
|
||||
dependencies:
|
||||
ini "^1.3.4"
|
||||
|
||||
global-dirs@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.0.tgz#70a76fe84ea315ab37b1f5576cbde7d48ef72686"
|
||||
integrity sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==
|
||||
dependencies:
|
||||
ini "2.0.0"
|
||||
|
||||
globals@^11.1.0:
|
||||
version "11.12.0"
|
||||
resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
|
||||
@@ -7616,11 +7570,6 @@ inherits@2.0.3:
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
|
||||
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
|
||||
|
||||
ini@2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5"
|
||||
integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==
|
||||
|
||||
ini@3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/ini/-/ini-3.0.0.tgz#2f6de95006923aa75feed8894f5686165adc08f1"
|
||||
@@ -7891,24 +7840,11 @@ is-installed-globally@^0.1.0:
|
||||
global-dirs "^0.1.0"
|
||||
is-path-inside "^1.0.0"
|
||||
|
||||
is-installed-globally@^0.4.0:
|
||||
version "0.4.0"
|
||||
resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.4.0.tgz#9a0fd407949c30f86eb6959ef1b7994ed0b7b520"
|
||||
integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==
|
||||
dependencies:
|
||||
global-dirs "^3.0.0"
|
||||
is-path-inside "^3.0.2"
|
||||
|
||||
is-npm@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-3.0.0.tgz#ec9147bfb629c43f494cf67936a961edec7e8053"
|
||||
integrity sha512-wsigDr1Kkschp2opC4G3yA6r9EgVA6NjRpWzIi9axXqeIaAATPRJc4uLujXe3Nd9uO8KoDyA4MD6aZSeXTADhA==
|
||||
|
||||
is-npm@^5.0.0:
|
||||
version "5.0.0"
|
||||
resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-5.0.0.tgz#43e8d65cc56e1b67f8d47262cf667099193f45a8"
|
||||
integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==
|
||||
|
||||
is-number@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195"
|
||||
@@ -7974,11 +7910,6 @@ is-path-inside@^3.0.1:
|
||||
resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017"
|
||||
integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==
|
||||
|
||||
is-path-inside@^3.0.2:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
|
||||
integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
|
||||
|
||||
is-plain-obj@^1.0.0, is-plain-obj@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
|
||||
@@ -9180,7 +9111,7 @@ kleur@^3.0.3:
|
||||
resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e"
|
||||
integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==
|
||||
|
||||
latest-version@^5.0.0, latest-version@^5.1.0:
|
||||
latest-version@^5.0.0:
|
||||
version "5.1.0"
|
||||
resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face"
|
||||
integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==
|
||||
@@ -11259,13 +11190,6 @@ punycode@^2.1.0, punycode@^2.1.1:
|
||||
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
|
||||
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
|
||||
|
||||
pupa@^2.1.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62"
|
||||
integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==
|
||||
dependencies:
|
||||
escape-goat "^2.0.0"
|
||||
|
||||
q@^1.5.1:
|
||||
version "1.5.1"
|
||||
resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
|
||||
@@ -11854,13 +11778,6 @@ semver-diff@^2.0.0:
|
||||
dependencies:
|
||||
semver "^5.0.3"
|
||||
|
||||
semver-diff@^3.1.1:
|
||||
version "3.1.1"
|
||||
resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b"
|
||||
integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==
|
||||
dependencies:
|
||||
semver "^6.3.0"
|
||||
|
||||
"semver@2 || 3 || 4 || 5", "semver@2.x || 3.x || 4 || 5", semver@^5.0.3, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0, semver@^5.7.0, semver@^5.7.1:
|
||||
version "5.7.1"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
|
||||
@@ -12365,7 +12282,7 @@ string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0:
|
||||
is-fullwidth-code-point "^3.0.0"
|
||||
strip-ansi "^6.0.0"
|
||||
|
||||
string-width@^4.2.2, string-width@^4.2.3:
|
||||
string-width@^4.2.3:
|
||||
version "4.2.3"
|
||||
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
|
||||
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
|
||||
@@ -13386,13 +13303,6 @@ unique-string@^1.0.0:
|
||||
dependencies:
|
||||
crypto-random-string "^1.0.0"
|
||||
|
||||
unique-string@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d"
|
||||
integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==
|
||||
dependencies:
|
||||
crypto-random-string "^2.0.0"
|
||||
|
||||
unique-temp-dir@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/unique-temp-dir/-/unique-temp-dir-1.0.0.tgz#6dce95b2681ca003eebfb304a415f9cbabcc5385"
|
||||
@@ -13448,26 +13358,6 @@ unset-value@^1.0.0:
|
||||
has-value "^0.3.1"
|
||||
isobject "^3.0.0"
|
||||
|
||||
update-notifier@5.1.0:
|
||||
version "5.1.0"
|
||||
resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-5.1.0.tgz#4ab0d7c7f36a231dd7316cf7729313f0214d9ad9"
|
||||
integrity sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==
|
||||
dependencies:
|
||||
boxen "^5.0.0"
|
||||
chalk "^4.1.0"
|
||||
configstore "^5.0.1"
|
||||
has-yarn "^2.1.0"
|
||||
import-lazy "^2.1.0"
|
||||
is-ci "^2.0.0"
|
||||
is-installed-globally "^0.4.0"
|
||||
is-npm "^5.0.0"
|
||||
is-yarn-global "^0.3.0"
|
||||
latest-version "^5.1.0"
|
||||
pupa "^2.1.1"
|
||||
semver "^7.3.4"
|
||||
semver-diff "^3.1.1"
|
||||
xdg-basedir "^4.0.0"
|
||||
|
||||
update-notifier@^3.0.1:
|
||||
version "3.0.1"
|
||||
resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-3.0.1.tgz#78ecb68b915e2fd1be9f767f6e298ce87b736250"
|
||||
@@ -13847,11 +13737,6 @@ xdg-basedir@^3.0.0:
|
||||
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4"
|
||||
integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=
|
||||
|
||||
xdg-basedir@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
|
||||
integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
|
||||
|
||||
xdg-portable@^7.0.0:
|
||||
version "7.2.1"
|
||||
resolved "https://registry.yarnpkg.com/xdg-portable/-/xdg-portable-7.2.1.tgz#4301ba0868b2cbc9de0c53b3699906adcc9d2560"