Compare commits


8 Commits

Author SHA1 Message Date
JJ Kasper
4c3bc05322 Publish Stable
- @vercel/build-utils@5.7.5
- vercel@28.11.1
- @vercel/client@12.2.26
- @vercel/fs-detectors@3.6.2
- @vercel/gatsby-plugin-vercel-analytics@1.0.1
- @vercel/go@2.2.24
- @vercel/hydrogen@0.0.38
- @vercel/next@3.3.9
- @vercel/node@2.8.6
- @vercel/python@3.1.34
- @vercel/redwood@1.0.45
- @vercel/remix@1.1.7
- @vercel/ruby@1.3.50
- @vercel/static-build@1.1.1
2023-01-11 12:01:03 -08:00
JJ Kasper
3f47587a8b [next] fix lambda creation when using edge runtime (#9204)
Co-authored-by: remorses <beats.by.morse@gmail.com>
2023-01-11 09:35:21 -08:00
Nathan Rajlich
84f93d8af4 [build-utils] Support directory entries in Lambda#createZip() (#9201)
Adds support for empty directory entries in the Lambda `createZip()` function.
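
A minimal sketch of what this enables, mirroring the new unit test later in this diff (the `createZip` import path and the file layout here are assumptions, not part of the change):

```ts
import { FileBlob } from '@vercel/build-utils';
// The repo's own tests import createZip from the package's lambda module;
// this exact path is an assumption for illustration.
import { createZip } from '@vercel/build-utils/dist/lambda';

const MODE_FILE = 33188; /* -rw-r--r-- */
const MODE_DIRECTORY = 16877; /* drwxr-xr-x */

async function buildZip(): Promise<Buffer> {
  return createZip({
    'index.js': new FileBlob({ data: 'module.exports = () => {};', mode: MODE_FILE }),
    // Before this change a directory-mode entry was streamed like a regular
    // file; now it is written via zipFile.addEmptyDirectory() instead.
    cache: new FileBlob({ data: '', mode: MODE_DIRECTORY }),
  });
}
```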
2023-01-11 10:21:42 +00:00
Chris Barber
e1aaf8080b [cli] Replace update-notifier dependency with built-in (#9098)
This PR replaces the `update-notifier` dependency with a custom implementation.

There are a few reasons: the dependency is quite large, it requires ESM in order to update, it can sometimes suggest an update to an older version, and it uses dependencies with known security issues.

The result looks like:

![Screenshot of the new update notification](https://user-images.githubusercontent.com/97262/208452226-b7508299-f830-4d42-a96a-7646ec8227aa.png)

Note: This PR is the successor to https://github.com/vercel/vercel/pull/8090.
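
Roughly, the replacement works as sketched below; this is condensed from the `src/index.ts` and `src/util/get-latest-version` changes later in this diff, with the log wording simplified:

```ts
import getLatestVersion from './util/get-latest-version';

const pkg = { name: 'vercel', version: '28.11.1' }; // normally the CLI's own package.json
const isCanary = pkg.version.includes('canary');

// Synchronous and cheap: getLatestVersion() reads a small JSON cache and, when
// the cache is missing or expired, spawns dist/get-latest-worker.js in the
// background to refresh it from the npm registry. It returns a version string
// only when a newer, not-yet-notified version is already cached.
const latest = getLatestVersion({
  pkg,
  distTag: isCanary ? 'canary' : 'latest',
  updateCheckInterval: 1000 * 60 * 60 * 24 * 7, // 1 week (the default)
});

if (latest) {
  console.log(`Update available: vercel@${latest}`);
}
```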
2023-01-11 03:45:36 +00:00
JJ Kasper
0857352967 [next] Fix dynamic routes order for app dir (#9202)
This ensures the RSC dynamic routes come before the HTML route, since the RSC route is more specific and the HTML route could otherwise match first unexpectedly.
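
For illustration only (simplified route shapes, not the builder's actual output), the ordering issue looks like this:

```ts
import type { Route } from '@vercel/routing-utils';

// The RSC route must come first: it is more specific (it matches the `.rsc`
// payload request), while the HTML route below would otherwise also match
// `/blog/some-post.rsc` and win.
const routes: Route[] = [
  { src: '^/blog/(?<slug>[^/]+)\\.rsc$', dest: '/blog/[slug].rsc?slug=$slug' },
  { src: '^/blog/(?<slug>[^/]+)$', dest: '/blog/[slug]?slug=$slug' },
];
```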

Test deployment with fix: https://discord-gmki6w031-vtest314-ijjk-testing.vercel.app/

Fixes: https://github.com/vercel/next.js/issues/44728
2023-01-11 00:24:28 +00:00
Chris Barber
f92d229a63 [cli] Rollback team check (#9120)
https://linear.app/vercel/issue/VCCLI-377/rollback-failing-for-enterprise-teams

When running `vc rollback` for a deployment belonging to another team, the command fails while requesting the rollback. Technically, it should have failed earlier, when fetching the deployment. This PR checks that the current team matches the team of the deployment being rolled back.
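
A condensed sketch of the new check (the full version is in `util/rollback/request-rollback.ts` later in this diff; error wording is simplified and the helper name is illustrative):

```ts
import chalk from 'chalk';
import type Client from '../client';
import type { Deployment, Team } from '../../types';
import getDeployment from '../get-deployment';
import getTeamById from '../teams/get-team-by-id';

// Returns 1 (error) when the deployment's team does not match the current scope.
async function checkDeploymentTeam(
  client: Client,
  contextName: string,
  deployId: string
): Promise<number> {
  const { config, output } = client;
  const team: Team | undefined = config.currentTeam
    ? await getTeamById(client, config.currentTeam)
    : undefined;
  const deployment: Deployment = await getDeployment(client, contextName, deployId);

  const sameScope = deployment.team?.id
    ? team !== undefined && deployment.team.id === team.id
    : team === undefined; // a personal-scope deployment requires a personal scope

  if (!sameScope) {
    output.error(`Deployment doesn't belong to current team ${chalk.bold(contextName)}`);
    output.error(`Use ${chalk.bold('vc switch')} to change your current team`);
    return 1;
  }
  return 0;
}
```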

![image](https://user-images.githubusercontent.com/97262/210431585-ffb73658-b15c-4adb-b110-a8c5e816db32.png)

This PR also cleans up a bunch of deployment-related things. There were three functions to get a deployment; now there is just one, which uses the latest v13 API. Get-deployment error handling now throws instead of returning an error. The `Deployment` type definition has been updated to match the v13 response, and the mock deployment data was updated as well.
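
The consolidated helper's shape, added as `util/get-deployment.ts` later in this diff; a minimal usage sketch (import paths and the host value are illustrative):

```ts
import type Client from './util/client';
import getDeployment from './util/get-deployment';
import {
  DeploymentNotFound,
  DeploymentPermissionDenied,
  InvalidDeploymentId,
} from './util/errors-ts';

// getDeployment(client, contextName, hostOrId) hits the v13 deployments API
// and throws typed errors instead of returning them, so callers use try/catch.
async function example(client: Client, contextName: string): Promise<number> {
  try {
    const deployment = await getDeployment(client, contextName, 'my-app.vercel.app');
    console.log(deployment.id, deployment.readyState);
    return 0;
  } catch (err: unknown) {
    if (
      err instanceof DeploymentNotFound ||
      err instanceof DeploymentPermissionDenied ||
      err instanceof InvalidDeploymentId
    ) {
      client.output.error(err.message);
      return 1;
    }
    throw err;
  }
}
```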
2023-01-10 21:05:08 +00:00
Steven
427a2a58cf [tests] Update turbo to 1.7.0-canary.9 (#9193)
Let's try out turbo canary

Co-authored-by: tknickman <tom.knickman@vercel.com>
2023-01-10 12:51:12 -05:00
Ethan Arrowood
ccb5f301ad [static-build] @vercel/static-build to use @vercel/gatsby-plugin-vercel-analytics (#9194)
Updates the `static-build` injector to use the new plugin.

Still need to verify somehow that the newly published plugin is working as expected. It should be fine since it was a copy-paste from the previous plugin repo, but always good to verify before we break everything! 

This PR also updates the README in `@vercel/gatsby-plugin-vercel-analytics`.
2023-01-10 15:22:31 +00:00
56 changed files with 1124 additions and 752 deletions

View File

@@ -36,7 +36,7 @@
"prettier": "2.6.2",
"ts-eager": "2.0.2",
"ts-jest": "28.0.5",
"turbo": "1.6.3"
"turbo": "1.7.0-canary.9"
},
"scripts": {
"lerna": "lerna",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "5.7.4",
"version": "5.7.5",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",

View File

@@ -9,8 +9,13 @@ export interface DownloadedFiles {
[filePath: string]: FileFsRef;
}
const S_IFMT = 61440; /* 0170000 type of file */
const S_IFDIR = 16384; /* 0040000 directory */
const S_IFLNK = 40960; /* 0120000 symbolic link */
const S_IFMT = 61440; /* 0170000 type of file */
export function isDirectory(mode: number): boolean {
return (mode & S_IFMT) === S_IFDIR;
}
export function isSymbolicLink(mode: number): boolean {
return (mode & S_IFMT) === S_IFLNK;

View File

@@ -3,7 +3,7 @@ import Sema from 'async-sema';
import { ZipFile } from 'yazl';
import minimatch from 'minimatch';
import { readlink } from 'fs-extra';
import { isSymbolicLink } from './fs/download';
import { isSymbolicLink, isDirectory } from './fs/download';
import streamToBuffer from './fs/stream-to-buffer';
import type { Files, Config } from './types';
@@ -200,6 +200,8 @@ export async function createZip(files: Files): Promise<Buffer> {
const symlinkTarget = symlinkTargets.get(name);
if (typeof symlinkTarget === 'string') {
zipFile.addBuffer(Buffer.from(symlinkTarget, 'utf8'), name, opts);
} else if (file.mode && isDirectory(file.mode)) {
zipFile.addEmptyDirectory(name, opts);
} else {
const stream = file.toStream();
stream.on('error', reject);

View File

@@ -0,0 +1,97 @@
import path from 'path';
import { tmpdir } from 'os';
import fs from 'fs-extra';
import { createZip } from '../src/lambda';
import { FileBlob, glob, spawnAsync } from '../src';
const MODE_DIRECTORY = 16877; /* drwxr-xr-x */
const MODE_FILE = 33188; /* -rw-r--r-- */
describe('Lambda', () => {
it('should create zip file with symlinks', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = await glob('**', path.join(__dirname, 'symlinks'));
expect(Object.keys(files)).toHaveLength(4);
const outFile = path.join(__dirname, 'symlinks.zip');
await fs.remove(outFile);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
await fs.mkdirp(outDir);
await fs.writeFile(outFile, await createZip(files));
await spawnAsync('unzip', [outFile], { cwd: outDir });
const [linkStat, linkDirStat, aStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
]);
expect(linkStat.isSymbolicLink()).toEqual(true);
expect(linkDirStat.isSymbolicLink()).toEqual(true);
expect(aStat.isFile()).toEqual(true);
});
it('should create zip file with empty directory', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const dir = await fs.mkdtemp(path.join(tmpdir(), 'create-zip-empty-dir'));
try {
const files = {
a: new FileBlob({
data: 'contents',
mode: MODE_FILE,
}),
empty: new FileBlob({
data: '',
mode: MODE_DIRECTORY,
}),
'b/a': new FileBlob({
data: 'inside dir b',
mode: MODE_FILE,
}),
c: new FileBlob({
data: '',
mode: MODE_DIRECTORY,
}),
'c/a': new FileBlob({
data: 'inside dir c',
mode: MODE_FILE,
}),
};
const outFile = path.join(dir, 'lambda.zip');
const outDir = path.join(dir, 'out');
await fs.mkdirp(outDir);
await fs.writeFile(outFile, await createZip(files));
await spawnAsync('unzip', [outFile], { cwd: outDir });
expect(fs.statSync(path.join(outDir, 'empty')).isDirectory()).toEqual(
true
);
expect(fs.statSync(path.join(outDir, 'b')).isDirectory()).toEqual(true);
expect(fs.statSync(path.join(outDir, 'c')).isDirectory()).toEqual(true);
expect(fs.readFileSync(path.join(outDir, 'a'), 'utf8')).toEqual(
'contents'
);
expect(fs.readFileSync(path.join(outDir, 'b/a'), 'utf8')).toEqual(
'inside dir b'
);
expect(fs.readFileSync(path.join(outDir, 'c/a'), 'utf8')).toEqual(
'inside dir c'
);
expect(fs.readdirSync(path.join(outDir, 'empty'))).toHaveLength(0);
} finally {
await fs.remove(dir);
}
});
});

View File

@@ -2,12 +2,10 @@ import ms from 'ms';
import path from 'path';
import fs, { readlink } from 'fs-extra';
import { strict as assert, strictEqual } from 'assert';
import { createZip } from '../src/lambda';
import { getSupportedNodeVersion } from '../src/fs/node-version';
import download from '../src/fs/download';
import {
glob,
spawnAsync,
getNodeVersion,
getLatestNodeVersion,
getDiscontinuedNodeVersions,
@@ -141,34 +139,6 @@ it('should re-create FileBlob symlinks properly', async () => {
strictEqual(linkTextContents, 'a.txt');
});
it('should create zip files with symlinks properly', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');
return;
}
const files = await glob('**', path.join(__dirname, 'symlinks'));
assert.equal(Object.keys(files).length, 4);
const outFile = path.join(__dirname, 'symlinks.zip');
await fs.remove(outFile);
const outDir = path.join(__dirname, 'symlinks-out');
await fs.remove(outDir);
await fs.mkdirp(outDir);
await fs.writeFile(outFile, await createZip(files));
await spawnAsync('unzip', [outFile], { cwd: outDir });
const [linkStat, linkDirStat, aStat] = await Promise.all([
fs.lstat(path.join(outDir, 'link.txt')),
fs.lstat(path.join(outDir, 'link-dir')),
fs.lstat(path.join(outDir, 'a.txt')),
]);
assert(linkStat.isSymbolicLink());
assert(linkDirStat.isSymbolicLink());
assert(aStat.isFile());
});
it('should download symlinks even with incorrect file', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on windows');

View File

@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "28.11.0",
"version": "28.11.1",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -41,17 +41,16 @@
"node": ">= 14"
},
"dependencies": {
"@vercel/build-utils": "5.7.4",
"@vercel/go": "2.2.23",
"@vercel/hydrogen": "0.0.37",
"@vercel/next": "3.3.8",
"@vercel/node": "2.8.5",
"@vercel/python": "3.1.33",
"@vercel/redwood": "1.0.44",
"@vercel/remix": "1.1.6",
"@vercel/ruby": "1.3.49",
"@vercel/static-build": "1.1.0",
"update-notifier": "5.1.0"
"@vercel/build-utils": "5.7.5",
"@vercel/go": "2.2.24",
"@vercel/hydrogen": "0.0.38",
"@vercel/next": "3.3.9",
"@vercel/node": "2.8.6",
"@vercel/python": "3.1.34",
"@vercel/redwood": "1.0.45",
"@vercel/remix": "1.1.7",
"@vercel/ruby": "1.3.50",
"@vercel/static-build": "1.1.1"
},
"devDependencies": {
"@alex_neo/jest-expect-message": "1.0.5",
@@ -93,12 +92,13 @@
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@types/yauzl-promise": "2.1.0",
"@vercel/client": "12.2.25",
"@vercel/client": "12.2.26",
"@vercel/error-utils": "1.0.3",
"@vercel/frameworks": "1.1.18",
"@vercel/fs-detectors": "3.6.1",
"@vercel/fs-detectors": "3.6.2",
"@vercel/fun": "1.0.4",
"@vercel/ncc": "0.24.0",
"@vercel/routing-utils": "2.1.3",
"@zeit/source-map-support": "0.6.2",
"ajv": "6.12.2",
"alpha-sort": "2.0.1",

View File

@@ -1,7 +1,7 @@
import cpy from 'cpy';
import execa from 'execa';
import { join } from 'path';
import { remove, writeFile } from 'fs-extra';
import { remove, readJSON, writeFile } from 'fs-extra';
const dirRoot = join(__dirname, '..');
const distRoot = join(dirRoot, 'dist');
@@ -43,15 +43,15 @@ async function main() {
stdio: 'inherit',
});
const pkg = await readJSON(join(dirRoot, 'package.json'));
const dependencies = Object.keys(pkg?.dependencies ?? {});
// Do the initial `ncc` build
console.log();
const args = [
'ncc',
'build',
'--external',
'update-notifier',
'src/index.ts',
];
console.log('Dependencies:', dependencies);
const externs = [];
for (const dep of dependencies) {
externs.push('--external', dep);
}
const args = ['ncc', 'build', 'src/index.ts', ...externs];
await execa('yarn', args, { stdio: 'inherit', cwd: dirRoot });
// `ncc` has some issues with `@vercel/fun`'s runtime files:
@@ -78,6 +78,10 @@ async function main() {
// Band-aid to bundle stuff that `ncc` neglects to bundle
await cpy(join(dirRoot, 'src/util/projects/VERCEL_DIR_README.txt'), distRoot);
await cpy(join(dirRoot, 'src/util/dev/builder-worker.js'), distRoot);
await cpy(
join(dirRoot, 'src/util/get-latest-version/get-latest-worker.js'),
distRoot
);
console.log('Finished building Vercel CLI');
}

View File

@@ -6,7 +6,7 @@ import { Output } from '../../util/output';
import * as ERRORS from '../../util/errors-ts';
import assignAlias from '../../util/alias/assign-alias';
import Client from '../../util/client';
import getDeploymentByIdOrHost from '../../util/deploy/get-deployment-by-id-or-host';
import getDeployment from '../../util/get-deployment';
import { getDeploymentForAlias } from '../../util/alias/get-deployment-by-alias';
import getScope from '../../util/get-scope';
import setupDomain from '../../util/domains/setup-domain';
@@ -136,36 +136,13 @@ export default async function set(
const [deploymentIdOrHost, aliasTarget] = args;
const deployment = handleCertError(
output,
await getDeploymentByIdOrHost(client, contextName, deploymentIdOrHost)
await getDeployment(client, contextName, deploymentIdOrHost)
);
if (deployment === 1) {
return deployment;
}
if (deployment instanceof ERRORS.DeploymentNotFound) {
output.error(
`Failed to find deployment "${deployment.meta.id}" under ${chalk.bold(
contextName
)}`
);
return 1;
}
if (deployment instanceof ERRORS.DeploymentPermissionDenied) {
output.error(
`No permission to access deployment "${
deployment.meta.id
}" under ${chalk.bold(deployment.meta.context)}`
);
return 1;
}
if (deployment instanceof ERRORS.InvalidDeploymentId) {
output.error(deployment.message);
return 1;
}
if (deployment === null) {
output.error(
`Couldn't find a deployment to alias. Please provide one as an argument.`

View File

@@ -15,17 +15,11 @@ import Client from '../../util/client';
import { getPkgName } from '../../util/pkg-name';
import { Deployment, PaginationOptions } from '../../types';
import { normalizeURL } from '../../util/bisect/normalize-url';
interface DeploymentV6
extends Pick<
Deployment,
'url' | 'target' | 'projectId' | 'ownerId' | 'meta' | 'inspectorUrl'
> {
createdAt: number;
}
import getScope from '../../util/get-scope';
import getDeployment from '../../util/get-deployment';
interface Deployments {
deployments: DeploymentV6[];
deployments: Deployment[];
pagination: PaginationOptions;
}
@@ -63,6 +57,8 @@ const help = () => {
export default async function main(client: Client): Promise<number> {
const { output } = client;
const scope = await getScope(client);
const { contextName } = scope;
const argv = getArgs(client.argv.slice(2), {
'--bad': String,
@@ -145,7 +141,9 @@ export default async function main(client: Client): Promise<number> {
output.spinner('Retrieving deployments…');
// `getDeployment` cannot be parallelized because it might prompt for login
const badDeployment = await getDeployment(client, bad).catch(err => err);
const badDeployment = await getDeployment(client, contextName, bad).catch(
err => err
);
if (badDeployment) {
if (badDeployment instanceof Error) {
@@ -162,7 +160,9 @@ export default async function main(client: Client): Promise<number> {
}
// `getDeployment` cannot be parallelized because it might prompt for login
const goodDeployment = await getDeployment(client, good).catch(err => err);
const goodDeployment = await getDeployment(client, contextName, good).catch(
err => err
);
if (goodDeployment) {
if (goodDeployment instanceof Error) {
@@ -204,7 +204,7 @@ export default async function main(client: Client): Promise<number> {
}
// Fetch all the project's "READY" deployments with the pagination API
let deployments: DeploymentV6[] = [];
let deployments: Deployment[] = [];
let next: number | undefined = badDeployment.createdAt + 1;
do {
const query = new URLSearchParams();
@@ -279,7 +279,7 @@ export default async function main(client: Client): Promise<number> {
const commit = getCommit(deployment);
if (commit) {
const shortSha = commit.sha.substring(0, 7);
const firstLine = commit.message.split('\n')[0];
const firstLine = commit.message?.split('\n')[0];
output.log(`${chalk.bold('Commit:')} [${shortSha}] ${firstLine}`);
}
@@ -356,7 +356,7 @@ export default async function main(client: Client): Promise<number> {
const commit = getCommit(lastBad);
if (commit) {
const shortSha = commit.sha.substring(0, 7);
const firstLine = commit.message.split('\n')[0];
const firstLine = commit.message?.split('\n')[0];
result.push(` ${chalk.bold('Commit:')} [${shortSha}] ${firstLine}`);
}
@@ -368,18 +368,7 @@ export default async function main(client: Client): Promise<number> {
return 0;
}
function getDeployment(
client: Client,
hostname: string
): Promise<DeploymentV6> {
const query = new URLSearchParams();
query.set('url', hostname);
query.set('resolve', '1');
query.set('noState', '1');
return client.fetch<DeploymentV6>(`/v10/deployments/get?${query}`);
}
function getCommit(deployment: DeploymentV6) {
function getCommit(deployment: Deployment) {
const sha =
deployment.meta?.githubCommitSha ||
deployment.meta?.gitlabCommitSha ||

View File

@@ -19,15 +19,13 @@ import toHumanPath from '../../util/humanize-path';
import Now from '../../util';
import stamp from '../../util/output/stamp';
import createDeploy from '../../util/deploy/create-deploy';
import getDeploymentByIdOrHost from '../../util/deploy/get-deployment-by-id-or-host';
import getDeployment from '../../util/get-deployment';
import parseMeta from '../../util/parse-meta';
import linkStyle from '../../util/output/link';
import param from '../../util/output/param';
import {
BuildsRateLimited,
DeploymentNotFound,
DeploymentPermissionDenied,
InvalidDeploymentId,
DomainNotFound,
DomainNotVerified,
DomainPermissionDenied,
@@ -629,21 +627,8 @@ export default async (client: Client): Promise<number> => {
return 1;
}
const deploymentResponse = await getDeploymentByIdOrHost(
client,
contextName,
deployment.id,
'v10'
);
if (
deploymentResponse instanceof DeploymentNotFound ||
deploymentResponse instanceof DeploymentPermissionDenied ||
deploymentResponse instanceof InvalidDeploymentId
) {
output.error(deploymentResponse.message);
return 1;
}
// get the deployment just to double check that it actually deployed
await getDeployment(client, contextName, deployment.id);
if (deployment === null) {
error('Uploading failed. Please try again.');

View File

@@ -9,12 +9,10 @@ import { handleError } from '../util/error';
import getScope from '../util/get-scope';
import { getPkgName, getCommandName } from '../util/pkg-name';
import Client from '../util/client';
import { getDeployment } from '../util/get-deployment';
import { Deployment } from '@vercel/client';
import { Build } from '../types';
import getDeployment from '../util/get-deployment';
import { Build, Deployment } from '../types';
import title from 'title';
import { isErrnoException } from '@vercel/error-utils';
import { isAPIError } from '../util/errors-ts';
import { URL } from 'url';
const help = () => {
@@ -49,7 +47,6 @@ const help = () => {
};
export default async function main(client: Client) {
let deployment;
let argv;
try {
@@ -101,30 +98,11 @@ export default async function main(client: Client) {
);
// resolve the deployment, since we might have been given an alias
try {
deployment = await getDeployment(client, deploymentIdOrHost);
} catch (err: unknown) {
if (isAPIError(err)) {
if (err.status === 404) {
error(
`Failed to find deployment "${deploymentIdOrHost}" in ${chalk.bold(
contextName
)}`
);
return 1;
}
if (err.status === 403) {
error(
`No permission to access deployment "${deploymentIdOrHost}" in ${chalk.bold(
contextName
)}`
);
return 1;
}
}
// unexpected
throw err;
}
const deployment = await getDeployment(
client,
contextName,
deploymentIdOrHost
);
const {
id,
@@ -138,11 +116,11 @@ export default async function main(client: Client) {
const { builds } =
deployment.version === 2
? await client.fetch<{ builds: Build[] }>(`/v1/deployments/${id}/builds`)
? await client.fetch<{ builds: Build[] }>(`/v11/deployments/${id}/builds`)
: { builds: [] };
log(
`Fetched deployment ${chalk.bold(url)} in ${chalk.bold(
`Fetched deployment "${chalk.bold(url)}" in ${chalk.bold(
contextName
)} ${elapsed(Date.now() - depFetchStart)}`
);
@@ -163,7 +141,7 @@ export default async function main(client: Client) {
}
print('\n\n');
if (aliases.length > 0) {
if (aliases !== undefined && aliases.length > 0) {
print(chalk.bold(' Aliases\n\n'));
let aliasList = '';
for (const alias of aliases) {
@@ -202,8 +180,6 @@ function stateString(s: Deployment['readyState']) {
switch (s) {
case 'INITIALIZING':
case 'BUILDING':
case 'DEPLOYING':
case 'ANALYZING':
return chalk.yellow(CIRCLE) + sTitle;
case 'ERROR':
return chalk.red(CIRCLE) + sTitle;

View File

@@ -7,8 +7,7 @@ import getScope from '../util/get-scope';
import { getPkgName } from '../util/pkg-name';
import getArgs from '../util/get-args';
import Client from '../util/client';
import { getDeployment } from '../util/get-deployment';
import { isAPIError } from '../util/errors-ts';
import getDeployment from '../util/get-deployment';
const help = () => {
console.log(`
@@ -125,28 +124,9 @@ export default async function main(client: Client) {
let deployment;
try {
deployment = await getDeployment(client, id);
} catch (err: unknown) {
deployment = await getDeployment(client, contextName, id);
} finally {
output.stopSpinner();
if (isAPIError(err)) {
if (err.status === 404) {
output.error(
`Failed to find deployment "${id}" in ${chalk.bold(contextName)}`
);
return 1;
}
if (err.status === 403) {
output.error(
`No permission to access deployment "${id}" in ${chalk.bold(
contextName
)}`
);
return 1;
}
}
// unexpected
throw err;
}
output.log(

View File

@@ -11,7 +11,7 @@ import getScope from '../util/get-scope';
import { isValidName } from '../util/is-valid-name';
import removeProject from '../util/projects/remove-project';
import getProjectByIdOrName from '../util/projects/get-project-by-id-or-name';
import getDeploymentByIdOrHost from '../util/deploy/get-deployment-by-id-or-host';
import getDeployment from '../util/get-deployment';
import getDeploymentsByProjectId, {
DeploymentPartial,
} from '../util/deploy/get-deployments-by-project-id';
@@ -133,7 +133,7 @@ export default async function main(client: Client) {
id =>
d &&
!(d instanceof NowError) &&
(d.uid === id || d.name === id || d.url === normalizeURL(id))
(d.id === id || d.name === id || d.url === normalizeURL(id))
);
const [deploymentList, projectList] = await Promise.all<any>([
@@ -142,7 +142,7 @@ export default async function main(client: Client) {
if (!contextName) {
throw new Error('Context name is not defined');
}
return getDeploymentByIdOrHost(client, contextName, idOrHost);
return getDeployment(client, contextName, idOrHost).catch(err => err);
})
),
Promise.all(
@@ -180,7 +180,7 @@ export default async function main(client: Client) {
aliases = await Promise.all(
deployments.map(async depl => {
const { aliases } = await getAliases(client, depl.uid);
const { aliases } = await getAliases(client, depl.id);
return aliases;
})
);
@@ -238,7 +238,7 @@ export default async function main(client: Client) {
const start = Date.now();
await Promise.all<any>([
...deployments.map(depl => now.remove(depl.uid, { hard })),
...deployments.map(depl => now.remove(depl.id, { hard })),
...projects.map(project => removeProject(client, project.id)),
]);
@@ -275,9 +275,9 @@ function readConfirmation(
const deploymentTable = table(
deployments.map(depl => {
const time = chalk.gray(`${ms(Date.now() - depl.created)} ago`);
const time = chalk.gray(`${ms(Date.now() - depl.createdAt)} ago`);
const url = depl.url ? chalk.underline(`https://${depl.url}`) : '';
return [` ${depl.uid}`, url, time];
return [` ${depl.id}`, url, time];
}),
{ align: ['l', 'r', 'l'], hsep: ' '.repeat(6) }
);

View File

@@ -18,7 +18,7 @@ import sourceMap from '@zeit/source-map-support';
import { mkdirp } from 'fs-extra';
import chalk from 'chalk';
import epipebomb from 'epipebomb';
import updateNotifier from 'update-notifier';
import getLatestVersion from './util/get-latest-version';
import { URL } from 'url';
import * as Sentry from '@sentry/node';
import hp from './util/humanize-path';
@@ -55,13 +55,6 @@ import { VercelConfig } from '@vercel/client';
const isCanary = pkg.version.includes('canary');
// Checks for available update and returns an instance
const notifier = updateNotifier({
pkg,
distTag: isCanary ? 'canary' : 'latest',
updateCheckInterval: 1000 * 60 * 60 * 24 * 7, // 1 week
});
const VERCEL_DIR = getGlobalPathConfig();
const VERCEL_CONFIG_PATH = configFiles.getConfigFilePath();
const VERCEL_AUTH_CONFIG_PATH = configFiles.getAuthConfigFilePath();
@@ -149,22 +142,26 @@ const main = async () => {
}
// Print update information, if available
if (notifier.update && notifier.update.latest !== pkg.version && isTTY) {
const { latest } = notifier.update;
console.log(
info(
if (isTTY && !process.env.NO_UPDATE_NOTIFIER) {
// Check if an update is available. If so, `latest` will contain a string
// of the latest version, otherwise `undefined`.
const latest = getLatestVersion({
distTag: isCanary ? 'canary' : 'latest',
output,
pkg,
});
if (latest) {
output.log(
`${chalk.black.bgCyan('UPDATE AVAILABLE')} ` +
`Run ${cmd(
await getUpdateCommand()
)} to install ${getTitleName()} CLI ${latest}`
)
);
);
console.log(
info(
`Changelog: https://github.com/vercel/vercel/releases/tag/vercel@${latest}`
)
);
output.log(
`Changelog: https://github.com/vercel/vercel/releases/tag/vercel@${latest}\n`
);
}
}
// The second argument to the command can be:

View File

@@ -1,4 +1,6 @@
import type { BuilderFunctions } from '@vercel/build-utils';
import type { Readable, Writable } from 'stream';
import type { Route } from '@vercel/routing-utils';
export type ProjectSettings = import('@vercel/build-utils').ProjectSettings;
@@ -116,32 +118,105 @@ export type Cert = {
expiration: string;
};
type RouteOrMiddleware =
| Route
| {
src: string;
continue: boolean;
middleware: 0;
};
export type Deployment = {
uid: string;
url: string;
alias?: string[];
aliasAssigned?: boolean | null | number;
aliasError?: null | { code: string; message: string };
aliasFinal?: string | null;
aliasWarning?: null | {
code: string;
message: string;
link?: string;
action?: string;
};
bootedAt?: number;
build?: { env: string[] };
builds?: { use: string; src?: string; config?: { [key: string]: any } };
buildErrorAt?: number;
buildingAt: number;
canceledAt?: number;
checksState?: 'completed' | 'registered' | 'running';
checksConclusion?: 'canceled' | 'failed' | 'skipped' | 'succeeded';
createdAt: number;
createdIn?: string;
creator: { uid: string; username?: string };
env?: string[];
errorCode?: string;
errorLink?: string;
errorMessage?: string | null;
errorStep?: string;
functions?: BuilderFunctions | null;
gitSource?: {
org?: string;
owner?: string;
prId?: number | null;
projectId: number;
ref?: string | null;
repoId?: number;
repoUuid: string;
sha?: string;
slug?: string;
type: string;
workspaceUuid: string;
};
id: string;
initReadyAt?: number;
inspectorUrl?: string | null;
lambdas?: Build[];
meta?: {
[key: string]: string | undefined;
};
monorepoManager?: string | null;
name: string;
type: 'LAMBDAS';
state:
ownerId?: string;
plan?: 'enterprise' | 'hobby' | 'oss' | 'pro';
previewCommentsEnabled?: boolean;
projectId?: string;
projectSettings?: {
buildCommand?: string | null;
devCommand?: string | null;
framework?: string;
installCommand?: string | null;
outputDirectory?: string | null;
};
public: boolean;
ready?: number;
readyState:
| 'BUILDING'
| 'ERROR'
| 'INITIALIZING'
| 'QUEUED'
| 'READY'
| 'CANCELED';
version?: number;
created: number;
createdAt: number;
ready?: number;
buildingAt?: number;
creator: { uid: string; username: string };
target: string | null;
ownerId: string;
projectId: string;
inspectorUrl: string;
meta: {
[key: string]: any;
regions: string[];
routes?: RouteOrMiddleware[] | null;
source?: 'cli' | 'git' | 'import' | 'import/repo' | 'clone/repo';
status:
| 'BUILDING'
| 'ERROR'
| 'INITIALIZING'
| 'QUEUED'
| 'READY'
| 'CANCELED';
target?: 'staging' | 'production' | null;
team?: {
id: string;
name: string;
slug: string;
};
alias?: string[];
ttyBuildLogs?: boolean;
type: 'LAMBDAS';
url: string;
userAliases?: string[];
version: 2;
};
export type Alias = {

View File

@@ -1,4 +1,4 @@
import { Deployment } from '../../types';
import type { Deployment } from '../../types';
import { Output } from '../output';
import Client from '../client';
import createAlias from './create-alias';

View File

@@ -1,4 +1,4 @@
import { Deployment } from '../../types';
import type { Deployment } from '../../types';
import { Output } from '../output';
import * as ERRORS from '../errors-ts';
import Client from '../client';
@@ -62,7 +62,7 @@ async function performCreateAlias(
) {
try {
return await client.fetch<AliasRecord>(
`/now/deployments/${deployment.uid}/aliases`,
`/now/deployments/${deployment.id}/aliases`,
{
method: 'POST',
body: { alias },
@@ -79,7 +79,7 @@ async function performCreateAlias(
if (err.code === 'deployment_not_found') {
return new ERRORS.DeploymentNotFound({
context: contextName,
id: deployment.uid,
id: deployment.id,
});
}
if (err.code === 'gone') {

View File

@@ -5,7 +5,7 @@ import { Output } from '../output';
import { User } from '../../types';
import { VercelConfig } from '../dev/types';
import getDeploymentsByAppName from '../deploy/get-deployments-by-appname';
import getDeploymentByIdOrHost from '../deploy/get-deployment-by-id-or-host';
import getDeployment from '../get-deployment';
async function getAppLastDeployment(
output: Output,
@@ -22,7 +22,7 @@ async function getAppLastDeployment(
// Try to fetch deployment details
if (deploymentItem) {
return getDeploymentByIdOrHost(client, contextName, deploymentItem.uid);
return await getDeployment(client, contextName, deploymentItem.uid);
}
return null;
@@ -42,13 +42,11 @@ export async function getDeploymentForAlias(
// When there are no args at all we try to get the targets from the config
if (args.length === 2) {
const [deploymentId] = args;
const deployment = await getDeploymentByIdOrHost(
client,
contextName,
deploymentId
);
output.stopSpinner();
return deployment;
try {
return await getDeployment(client, contextName, deploymentId);
} finally {
output.stopSpinner();
}
}
const appName =
@@ -59,13 +57,15 @@ export async function getDeploymentForAlias(
return null;
}
const deployment = await getAppLastDeployment(
output,
client,
appName,
user,
contextName
);
output.stopSpinner();
return deployment;
try {
return await getAppLastDeployment(
output,
client,
appName,
user,
contextName
);
} finally {
output.stopSpinner();
}
}

View File

@@ -1,78 +0,0 @@
import type Client from '../client';
import toHost from '../to-host';
import { Deployment } from '../../types';
import {
DeploymentNotFound,
DeploymentPermissionDenied,
InvalidDeploymentId,
isAPIError,
} from '../errors-ts';
import mapCertError from '../certs/map-cert-error';
type APIVersion = 'v5' | 'v10';
export default async function getDeploymentByIdOrHost(
client: Client,
contextName: string,
idOrHost: string,
apiVersion: APIVersion = 'v5'
) {
try {
const { deployment } =
idOrHost.indexOf('.') !== -1
? await getDeploymentByHost(
client,
toHost(idOrHost) as string,
apiVersion
)
: await getDeploymentById(client, idOrHost, apiVersion);
return deployment;
} catch (err: unknown) {
if (isAPIError(err)) {
if (err.status === 404) {
return new DeploymentNotFound({ id: idOrHost, context: contextName });
}
if (err.status === 403) {
return new DeploymentPermissionDenied(idOrHost, contextName);
}
if (err.status === 400 && err.message.includes('`id`')) {
return new InvalidDeploymentId(idOrHost);
}
const certError = mapCertError(err);
if (certError) {
return certError;
}
}
throw err;
}
}
async function getDeploymentById(
client: Client,
id: string,
apiVersion: APIVersion
) {
const deployment = await client.fetch<Deployment>(
`/${apiVersion}/now/deployments/${encodeURIComponent(id)}`
);
return { deployment };
}
type Response = {
id: string;
};
async function getDeploymentByHost(
client: Client,
host: string,
apiVersion: APIVersion
) {
const response = await client.fetch<Response>(
`/v10/now/deployments/get?url=${encodeURIComponent(
host
)}&resolve=1&noState=1`
);
return getDeploymentById(client, response.id, apiVersion);
}

View File

@@ -1,19 +0,0 @@
import { NowError } from '../now-error';
import Client from '../client';
import getDeploymentByIdOrHost from './get-deployment-by-id-or-host';
export default async function getDeploymentByIdOrThrow(
client: Client,
contextName: string,
idOrHost: string
) {
const deployment = await getDeploymentByIdOrHost(
client,
contextName,
idOrHost
);
if (deployment instanceof NowError) {
throw deployment;
}
return deployment;
}

View File

@@ -6,7 +6,7 @@ type Response = {
deployments: DeploymentPartial[];
};
export interface DeploymentPartial {
uid: string;
id: string;
name: string;
url: string;
created: number;

View File

@@ -7,7 +7,8 @@ import jsonlines from 'jsonlines';
import { eraseLines } from 'ansi-escapes';
import Client from './client';
import { getDeployment } from './get-deployment';
import getDeployment from './get-deployment';
import getScope from './get-scope';
export interface FindOpts {
direction: 'forward' | 'backward';
@@ -37,6 +38,7 @@ async function printEvents(
{ mode, onEvent, quiet, findOpts }: PrintEventsOptions
) {
const { log, debug } = client.output;
const { contextName } = await getScope(client);
// we keep track of how much we log in case we
// drop the connection and have to start over
@@ -74,7 +76,11 @@ async function printEvents(
poller = (function startPoller() {
return setTimeout(async () => {
try {
const json = await getDeployment(client, deploymentIdOrURL);
const json = await getDeployment(
client,
contextName,
deploymentIdOrURL
);
if (json.readyState === 'READY') {
stream.end();
finish();

View File

@@ -1,27 +1,53 @@
import { stringify } from 'querystring';
import { Deployment } from '@vercel/client';
import Client from './client';
import type Client from './client';
import {
DeploymentNotFound,
DeploymentPermissionDenied,
InvalidDeploymentId,
isAPIError,
} from './errors-ts';
import type { Deployment } from '../types';
import mapCertError from './certs/map-cert-error';
import toHost from './to-host';
export async function getDeployment(
/**
* Retrieves a v13 deployment.
*
* @param client - The Vercel CLI client instance.
* @param contextName - The scope context/team name.
* @param hostOrId - A deployment host or id.
* @returns The deployment information.
*/
export default async function getDeployment(
client: Client,
contextName: string,
hostOrId: string
): Promise<Deployment> {
let url = `/v13/deployments`;
if (hostOrId.includes('.')) {
let host = hostOrId.replace(/^https:\/\//i, '');
if (host.slice(-1) === '/') {
host = host.slice(0, -1);
}
url += `/get?${stringify({
url: host,
})}`;
} else {
url += `/${encodeURIComponent(hostOrId)}`;
hostOrId = toHost(hostOrId);
}
const deployment = await client.fetch<Deployment>(url);
return deployment;
try {
return await client.fetch<Deployment>(
`/v13/deployments/${encodeURIComponent(hostOrId)}`
);
} catch (err: unknown) {
if (isAPIError(err)) {
if (err.status === 404) {
throw new DeploymentNotFound({ id: hostOrId, context: contextName });
}
if (err.status === 403) {
throw new DeploymentPermissionDenied(hostOrId, contextName);
}
if (err.status === 400 && err.message.includes('`id`')) {
throw new InvalidDeploymentId(hostOrId);
}
const certError = mapCertError(err);
if (certError) {
throw certError;
}
}
throw err;
}
}

View File

@@ -0,0 +1,223 @@
/**
* This file is spawned in the background and checks npm for the latest version
* of the CLI, then writes the version to the cache file.
*
* NOTE: Since this file runs asynchronously in the background, it's possible
* for multiple instances of this file to be running at the same time leading
* to a race condition where the most recent instance will overwrite the
* previous cache file, resetting the `notified` flag and causing the update
* notification to appear for multiple consecutive commands. Not the end of
* the world, but something to be aware of.
*
* IMPORTANT! This file must NOT depend on any 3rd party dependencies. This
* file is NOT bundled by `ncc` and thus any 3rd party dependencies will never
* be available.
*/
const https = require('https');
const { mkdirSync, writeFileSync } = require('fs');
const { access, mkdir, readFile, unlink, writeFile } = require('fs/promises');
const path = require('path');
const { format, inspect } = require('util');
/**
* A simple output helper which accumulates error and debug log messages in
* memory for potential persistence to disk while immediately outputting errors
* and debug messages, when the `--debug` flag is set, to `stderr`.
*/
class WorkerOutput {
debugLog = [];
logFile = null;
constructor({ debug = true }) {
this.debugOutputEnabled = debug;
}
debug(...args) {
this.print('debug', args);
}
error(...args) {
this.print('error', args);
}
print(type, args) {
const str = format(
...args.map(s => (typeof s === 'string' ? s : inspect(s)))
);
this.debugLog.push(`[${new Date().toISOString()}] [${type}] ${str}`);
if (type === 'debug' && this.debugOutputEnabled) {
console.error(`> [debug] [${new Date().toISOString()}] ${str}`);
} else if (type === 'error') {
console.error(`Error: ${str}`);
}
}
setLogFile(file) {
// wire up the exit handler the first time the log file is set
if (this.logFile === null) {
process.on('exit', () => {
if (this.debugLog.length) {
mkdirSync(path.dirname(this.logFile), { recursive: true });
writeFileSync(this.logFile, this.debugLog.join('\n'));
}
});
}
this.logFile = file;
}
}
const output = new WorkerOutput({
// enable the debug logging if the `--debug` is set or if this worker script
// was directly executed
debug: process.argv.includes('--debug') || !process.connected,
});
process.on('unhandledRejection', err => {
output.error('Exiting worker due to unhandled rejection:', err);
process.exit(1);
});
// this timer will prevent this worker process from running longer than 10s
const timer = setTimeout(() => {
output.error('Worker timed out after 10 seconds');
process.exit(1);
}, 10000);
// wait for the parent to give us the work payload
process.once('message', async msg => {
output.debug('Received message from parent:', msg);
output.debug('Disconnecting from parent');
process.disconnect();
const { cacheFile, distTag, name, updateCheckInterval } = msg;
const cacheFileParsed = path.parse(cacheFile);
await mkdir(cacheFileParsed.dir, { recursive: true });
output.setLogFile(
path.join(cacheFileParsed.dir, `${cacheFileParsed.name}.log`)
);
const lockFile = path.join(
cacheFileParsed.dir,
`${cacheFileParsed.name}.lock`
);
try {
// check for a lock file and either bail if running or write our pid and continue
output.debug(`Checking lock file: ${lockFile}`);
if (await isRunning(lockFile)) {
output.debug('Worker already running, exiting');
process.exit(1);
}
output.debug(`Initializing lock file with pid ${process.pid}`);
await writeFile(lockFile, String(process.pid), 'utf-8');
// fetch the latest version from npm
const agent = new https.Agent({
keepAlive: true,
maxSockets: 15, // See: `npm config get maxsockets`
});
const headers = {
accept:
'application/vnd.npm.install-v1+json; q=1.0, application/json; q=0.8, */*',
};
const url = `https://registry.npmjs.org/-/package/${name}/dist-tags`;
output.debug(`Fetching ${url}`);
const tags = await new Promise((resolve, reject) => {
const req = https.get(
url,
{
agent,
headers,
},
res => {
let buf = '';
res.on('data', chunk => {
buf += chunk;
});
res.on('end', () => {
try {
resolve(JSON.parse(buf));
} catch (err) {
reject(err);
}
});
}
);
req.on('error', reject);
req.end();
});
const version = tags[distTag];
if (version) {
output.debug(`Found dist tag "${distTag}" with version "${version}"`);
} else {
output.error(`Dist tag "${distTag}" not found`);
output.debug('Available dist tags:', Object.keys(tags));
}
output.debug(`Writing cache file: ${cacheFile}`);
await writeFile(
cacheFile,
JSON.stringify({
expireAt: Date.now() + updateCheckInterval,
notified: false,
version,
})
);
} catch (err) {
output.error(`Failed to get package info:`, err);
} finally {
clearTimeout(timer);
if (await fileExists(lockFile)) {
output.debug(`Releasing lock file: ${lockFile}`);
await unlink(lockFile);
}
output.debug(`Worker finished successfully!`);
// force the worker to exit
process.exit(0);
}
});
// signal the parent process we're ready
if (process.connected) {
output.debug("Notifying parent we're ready");
process.send({ type: 'ready' });
} else {
console.error('No IPC bridge detected, exiting');
process.exit(1);
}
async function fileExists(file) {
return access(file)
.then(() => true)
.catch(() => false);
}
async function isRunning(lockFile) {
try {
const pid = parseInt(await readFile(lockFile, 'utf-8'));
output.debug(`Found lock file with pid: ${pid}`);
// checks for existence of a process; throws if not found
process.kill(pid, 0);
// process is still running
return true;
} catch (err) {
if (await fileExists(lockFile)) {
// lock file does not exist or process is not running and pid is stale
output.debug(`Resetting lock file: ${err.toString()}`);
await unlink(lockFile);
}
return false;
}
}

View File

@@ -0,0 +1,151 @@
import semver from 'semver';
import XDGAppPaths from 'xdg-app-paths';
import { dirname, parse as parsePath, resolve as resolvePath } from 'path';
import type { Output } from '../output';
import { existsSync, outputJSONSync, readJSONSync } from 'fs-extra';
import type { PackageJson } from '@vercel/build-utils';
import { spawn } from 'child_process';
interface GetLatestVersionOptions {
cacheDir?: string;
distTag?: string;
output?: Output;
pkg: PackageJson;
updateCheckInterval?: number;
}
interface PackageInfoCache {
version: string;
expireAt: number;
notified: boolean;
}
interface GetLatestWorkerPayload {
cacheFile?: string;
distTag?: string;
updateCheckInterval?: number;
name?: string;
}
/**
* Determines if it needs to check for a newer CLI version and returns the last
* detected version. The version could be stale, but still newer than the
* current version.
*
* @returns {String|undefined} If a newer version is found, then the latest
* version, otherwise `undefined`.
*/
export default function getLatestVersion({
cacheDir = XDGAppPaths('com.vercel.cli').cache(),
distTag = 'latest',
output,
pkg,
updateCheckInterval = 1000 * 60 * 60 * 24 * 7, // 1 week
}: GetLatestVersionOptions): string | undefined {
if (
!pkg ||
typeof pkg !== 'object' ||
!pkg.name ||
typeof pkg.name !== 'string'
) {
throw new TypeError('Expected package to be an object with a package name');
}
const cacheFile = resolvePath(
cacheDir,
'package-updates',
`${pkg.name}-${distTag}.json`
);
let cache: PackageInfoCache | undefined;
try {
cache = readJSONSync(cacheFile);
} catch (err: any) {
// cache does not exist or malformed
if (err.code !== 'ENOENT') {
output?.debug(`Error reading latest package cache file: ${err}`);
}
}
if (!cache || cache.expireAt < Date.now()) {
spawnWorker(
{
cacheFile,
distTag,
updateCheckInterval,
name: pkg.name,
},
output
);
}
if (
cache &&
!cache.notified &&
pkg.version &&
semver.lt(pkg.version, cache.version)
) {
cache.notified = true;
outputJSONSync(cacheFile, cache);
return cache.version;
}
}
/**
* Spawn the worker, wait for the worker to report it's ready, then signal the
* worker to fetch the latest version.
*/
function spawnWorker(
payload: GetLatestWorkerPayload,
output: Output | undefined
) {
// we need to find the update worker script since the location is
// different based on production vs tests
let dir = dirname(__filename);
let script = resolvePath(dir, 'dist', 'get-latest-worker.js');
const { root } = parsePath(dir);
while (!existsSync(script)) {
dir = dirname(dir);
if (dir === root) {
// didn't find it, bail
output?.debug('Failed to find the get latest worker script!');
return;
}
script = resolvePath(dir, 'dist', 'get-latest-worker.js');
}
// spawn the worker with an IPC channel
output?.debug(`Spawning ${script}`);
const args = [script];
if (output?.debugEnabled) {
args.push('--debug');
}
const worker = spawn(process.execPath, args, {
stdio: ['inherit', 'inherit', 'inherit', 'ipc'],
windowsHide: true,
});
// we allow the child 2 seconds to let us know it's ready before we give up
const workerReadyTimer = setTimeout(() => worker.kill(), 2000);
// listen for an early on close error, but then we remove it when unref
const onClose = (code: number) => {
output?.debug(`Get latest worker exited (code ${code})`);
};
worker.on('close', onClose);
// generally, the parent won't be around long enough to handle a non-zero
// worker process exit code
worker.on('error', err => {
output?.log(`Failed to spawn get latest worker: ${err.stack}`);
});
// wait for the worker to start and notify us it is ready
worker.once('message', () => {
clearTimeout(workerReadyTimer);
worker.removeListener('close', onClose);
worker.send(payload);
worker.unref();
});
}

View File

@@ -1,38 +0,0 @@
import type Client from '../client';
import type { Deployment } from '../../types';
import getDeploymentByIdOrHost from '../deploy/get-deployment-by-id-or-host';
import handleCertError from '../certs/handle-cert-error';
/**
* Attempts to find the deployment by name or id.
* @param {Client} client - The Vercel client instance
* @param {string} contextName - The scope name
* @param {string} deployId - The deployment name or id to rollback
* @returns {Promise<Deployment>} Resolves an exit code or deployment info
*/
export default async function getDeploymentInfo(
client: Client,
contextName: string,
deployId: string
): Promise<Deployment> {
const deployment = handleCertError(
client.output,
await getDeploymentByIdOrHost(client, contextName, deployId)
);
if (deployment === 1) {
throw new Error(
`Failed to get deployment "${deployId}" in scope "${contextName}"`
);
}
if (deployment instanceof Error) {
throw deployment;
}
if (!deployment) {
throw new Error(`Couldn't find the deployment "${deployId}"`);
}
return deployment;
}

View File

@@ -1,11 +1,12 @@
import chalk from 'chalk';
import type Client from '../client';
import type { Deployment, Project, Team } from '../../types';
import { getCommandName } from '../pkg-name';
import getDeploymentInfo from './get-deployment-info';
import getDeployment from '../get-deployment';
import getScope from '../get-scope';
import getTeamById from '../teams/get-team-by-id';
import { isValidName } from '../is-valid-name';
import ms from 'ms';
import type { Project } from '../../types';
import rollbackStatus from './status';
/**
@@ -27,7 +28,7 @@ export default async function requestRollback({
project: Project;
timeout?: string;
}): Promise<number> {
const { output } = client;
const { config, output } = client;
const { contextName } = await getScope(client);
if (!isValidName(deployId)) {
@@ -37,27 +38,65 @@ export default async function requestRollback({
return 1;
}
output.spinner(
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}`
);
let deployment: Deployment;
let team: Team | undefined;
let deployment;
try {
deployment = await getDeploymentInfo(client, contextName, deployId);
} catch (err: any) {
output.error(err?.toString() || err);
return 1;
} finally {
output.stopSpinner();
output.spinner(
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}`
);
const [teamResult, deploymentResult] = await Promise.allSettled([
config.currentTeam ? getTeamById(client, config.currentTeam) : undefined,
getDeployment(client, contextName, deployId),
]);
if (teamResult.status === 'rejected') {
output.error(`Failed to retrieve team information: ${teamResult.reason}`);
return 1;
}
if (deploymentResult.status === 'rejected') {
output.error(deploymentResult.reason);
return 1;
}
team = teamResult.value;
deployment = deploymentResult.value;
// re-render the spinner text because it goes so fast
output.log(
`Fetching deployment "${deployId}" in ${chalk.bold(contextName)}`
);
} finally {
output.stopSpinner();
}
if (deployment.team?.id) {
if (!team || deployment.team.id !== team.id) {
output.error(
team
? `Deployment doesn't belong to current team ${chalk.bold(
contextName
)}`
: `Deployment belongs to a different team`
);
output.error(
`Use ${chalk.bold('vc switch')} to change your current team`
);
return 1;
}
} else if (team) {
output.error(
`Deployment doesn't belong to current team ${chalk.bold(contextName)}`
);
output.error(`Use ${chalk.bold('vc switch')} to change your current team`);
return 1;
}
// create the rollback
await client.fetch<any>(
`/v9/projects/${project.id}/rollback/${deployment.uid}`,
`/v9/projects/${project.id}/rollback/${deployment.id}`,
{
body: {}, // required
method: 'POST',
@@ -68,7 +107,7 @@ export default async function requestRollback({
output.log(
`Successfully requested rollback of ${chalk.bold(project.name)} to ${
deployment.url
} (${deployment.uid})`
} (${deployment.id})`
);
output.log(`To check rollback status, run ${getCommandName('rollback')}.`);
return 0;

View File

@@ -8,7 +8,7 @@ import type {
} from '../../types';
import elapsed from '../output/elapsed';
import formatDate from '../format-date';
import getDeploymentInfo from './get-deployment-info';
import getDeployment from '../get-deployment';
import getScope from '../get-scope';
import ms from 'ms';
import renderAliasStatus from './render-alias-status';
@@ -168,8 +168,7 @@ async function renderJobFailed({
try {
const name = (
deployment ||
(await getDeploymentInfo(client, contextName, toDeploymentId))
deployment || (await getDeployment(client, contextName, toDeploymentId))
)?.url;
output.error(
`Failed to remap all aliases to the requested deployment ${name} (${toDeploymentId})`
@@ -228,13 +227,10 @@ async function renderJobSucceeded({
}) {
const { output } = client;
// attempt to get the new deployment url
let deploymentInfo = '';
try {
const deployment = await getDeploymentInfo(
client,
contextName,
toDeploymentId
);
const deployment = await getDeployment(client, contextName, toDeploymentId);
deploymentInfo = `${chalk.bold(deployment.url)} (${toDeploymentId})`;
} catch (err: any) {
output.debug(

View File

@@ -1,20 +1,8 @@
// Native
import { parse } from 'url';
/**
* Converts a valid deployment lookup parameter to a hostname.
* `http://google.com` => google.com
* google.com => google.com
*/
function toHost(url: string): string {
if (/^https?:\/\//.test(url)) {
return parse(url).host!;
}
// Remove any path if present
// `a.b.c/` => `a.b.c`
return url.replace(/(\/\/)?([^/]+)(.*)/, '$2');
export default function toHost(url: string): string {
return url.replace(/^(?:.*?\/\/)?([^/]+).*/, '$1');
}
export default toHost;

View File

@@ -1,23 +1,30 @@
import { URL } from 'url';
import chance from 'chance';
import { Deployment } from '@vercel/client';
import { client } from './client';
import { Build, User } from '../../src/types';
import { Build, Deployment, User } from '../../src/types';
import type { Request, Response } from 'express';
let deployments = new Map<string, Deployment>();
let deploymentBuilds = new Map<Deployment, Build[]>();
let alreadySetupDeplomentEndpoints = false;
type State = Deployment['readyState'];
/**
* Initializes a mock deployment and wires up the deployment endpoint
* scenarios.
*/
export function useDeployment({
creator,
state = 'READY',
createdAt,
}: {
creator: Pick<User, 'id' | 'email' | 'name' | 'username'>;
state?: State;
state?:
| 'BUILDING'
| 'ERROR'
| 'INITIALIZING'
| 'QUEUED'
| 'READY'
| 'CANCELED';
createdAt?: number;
}) {
setupDeploymentEndpoints();
@@ -28,35 +35,34 @@ export function useDeployment({
const id = `dpl_${chance().guid()}`;
const deployment: Deployment = {
id,
url: url.hostname,
name,
meta: {},
regions: [],
routes: [],
plan: 'hobby',
public: false,
version: 2,
createdAt,
createdIn: 'sfo1',
buildingAt: Date.now(),
ownerId: creator.id,
creator: {
uid: creator.id,
email: creator.email,
name: creator.name,
username: creator.username,
},
readyState: state,
state: state,
ready: createdAt + 30000,
env: {},
build: { env: {} },
target: 'production',
alias: [],
aliasAssigned: true,
aliasError: null,
build: { env: [] },
buildingAt: Date.now(),
createdAt,
createdIn: 'sfo1',
creator: {
uid: creator.id,
username: creator.username,
},
env: [],
id,
inspectorUrl: `https://vercel.com/${creator.name}/${id}`,
meta: {},
name,
ownerId: creator.id,
plan: 'hobby',
public: false,
ready: createdAt + 30000,
readyState: state,
regions: [],
routes: [],
status: state,
target: 'production',
type: 'LAMBDAS',
url: url.hostname,
version: 2,
};
deployments.set(deployment.id, deployment);
@@ -108,17 +114,18 @@ beforeEach(() => {
alreadySetupDeplomentEndpoints = false;
});
function setupDeploymentEndpoints() {
function setupDeploymentEndpoints(): void {
if (alreadySetupDeplomentEndpoints) {
return;
}
alreadySetupDeplomentEndpoints = true;
client.scenario.get('/:version/deployments/:id', (req, res) => {
client.scenario.get('/v13/deployments/:id', (req, res) => {
const { id } = req.params;
const { url } = req.query;
let deployment;
if (id === 'get') {
if (typeof url !== 'string') {
res.statusCode = 400;
@@ -127,65 +134,26 @@ function setupDeploymentEndpoints() {
deployment = Array.from(deployments.values()).find(d => {
return d.url === url;
});
} else if (id.includes('.')) {
deployment = Array.from(deployments.values()).find(d => {
return d.url === id;
});
} else {
// lookup by ID
deployment = deployments.get(id);
}
if (!deployment) {
res.statusCode = 404;
return res.json({
error: { code: 'not_found', message: 'Deployment not found', id },
});
}
res.json(deployment);
});
client.scenario.get('/v5/now/deployments/:id', (req, res) => {
const { id } = req.params;
const { url } = req.query;
let deployment;
if (id === 'get') {
if (typeof url !== 'string') {
res.statusCode = 400;
return res.json({ error: { code: 'bad_request' } });
}
deployment = Array.from(deployments.values()).find(d => {
return d.url === url;
});
} else {
// lookup by ID
deployment = deployments.get(id);
}
if (!deployment) {
res.statusCode = 404;
return res.json({
error: { code: 'not_found', message: 'Deployment not found', id },
});
}
res.json({
uid: deployment.id,
url: deployment.url,
name: '',
type: 'LAMBDAS',
state: 'READY',
version: deployment.version,
created: deployment.createdAt,
ready: deployment.ready,
buildingAt: deployment.buildingAt,
creator: {
uid: deployment.creator?.uid,
username: deployment.creator?.username,
},
target: deployment.target,
ownerId: undefined, // ?
projectId: undefined, // ?
inspectorUrl: deployment.inspectorUrl,
meta: {},
alias: deployment.alias,
});
});
client.scenario.get('/:version/deployments/:id/builds', (req, res) => {
client.scenario.get('/v11/deployments/:id/builds', (req, res) => {
const { id } = req.params;
const deployment = deployments.get(id);
if (!deployment) {
@@ -200,7 +168,7 @@ function setupDeploymentEndpoints() {
const currentDeployments = Array.from(deployments.values()).sort(
(a: Deployment, b: Deployment) => {
// sort in reverse chronological order
return b.createdAt - a.createdAt;
return (b?.createdAt || 0) - (a?.createdAt || 0);
}
);

View File

@@ -7,7 +7,7 @@ import {
} from '../../src/types';
import { formatProvider } from '../../src/util/git/connect-git-provider';
import { parseEnvironment } from '../../src/commands/pull';
import { Env } from '@vercel/build-utils/dist';
import type { Env } from '@vercel/build-utils';
const envs: ProjectEnvVariable[] = [
{
@@ -124,6 +124,7 @@ export const defaultProject = {
{
alias: ['foobar.com'],
aliasAssigned: 1571239348998,
buildingAt: 1571239348998,
createdAt: 1571239348998,
createdIn: 'sfo1',
deploymentHostname: 'a-project-name-rjtr4pz3f',

View File

@@ -9,10 +9,10 @@ describe('inspect', () => {
const deployment = useDeployment({ creator: user });
client.setArgv('inspect', deployment.url);
const exitCode = await inspect(client);
expect(exitCode).toEqual(0);
await expect(client.stderr).toOutput(
`> Fetched deployment ${deployment.url} in ${user.username}`
`> Fetched deployment "${deployment.url}" in ${user.username}`
);
expect(exitCode).toEqual(0);
});
it('should strip the scheme of a url', async () => {
@@ -22,7 +22,7 @@ describe('inspect', () => {
const exitCode = await inspect(client);
expect(exitCode).toEqual(0);
await expect(client.stderr).toOutput(
`> Fetched deployment ${deployment.url} in ${user.username}`
`> Fetched deployment "${deployment.url}" in ${user.username}`
);
});
@@ -30,10 +30,8 @@ describe('inspect', () => {
const user = useUser();
useDeployment({ creator: user });
client.setArgv('inspect', 'bad.com');
const exitCode = await inspect(client);
expect(exitCode).toEqual(1);
await expect(client.stderr).toOutput(
`Error: Failed to find deployment "bad.com" in ${user.username}\n`
await expect(inspect(client)).rejects.toThrow(
`Can't find the deployment "bad.com" under the context "${user.username}"`
);
});
});

View File

@@ -265,6 +265,27 @@ describe('rollback', () => {
await expect(exitCodePromise).resolves.toEqual(0);
});
it('should error if deployment belongs to different team', async () => {
const { cwd, previousDeployment } = initRollbackTest();
previousDeployment.team = {
id: 'abc',
name: 'abc',
slug: 'abc',
};
client.setArgv('rollback', previousDeployment.id, '--yes', '--cwd', cwd);
const exitCodePromise = rollback(client);
await expect(client.stderr).toOutput('Retrieving project…');
await expect(client.stderr).toOutput(
`Fetching deployment "${previousDeployment.id}" in ${previousDeployment.creator?.username}`
);
await expect(client.stderr).toOutput(
'Error: Deployment belongs to a different team'
);
await expect(exitCodePromise).resolves.toEqual(1);
});
});
type RollbackAlias = {
@@ -330,7 +351,7 @@ function initRollbackTest({
let counter = 0;
client.scenario.get(`/v9/projects/${project.id}`, (req, res) => {
client.scenario.get(`/:version/projects/${project.id}`, (req, res) => {
const data = { ...project };
if (req.query?.rollbackInfo === 'true') {
if (lastRollbackTarget && counter++ > rollbackPollCount) {
@@ -341,18 +362,6 @@ function initRollbackTest({
res.json(data);
});
client.scenario.get(`/:version/now/deployments/get`, (req, res) => {
const { url } = req.query;
if (url === previousDeployment.url) {
res.json({ id: previousDeployment.id });
} else {
res.statusCode = 404;
res.json({
error: { code: 'not_found', message: 'Deployment not found' },
});
}
});
client.scenario.get(
'/:version/projects/:project/rollback/aliases',
(req, res) => {

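For reference, a minimal sketch of the team check exercised by the new rollback test above; the helper name, types, and error handling here are hypothetical, and the CLI's real implementation differs.

// Minimal sketch, assuming only that a deployment may carry a team reference
// and that a mismatch with the current team should abort the rollback.
interface TeamRef {
  id: string;
  name: string;
  slug: string;
}

interface DeploymentLike {
  id: string;
  team?: TeamRef;
}

function assertDeploymentTeam(
  deployment: DeploymentLike,
  currentTeamId: string | undefined
): void {
  // Personal-account deployments have no team; only a mismatched team id fails.
  if (deployment.team && deployment.team.id !== currentTeamId) {
    throw new Error('Deployment belongs to a different team');
  }
}

try {
  // Mirrors the scenario above: the deployment's team is "abc" while the
  // current context is some other (hypothetical) team.
  assertDeploymentTeam(
    { id: 'dpl_123', team: { id: 'abc', name: 'abc', slug: 'abc' } },
    'team_current'
  );
} catch (err) {
  console.error((err as Error).message); // "Deployment belongs to a different team"
}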
View File

@@ -0,0 +1,140 @@
import fs from 'fs-extra';
import sleep from '../../../src/util/sleep';
import tmp from 'tmp-promise';
import getLatestVersion from '../../../src/util/get-latest-version';
import { join } from 'path';
tmp.setGracefulCleanup();
const cacheDir = tmp.tmpNameSync({
prefix: 'test-vercel-cli-get-latest-version-',
});
const cacheFile = join(cacheDir, 'package-updates', 'vercel-latest.json');
const pkg = {
name: 'vercel',
version: '27.3.0',
};
const versionRE = /^\d+\.\d+\.\d+$/;
describe('get latest version', () => {
afterEach(() => fs.remove(cacheDir));
it('should find newer version async', async () => {
// 1. first call, no cache file
let latest = getLatestVersion({
cacheDir,
pkg,
});
expect(latest).toEqual(undefined);
await waitForCacheFile();
let cache = await fs.readJSON(cacheFile);
expect(typeof cache).toEqual('object');
expect(typeof cache.expireAt).toEqual('number');
expect(cache.expireAt).toBeGreaterThan(Date.now());
expect(typeof cache.version).toEqual('string');
expect(cache.version).toEqual(expect.stringMatching(versionRE));
expect(cache.notified).toEqual(false);
// 2. call again and this time it'll return the version from the cache
latest = getLatestVersion({
cacheDir,
pkg,
});
expect(typeof latest).toBe('string');
expect(latest).toEqual(expect.stringMatching(versionRE));
cache = await fs.readJSON(cacheFile);
expect(cache.version).toEqual(expect.stringMatching(versionRE));
expect(cache.notified).toEqual(true);
// 3. notification already done, should skip
latest = getLatestVersion({
cacheDir,
pkg,
});
expect(latest).toEqual(undefined);
});
it('should not find a newer version', async () => {
// 1. first call, no cache file
let latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg: {
...pkg,
version: '999.0.0',
},
});
expect(latest).toEqual(undefined);
await waitForCacheFile();
// 2. call again and should recheck and still not find a new version
latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg: {
...pkg,
version: '999.0.0',
},
});
expect(latest).toEqual(undefined);
});
it('should not check twice', async () => {
// 1. first call, no cache file
let latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg,
});
expect(latest).toEqual(undefined);
// 2. immediately call again, but should hopefully still be undefined
latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg,
});
expect(latest).toEqual(undefined);
await waitForCacheFile();
// 3. call again and should recheck and find a new version
latest = getLatestVersion({
cacheDir,
updateCheckInterval: 1,
pkg,
});
expect(typeof latest).toBe('string');
expect(latest).toEqual(expect.stringMatching(versionRE));
});
it('should error if no arguments are passed in', () => {
expect(() => getLatestVersion(undefined as any)).toThrow(TypeError);
});
it('should error package is invalid', () => {
expect(() => getLatestVersion({} as any)).toThrow(TypeError);
expect(() => getLatestVersion({ pkg: null as any })).toThrow(TypeError);
expect(() => getLatestVersion({ pkg: {} })).toThrow(TypeError);
expect(() => getLatestVersion({ pkg: { name: null as any } })).toThrow(
TypeError
);
expect(() => getLatestVersion({ pkg: { name: '' } })).toThrow(TypeError);
});
});
async function waitForCacheFile() {
for (let i = 0; i < 40; i++) {
await sleep(100);
if (await fs.pathExists(cacheFile)) {
return;
}
}
}
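To make the behavior these tests pin down easier to follow, here is a minimal sketch of a cache-backed version check, assuming only the cache shape asserted above ({ version, expireAt, notified } under <cacheDir>/package-updates/<name>-latest.json). The function and registry lookup below are illustrative, not the CLI's actual src/util/get-latest-version, which performs its refresh asynchronously after returning.

import fs from 'fs-extra';
import semver from 'semver';
import { join } from 'path';

interface VersionCache {
  version: string;
  expireAt: number;
  notified: boolean;
}

interface Options {
  cacheDir: string;
  pkg: { name: string; version: string };
  updateCheckInterval?: number;
}

// Minimal sketch of the behavior exercised above; not the real implementation.
function getLatestVersionSketch({
  cacheDir,
  pkg,
  updateCheckInterval = 1000 * 60 * 60 * 24,
}: Options): string | undefined {
  if (!pkg || typeof pkg.name !== 'string' || pkg.name.length === 0) {
    throw new TypeError('Expected a valid package name');
  }

  const cacheFile = join(cacheDir, 'package-updates', `${pkg.name}-latest.json`);
  const cache: VersionCache | null = fs.readJSONSync(cacheFile, {
    throws: false,
  });

  if (!cache || cache.expireAt < Date.now()) {
    // No cache or it expired: refresh in the background, report nothing yet.
    void refreshCache(cacheFile, pkg.name, updateCheckInterval);
    return undefined;
  }

  if (!cache.notified && semver.gt(cache.version, pkg.version)) {
    // A newer version is cached and not yet announced: announce exactly once.
    fs.outputJSONSync(cacheFile, { ...cache, notified: true });
    return cache.version;
  }

  return undefined;
}

// One way to look up the latest published version (assumes Node 18+ global
// fetch and the npm registry dist-tags endpoint).
async function refreshCache(
  cacheFile: string,
  name: string,
  updateCheckInterval: number
): Promise<void> {
  try {
    const res = await fetch(`https://registry.npmjs.org/-/package/${name}/dist-tags`);
    const tags = (await res.json()) as { latest: string };
    fs.outputJSONSync(cacheFile, {
      version: tags.latest,
      expireAt: Date.now() + updateCheckInterval,
      notified: false,
    });
  } catch {
    // Ignore network failures; the next invocation simply tries again.
  }
}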

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/client",
"version": "12.2.25",
"version": "12.2.26",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"homepage": "https://vercel.com",
@@ -43,7 +43,7 @@
]
},
"dependencies": {
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/routing-utils": "2.1.3",
"@zeit/fetch": "5.2.0",
"async-retry": "1.2.3",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/fs-detectors",
"version": "3.6.1",
"version": "3.6.2",
"description": "Vercel filesystem detectors",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -35,7 +35,7 @@
"@types/minimatch": "3.0.5",
"@types/node": "14.18.33",
"@types/semver": "7.3.10",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"typescript": "4.3.4"
}
}

View File

@@ -1,4 +1,4 @@
# gatsby-plugin-vercel
# @vercel/gatsby-plugin-vercel-analytics
---
@@ -15,13 +15,13 @@ This plugin sends [Core Web Vitals](https://web.dev/vitals/) to Vercel Analytics
## Install
```bash
npm i gatsby-plugin-vercel
npm i @vercel/gatsby-plugin-vercel-analytics
```
or
```bash
yarn add gatsby-plugin-vercel
yarn add @vercel/gatsby-plugin-vercel-analytics
```
## Usage
@@ -31,7 +31,7 @@ yarn add gatsby-plugin-vercel
module.exports = {
plugins: [
{
resolve: "gatsby-plugin-vercel",
resolve: "@vercel/gatsby-plugin-vercel-analytics",
options: {
// (optional) Prints metrics in the console when true
debug: false,

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/gatsby-plugin-vercel-analytics",
"version": "1.0.0",
"version": "1.0.1",
"description": "Track Core Web Vitals in Gatsby projects with Vercel Analytics.",
"main": "index.js",
"files": [

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/go",
"version": "2.2.23",
"version": "2.2.24",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
@@ -35,7 +35,7 @@
"@types/jest": "28.1.6",
"@types/node-fetch": "^2.3.0",
"@types/tar": "^4.0.0",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/ncc": "0.24.0",
"async-retry": "1.3.1",
"execa": "^1.0.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/hydrogen",
"version": "0.0.37",
"version": "0.0.38",
"license": "MIT",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",
@@ -21,7 +21,7 @@
"devDependencies": {
"@types/jest": "27.5.1",
"@types/node": "14.18.33",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/static-config": "2.0.6",
"typescript": "4.6.4"
}

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/next",
"version": "3.3.8",
"version": "3.3.9",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
@@ -45,7 +45,7 @@
"@types/semver": "6.0.0",
"@types/text-table": "0.2.1",
"@types/webpack-sources": "3.2.0",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/nft": "0.22.5",
"@vercel/routing-utils": "2.1.3",
"async-sema": "3.0.1",

View File

@@ -2662,7 +2662,7 @@ async function getServerlessPages(params: {
for (const edgeFunctionFile of Object.keys(
middlewareManifest?.functions ?? {}
)) {
const edgePath = edgeFunctionFile.slice(1) + '.js';
const edgePath = (edgeFunctionFile.slice(1) || 'index') + '.js';
delete normalizedAppPaths[edgePath];
delete pages[edgePath];
}
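A small illustration of why the 'index' fallback above is needed; the manifest keys below are hypothetical, but a bare '/' key shows how stripping the leading slash alone would yield an empty name, so the edge-runtime entry would not be removed from the pages that are otherwise built as Lambdas.

// Hypothetical middleware-manifest function keys, for illustration only.
const edgeFunctionFiles = ['/index', '/test/api/hello', '/'];

for (const edgeFunctionFile of edgeFunctionFiles) {
  const before = edgeFunctionFile.slice(1) + '.js';             // '/' -> '.js'
  const after = (edgeFunctionFile.slice(1) || 'index') + '.js'; // '/' -> 'index.js'
  console.log({ edgeFunctionFile, before, after });
}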

View File

@@ -1143,15 +1143,15 @@ export async function serverBuild({
if (appDir) {
for (const route of dynamicRoutes) {
completeDynamicRoutes.push(route);
completeDynamicRoutes.push({
...route,
src: route.src.replace(
new RegExp(escapeStringRegexp('(?:/)?$')),
'(?:\\.rsc)?(?:/)?$'
'(?:\\.rsc)(?:/)?$'
),
dest: route.dest?.replace(/($|\?)/, '.rsc$1'),
});
completeDynamicRoutes.push(route);
}
} else {
completeDynamicRoutes.push(...dynamicRoutes);

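To make the reordering concrete, a sketch with a hypothetical dynamic route (the route shape and paths are invented; escapeStringRegexp is the same escape-string-regexp helper the builder imports). The .rsc variant is pushed first and its suffix is now mandatory, so RSC requests match it while plain HTML requests fall through to the second, broader route.

import escapeStringRegexp from 'escape-string-regexp';

// Hypothetical dynamic route, not actual builder output.
const route = {
  src: '^/dynamic/([^/]+?)/([^/]+?)(?:/)?$',
  dest: '/dynamic/[category]/[id]?category=$1&id=$2',
};

const completeDynamicRoutes: Array<typeof route> = [];

// RSC route first: the '.rsc' suffix is required, so it is the more specific match.
completeDynamicRoutes.push({
  ...route,
  src: route.src.replace(
    new RegExp(escapeStringRegexp('(?:/)?$')),
    '(?:\\.rsc)(?:/)?$'
  ),
  dest: route.dest.replace(/($|\?)/, '.rsc$1'),
});
// Plain HTML route second.
completeDynamicRoutes.push(route);

console.log(completeDynamicRoutes.map(r => r.src).join('\n'));
// ^/dynamic/([^/]+?)/([^/]+?)(?:\.rsc)(?:/)?$
// ^/dynamic/([^/]+?)/([^/]+?)(?:/)?$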
View File

@@ -6,6 +6,15 @@
}
],
"probes": [
{
"path": "/dynamic/category-1/id-1",
"status": 200,
"headers": {
"RSC": "1"
},
"mustContain": ":{",
"mustNotContain": "<html"
},
{
"path": "/ssg",
"status": 200,

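In plain fetch terms, the new probe above asserts roughly the following; the deployment URL is a placeholder and the helper is only a sketch of what the probe runner checks.

// Rough equivalent of the added probe; the base URL is hypothetical.
async function checkRscProbe(base: string): Promise<void> {
  const res = await fetch(`${base}/dynamic/category-1/id-1`, {
    headers: { RSC: '1' },
  });
  const body = await res.text();
  if (res.status !== 200) throw new Error(`expected 200, got ${res.status}`);
  if (!body.includes(':{')) throw new Error('expected an RSC flight payload');
  if (body.includes('<html')) throw new Error('got an HTML page instead of RSC');
}

checkRscProbe('https://my-test-deployment.vercel.app').catch(console.error);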
View File

@@ -8,6 +8,18 @@ jest.setTimeout(ms('6m'));
describe(`${__dirname.split(path.sep).pop()}`, () => {
it('should normalize routes in build results output', async () => {
// TODO: remove after bug with edge functions on Windows
// is resolved upstream in Next.js
if (process.platform === 'win32') {
const indexPage = path.join(__dirname, 'pages/index.tsx');
await fs.writeFile(
indexPage,
(
await fs.readFile(indexPage, 'utf8')
).replace('runtime: ', '// runtime: ')
);
}
const files = [
'index.test.js',
'next.config.js',
@@ -35,5 +47,9 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
expect(output).toHaveProperty('test/api/hello');
expect(output['test/api/hello'].type).toEqual('EdgeFunction');
for (const name in output) {
expect(output[name].type).not.toBe('Lambda');
}
});
});

View File

@@ -9,3 +9,8 @@ const Home: NextPage = () => {
}
export default Home
export const config = {
runtime: 'experimental-edge',
}

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/node",
"version": "2.8.5",
"version": "2.8.6",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -31,7 +31,7 @@
"dependencies": {
"@edge-runtime/vm": "2.0.0",
"@types/node": "14.18.33",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/node-bridge": "3.1.3",
"@vercel/static-config": "2.0.6",
"edge-runtime": "2.0.0",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/python",
"version": "3.1.33",
"version": "3.1.34",
"main": "./dist/index.js",
"license": "MIT",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/python",
@@ -22,7 +22,7 @@
"devDependencies": {
"@types/execa": "^0.9.0",
"@types/jest": "27.4.1",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/ncc": "0.24.0",
"execa": "^1.0.0",
"typescript": "4.3.4"

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/redwood",
"version": "1.0.44",
"version": "1.0.45",
"main": "./dist/index.js",
"license": "MIT",
"homepage": "https://vercel.com/docs",
@@ -27,6 +27,6 @@
"@types/aws-lambda": "8.10.19",
"@types/node": "14.18.33",
"@types/semver": "6.0.0",
"@vercel/build-utils": "5.7.4"
"@vercel/build-utils": "5.7.5"
}
}

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/remix",
"version": "1.1.6",
"version": "1.1.7",
"license": "MIT",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",
@@ -25,7 +25,7 @@
"devDependencies": {
"@types/jest": "27.5.1",
"@types/node": "14.18.33",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"typescript": "4.6.4"
}
}

View File

@@ -1,7 +1,7 @@
{
"name": "@vercel/ruby",
"author": "Nathan Cahill <nathan@nathancahill.com>",
"version": "1.3.49",
"version": "1.3.50",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/ruby",
@@ -22,7 +22,7 @@
"devDependencies": {
"@types/fs-extra": "8.0.0",
"@types/semver": "6.0.0",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/ncc": "0.24.0",
"execa": "2.0.4",
"fs-extra": "^7.0.1",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/static-build",
"version": "1.1.0",
"version": "1.1.1",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/build-step",
@@ -36,7 +36,7 @@
"@types/ms": "0.7.31",
"@types/node-fetch": "2.5.4",
"@types/promise-timeout": "1.3.0",
"@vercel/build-utils": "5.7.4",
"@vercel/build-utils": "5.7.5",
"@vercel/frameworks": "1.1.18",
"@vercel/ncc": "0.24.0",
"@vercel/routing-utils": "2.1.3",

View File

@@ -8,15 +8,15 @@ import {
writePackageJson,
} from './_shared';
const GATSBY_PLUGIN_PACKAGE_NAME = '@vercel/gatsby-plugin-vercel-analytics';
const DEFAULT_CONFIG = {
plugins: [
{
resolve: 'gatsby-plugin-vercel',
resolve: GATSBY_PLUGIN_PACKAGE_NAME,
options: {},
},
],
};
const GATSBY_PLUGIN_PACKAGE_NAME = 'gatsby-plugin-vercel';
const GATSBY_CONFIG_FILE = 'gatsby-config';
export async function injectVercelAnalyticsPlugin(dir: string): Promise<void> {
@@ -96,13 +96,13 @@ if (!vercelConfig.plugins) {
const hasPlugin = vercelConfig.plugins.find(
(p: PluginRef) =>
p && (p === "gatsby-plugin-vercel" || p.resolve === "gatsby-plugin-vercel")
p && (p === "${GATSBY_PLUGIN_PACKAGE_NAME}" || p.resolve === "${GATSBY_PLUGIN_PACKAGE_NAME}")
);
if (!hasPlugin) {
vercelConfig.plugins = vercelConfig.plugins.slice();
vercelConfig.plugins.push({
resolve: "gatsby-plugin-vercel",
resolve: "${GATSBY_PLUGIN_PACKAGE_NAME}",
options: {},
});
}
@@ -134,13 +134,13 @@ if (!vercelConfig.plugins) {
const hasPlugin = vercelConfig.plugins.find(
(p) =>
p && (p === "gatsby-plugin-vercel" || p.resolve === "gatsby-plugin-vercel")
p && (p === "${GATSBY_PLUGIN_PACKAGE_NAME}" || p.resolve === "${GATSBY_PLUGIN_PACKAGE_NAME}")
);
if (!hasPlugin) {
vercelConfig.plugins = vercelConfig.plugins.slice();
vercelConfig.plugins.push({
resolve: "gatsby-plugin-vercel",
resolve: "${GATSBY_PLUGIN_PACKAGE_NAME}",
options: {},
});
}
@@ -172,12 +172,12 @@ if (!vercelConfig.plugins) {
const hasPlugin = vercelConfig.plugins.find(
(p) =>
p && (p === "gatsby-plugin-vercel" || p.resolve === "gatsby-plugin-vercel")
p && (p === "${GATSBY_PLUGIN_PACKAGE_NAME}" || p.resolve === "${GATSBY_PLUGIN_PACKAGE_NAME}")
);
if (!hasPlugin) {
vercelConfig.plugins = vercelConfig.plugins.slice();
vercelConfig.plugins.push({
resolve: "gatsby-plugin-vercel",
resolve: "${GATSBY_PLUGIN_PACKAGE_NAME}",
options: {},
});
}

View File

@@ -30,7 +30,7 @@ it(
"plugins": Array [
Object {
"options": Object {},
"resolve": "gatsby-plugin-vercel",
"resolve": "@vercel/gatsby-plugin-vercel-analytics",
},
],
}
@@ -61,7 +61,7 @@ it(
"plugins": Array [
Object {
"options": Object {},
"resolve": "gatsby-plugin-vercel",
"resolve": "@vercel/gatsby-plugin-vercel-analytics",
},
],
"siteMetadata": Object {
@@ -98,7 +98,7 @@ it(
"gatsby-plugin-react-helmet",
Object {
"options": Object {},
"resolve": "gatsby-plugin-vercel",
"resolve": "@vercel/gatsby-plugin-vercel-analytics",
},
],
"siteMetadata": Object {
@@ -132,7 +132,7 @@ it(
.toMatchInlineSnapshot(`
Object {
"plugins": Array [
"gatsby-plugin-vercel",
"@vercel/gatsby-plugin-vercel-analytics",
],
"siteMetadata": Object {
"author": "@gatsbyjs",
@@ -167,7 +167,7 @@ it(
"plugins": Array [
Object {
"options": Object {},
"resolve": "gatsby-plugin-vercel",
"resolve": "@vercel/gatsby-plugin-vercel-analytics",
},
],
"siteMetadata": Object {
@@ -204,7 +204,7 @@ it(
"gatsby-plugin-react-helmet",
Object {
"options": Object {},
"resolve": "gatsby-plugin-vercel",
"resolve": "@vercel/gatsby-plugin-vercel-analytics",
},
],
"siteMetadata": Object {
@@ -243,7 +243,7 @@ it(
},
Object {
"options": Object {},
"resolve": "gatsby-plugin-vercel",
"resolve": "@vercel/gatsby-plugin-vercel-analytics",
},
],
"siteMetadata": Object {
@@ -276,7 +276,7 @@ it(
"plugins": Array [
Object {
"options": Object {},
"resolve": "gatsby-plugin-vercel",
"resolve": "@vercel/gatsby-plugin-vercel-analytics",
},
],
"siteMetadata": Object {
@@ -321,13 +321,13 @@ if (!vercelConfig.plugins) {
const hasPlugin = vercelConfig.plugins.find(
(p) =>
p && (p === "gatsby-plugin-vercel" || p.resolve === "gatsby-plugin-vercel")
p && (p === "@vercel/gatsby-plugin-vercel-analytics" || p.resolve === "@vercel/gatsby-plugin-vercel-analytics")
);
if (!hasPlugin) {
vercelConfig.plugins = vercelConfig.plugins.slice();
vercelConfig.plugins.push({
resolve: "gatsby-plugin-vercel",
resolve: "@vercel/gatsby-plugin-vercel-analytics",
options: {},
});
}

yarn.lock (199 changed lines)
View File

@@ -4248,20 +4248,6 @@ boxen@^3.0.0:
type-fest "^0.3.0"
widest-line "^2.0.0"
boxen@^5.0.0:
version "5.1.2"
resolved "https://registry.yarnpkg.com/boxen/-/boxen-5.1.2.tgz#788cb686fc83c1f486dfa8a40c68fc2b831d2b50"
integrity sha512-9gYgQKXx+1nP8mP7CzFyaUARhg7D3n1dF/FnErWmu9l6JvGpNUN278h0aSb+QjoiKSWG+iZ3uHrcqk0qrY9RQQ==
dependencies:
ansi-align "^3.0.0"
camelcase "^6.2.0"
chalk "^4.1.0"
cli-boxes "^2.2.1"
string-width "^4.2.2"
type-fest "^0.20.2"
widest-line "^3.1.0"
wrap-ansi "^7.0.0"
brace-expansion@^1.1.7:
version "1.1.11"
resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd"
@@ -4612,14 +4598,6 @@ chalk@^4.0.0:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
chalk@^4.1.0:
version "4.1.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
dependencies:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
chance@1.1.7:
version "1.1.7"
resolved "https://registry.yarnpkg.com/chance/-/chance-1.1.7.tgz#e99dde5ac16681af787b5ba94c8277c090d6cfe8"
@@ -4764,11 +4742,6 @@ cli-boxes@^2.2.0:
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.0.tgz#538ecae8f9c6ca508e3c3c95b453fe93cb4c168d"
integrity sha512-gpaBrMAizVEANOpfZp/EEUixTXDyGt7DFzdK5hU+UbWt/J0lB0w20ncZj59Z9a93xHb9u12zF5BS6i9RKbtg4w==
cli-boxes@^2.2.1:
version "2.2.1"
resolved "https://registry.yarnpkg.com/cli-boxes/-/cli-boxes-2.2.1.tgz#ddd5035d25094fce220e9cab40a45840a440318f"
integrity sha512-y4coMcylgSCdVinjiDBuR8PCC2bLjyGTwEmPb9NHR/QaNU6EUOXcTY/s6VjGMD6ENSEaeQYHCY0GNGS5jfMwPw==
cli-cursor@^2.0.0, cli-cursor@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-2.1.0.tgz#b35dac376479facc3e94747d41d0d0f5238ffcb5"
@@ -5022,18 +4995,6 @@ configstore@^4.0.0:
write-file-atomic "^2.0.0"
xdg-basedir "^3.0.0"
configstore@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/configstore/-/configstore-5.0.1.tgz#d365021b5df4b98cdd187d6a3b0e3f6a7cc5ed96"
integrity sha512-aMKprgk5YhBNyH25hj8wGt2+D52Sw1DRRIzqBwLp2Ya9mFmY8KPvvtvmna8SxVR9JMZ4kzMD68N22vlaRpkeFA==
dependencies:
dot-prop "^5.2.0"
graceful-fs "^4.1.2"
make-dir "^3.0.0"
unique-string "^2.0.0"
write-file-atomic "^3.0.0"
xdg-basedir "^4.0.0"
console-control-strings@^1.0.0, console-control-strings@~1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
@@ -5283,11 +5244,6 @@ crypto-random-string@^1.0.0:
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e"
integrity sha1-ojD2T1aDEOFJgAmUB5DsmVRbyn4=
crypto-random-string@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-2.0.0.tgz#ef2a7a966ec11083388369baa02ebead229b30d5"
integrity sha512-v1plID3y9r/lPhviJ1wrXpLeyUIGAZ2SHNYTEapm7/8A9nLPoyvVp3RK/EPFqn5kEznyWgYZNsRtYYIWbuG8KA==
css-select@^4.3.0:
version "4.3.0"
resolved "https://registry.yarnpkg.com/css-select/-/css-select-4.3.0.tgz#db7129b2846662fd8628cfc496abb2b59e41529b"
@@ -5675,7 +5631,7 @@ dot-prop@^4.1.0, dot-prop@^4.2.0:
dependencies:
is-obj "^1.0.0"
dot-prop@^5.1.0, dot-prop@^5.2.0:
dot-prop@^5.1.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.2.0.tgz#c34ecc29556dc45f1f4c22697b6f4904e0cc4fcb"
integrity sha512-uEUyaDKoSQ1M4Oq8l45hSE26SnTxL6snNnqvK/VWx5wJhmff5z0FUVJDKDanor/6w3kzE3i7XZOk+7wC0EXr1A==
@@ -6042,11 +5998,6 @@ escalade@^3.1.1:
resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40"
integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==
escape-goat@^2.0.0:
version "2.1.1"
resolved "https://registry.yarnpkg.com/escape-goat/-/escape-goat-2.1.1.tgz#1b2dc77003676c457ec760b2dc68edb648188675"
integrity sha512-8/uIhbG12Csjy2JEW7D9pHbreaVaS/OpN3ycnyvElTdwM5n6GY6W6e2IPemfvGZeUMqZ9A/3GqIZMgKnBhAw/Q==
escape-html@1.0.3, escape-html@~1.0.3:
version "1.0.3"
resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988"
@@ -7126,13 +7077,6 @@ global-dirs@^0.1.0:
dependencies:
ini "^1.3.4"
global-dirs@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-3.0.0.tgz#70a76fe84ea315ab37b1f5576cbde7d48ef72686"
integrity sha512-v8ho2DS5RiCjftj1nD9NmnfaOzTdud7RRnVd9kFNOjqZbISlx5DQ+OrTkywgd0dIt7oFCvKetZSHoHcP3sDdiA==
dependencies:
ini "2.0.0"
globals@^11.1.0:
version "11.12.0"
resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e"
@@ -7616,11 +7560,6 @@ inherits@2.0.3:
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
ini@2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/ini/-/ini-2.0.0.tgz#e5fd556ecdd5726be978fa1001862eacb0a94bc5"
integrity sha512-7PnF4oN3CvZF23ADhA5wRaYEQpJ8qygSkbtTXWBeXWXmEVRXK+1ITciHWwHhsjv1TmW0MgacIv6hEi5pX5NQdA==
ini@3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/ini/-/ini-3.0.0.tgz#2f6de95006923aa75feed8894f5686165adc08f1"
@@ -7891,24 +7830,11 @@ is-installed-globally@^0.1.0:
global-dirs "^0.1.0"
is-path-inside "^1.0.0"
is-installed-globally@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/is-installed-globally/-/is-installed-globally-0.4.0.tgz#9a0fd407949c30f86eb6959ef1b7994ed0b7b520"
integrity sha512-iwGqO3J21aaSkC7jWnHP/difazwS7SFeIqxv6wEtLU8Y5KlzFTjyqcSIT0d8s4+dDhKytsk9PJZ2BkS5eZwQRQ==
dependencies:
global-dirs "^3.0.0"
is-path-inside "^3.0.2"
is-npm@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-3.0.0.tgz#ec9147bfb629c43f494cf67936a961edec7e8053"
integrity sha512-wsigDr1Kkschp2opC4G3yA6r9EgVA6NjRpWzIi9axXqeIaAATPRJc4uLujXe3Nd9uO8KoDyA4MD6aZSeXTADhA==
is-npm@^5.0.0:
version "5.0.0"
resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-5.0.0.tgz#43e8d65cc56e1b67f8d47262cf667099193f45a8"
integrity sha512-WW/rQLOazUq+ST/bCAVBp/2oMERWLsR7OrKyt052dNDk4DHcDE0/7QSXITlmi+VBcV13DfIbysG3tZJm5RfdBA==
is-number@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195"
@@ -7974,11 +7900,6 @@ is-path-inside@^3.0.1:
resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.2.tgz#f5220fc82a3e233757291dddc9c5877f2a1f3017"
integrity sha512-/2UGPSgmtqwo1ktx8NDHjuPwZWmHhO+gj0f93EkhLB5RgW9RZevWYYlIkS6zePc6U2WpOdQYIwHe9YC4DWEBVg==
is-path-inside@^3.0.2:
version "3.0.3"
resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283"
integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==
is-plain-obj@^1.0.0, is-plain-obj@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e"
@@ -9190,7 +9111,7 @@ kleur@^4.1.5:
resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780"
integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==
latest-version@^5.0.0, latest-version@^5.1.0:
latest-version@^5.0.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/latest-version/-/latest-version-5.1.0.tgz#119dfe908fe38d15dfa43ecd13fa12ec8832face"
integrity sha512-weT+r0kTkRQdCdYCNtkMwWXQTMEswKrFBkm4ckQOMVhhqhIMI1UT2hMj+1iigIhgSZm5gTmrRXBNoGUgaTY1xA==
@@ -11272,13 +11193,6 @@ punycode@^2.1.0, punycode@^2.1.1:
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
pupa@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/pupa/-/pupa-2.1.1.tgz#f5e8fd4afc2c5d97828faa523549ed8744a20d62"
integrity sha512-l1jNAspIBSFqbT+y+5FosojNpVpF94nlI+wDUpqP9enwOTfHx9f0gh5nB96vl+6yTpsJsypeNrwfzPrKuHB41A==
dependencies:
escape-goat "^2.0.0"
q@^1.5.1:
version "1.5.1"
resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
@@ -11865,13 +11779,6 @@ semver-diff@^2.0.0:
dependencies:
semver "^5.0.3"
semver-diff@^3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-3.1.1.tgz#05f77ce59f325e00e2706afd67bb506ddb1ca32b"
integrity sha512-GX0Ix/CJcHyB8c4ykpHGIAvLyOwOobtM/8d+TQkAd81/bEjgPHrfba41Vpesr7jX/t8Uh+R3EX9eAS5be+jQYg==
dependencies:
semver "^6.3.0"
"semver@2 || 3 || 4 || 5", "semver@2.x || 3.x || 4 || 5", semver@^5.0.3, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0, semver@^5.7.0, semver@^5.7.1:
version "5.7.1"
resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
@@ -12376,7 +12283,7 @@ string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0:
is-fullwidth-code-point "^3.0.0"
strip-ansi "^6.0.0"
string-width@^4.2.2, string-width@^4.2.3:
string-width@^4.2.3:
version "4.2.3"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010"
integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==
@@ -13114,47 +13021,47 @@ tunnel-agent@^0.6.0:
dependencies:
safe-buffer "^5.0.1"
turbo-darwin-64@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/turbo-darwin-64/-/turbo-darwin-64-1.6.3.tgz#fad7e078784b0fafc0b1f75ce9378828918595f5"
integrity sha512-QmDIX0Yh1wYQl0bUS0gGWwNxpJwrzZU2GIAYt3aOKoirWA2ecnyb3R6ludcS1znfNV2MfunP+l8E3ncxUHwtjA==
turbo-darwin-64@1.7.0-canary.9:
version "1.7.0-canary.9"
resolved "https://registry.yarnpkg.com/turbo-darwin-64/-/turbo-darwin-64-1.7.0-canary.9.tgz#7a5d5a4afd6efaed4def7be03b7e829944e65570"
integrity sha512-JRRSMjISYlZnmI23LCOIWoU1d3J7a/NlB27LkJfdtmzuszbkt3K6xqFn+DegVEsKyBftiSgMA4kzQCpIuay3Zg==
turbo-darwin-arm64@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/turbo-darwin-arm64/-/turbo-darwin-arm64-1.6.3.tgz#f0a32cae39e3fcd3da5e3129a94c18bb2e3ed6aa"
integrity sha512-75DXhFpwE7CinBbtxTxH08EcWrxYSPFow3NaeFwsG8aymkWXF+U2aukYHJA6I12n9/dGqf7yRXzkF0S/9UtdyQ==
turbo-darwin-arm64@1.7.0-canary.9:
version "1.7.0-canary.9"
resolved "https://registry.yarnpkg.com/turbo-darwin-arm64/-/turbo-darwin-arm64-1.7.0-canary.9.tgz#403cb3f9cee38aa8837d8fe4e6d893766c6c8466"
integrity sha512-zWLdlgCQnYLGIN6q111DIjdlP+k69Mi8QGGrIY0ouV5/H+/Ft2h9VNHyGbhiLNdgPnjYYBi/aq5IDdiwxmF0Pg==
turbo-linux-64@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/turbo-linux-64/-/turbo-linux-64-1.6.3.tgz#8ddc6ac55ef84641182fe5ff50647f1b355826b0"
integrity sha512-O9uc6J0yoRPWdPg9THRQi69K6E2iZ98cRHNvus05lZbcPzZTxJYkYGb5iagCmCW/pq6fL4T4oLWAd6evg2LGQA==
turbo-linux-64@1.7.0-canary.9:
version "1.7.0-canary.9"
resolved "https://registry.yarnpkg.com/turbo-linux-64/-/turbo-linux-64-1.7.0-canary.9.tgz#86effd17413e574487e93384b6c4c974d758f65b"
integrity sha512-YMBpPY3zPI2yok/J3RnC7sGm8XPFF9eFFveThRDg8DSpS/eXAoM4XWOYxxNlyGrJp+YkzTkM/UmWfwowLfvleQ==
turbo-linux-arm64@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/turbo-linux-arm64/-/turbo-linux-arm64-1.6.3.tgz#846c1dc84d8dc741651906613c16acccba30428c"
integrity sha512-dCy667qqEtZIhulsRTe8hhWQNCJO0i20uHXv7KjLHuFZGCeMbWxB8rsneRoY+blf8+QNqGuXQJxak7ayjHLxiA==
turbo-linux-arm64@1.7.0-canary.9:
version "1.7.0-canary.9"
resolved "https://registry.yarnpkg.com/turbo-linux-arm64/-/turbo-linux-arm64-1.7.0-canary.9.tgz#0225ffefbd57eb022de0a2205fbcf724e5942f31"
integrity sha512-ljQZlLJ34yDsaEBFdiI7N8BcC2J/KhQijtDjNkthfekguL4Sx5MnWTONiJyGmnjDJ94V97WmMKklaw9nIweS8g==
turbo-windows-64@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/turbo-windows-64/-/turbo-windows-64-1.6.3.tgz#89ac819fa76ad31d12fbfdeb3045bcebd0d308eb"
integrity sha512-lKRqwL3mrVF09b9KySSaOwetehmGknV9EcQTF7d2dxngGYYX1WXoQLjFP9YYH8ZV07oPm+RUOAKSCQuDuMNhiA==
turbo-windows-64@1.7.0-canary.9:
version "1.7.0-canary.9"
resolved "https://registry.yarnpkg.com/turbo-windows-64/-/turbo-windows-64-1.7.0-canary.9.tgz#640582a4776bf40b984516d774d311c61d4c86f9"
integrity sha512-Pb0n9p5wegmJjqul9lrNzKvMSP9Z8ZEDwZHgaRcG8WIDBjcbDMZ48EtCeODxBK9TNi1EKmyF0tIJkx9e8tz9cQ==
turbo-windows-arm64@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/turbo-windows-arm64/-/turbo-windows-arm64-1.6.3.tgz#977607c9a51f0b76076c8b158bafce06ce813070"
integrity sha512-BXY1sDPEA1DgPwuENvDCD8B7Hb0toscjus941WpL8CVd10hg9pk/MWn9CNgwDO5Q9ks0mw+liDv2EMnleEjeNA==
turbo-windows-arm64@1.7.0-canary.9:
version "1.7.0-canary.9"
resolved "https://registry.yarnpkg.com/turbo-windows-arm64/-/turbo-windows-arm64-1.7.0-canary.9.tgz#aaefdd90cbf1b30c4611bd7186094b9cf33e268a"
integrity sha512-IWrQh40utN71ujRqIrfZPCgAq++5P6ek9NCCMQTuXSG7MAe79t6iMzS6L3skYqoPbRMKFVTkLx147BFU2dp7dw==
turbo@1.6.3:
version "1.6.3"
resolved "https://registry.yarnpkg.com/turbo/-/turbo-1.6.3.tgz#ec26cc8907c38a9fd6eb072fb10dad254733543e"
integrity sha512-FtfhJLmEEtHveGxW4Ye/QuY85AnZ2ZNVgkTBswoap7UMHB1+oI4diHPNyqrQLG4K1UFtCkjOlVoLsllUh/9QRw==
turbo@1.7.0-canary.9:
version "1.7.0-canary.9"
resolved "https://registry.yarnpkg.com/turbo/-/turbo-1.7.0-canary.9.tgz#b4df5078f5833f1aed4f8ffbdebaf38af986749f"
integrity sha512-WBeUcEoWKPKOjwM0rd15AhORZINbPJYjFVT5TzEJnFeK880CyNAl9h3ohdzKPYH6Pi43FNEwm59GSwYls663DA==
optionalDependencies:
turbo-darwin-64 "1.6.3"
turbo-darwin-arm64 "1.6.3"
turbo-linux-64 "1.6.3"
turbo-linux-arm64 "1.6.3"
turbo-windows-64 "1.6.3"
turbo-windows-arm64 "1.6.3"
turbo-darwin-64 "1.7.0-canary.9"
turbo-darwin-arm64 "1.7.0-canary.9"
turbo-linux-64 "1.7.0-canary.9"
turbo-linux-arm64 "1.7.0-canary.9"
turbo-windows-64 "1.7.0-canary.9"
turbo-windows-arm64 "1.7.0-canary.9"
tweetnacl@^0.14.3, tweetnacl@~0.14.0:
version "0.14.5"
@@ -13354,13 +13261,6 @@ unique-string@^1.0.0:
dependencies:
crypto-random-string "^1.0.0"
unique-string@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-2.0.0.tgz#39c6451f81afb2749de2b233e3f7c5e8843bd89d"
integrity sha512-uNaeirEPvpZWSgzwsPGtU2zVSTrn/8L5q/IexZmH0eH6SA73CmAA5U4GwORTxQAZs95TAXLNqeLoPPNO5gZfWg==
dependencies:
crypto-random-string "^2.0.0"
unique-temp-dir@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unique-temp-dir/-/unique-temp-dir-1.0.0.tgz#6dce95b2681ca003eebfb304a415f9cbabcc5385"
@@ -13416,26 +13316,6 @@ unset-value@^1.0.0:
has-value "^0.3.1"
isobject "^3.0.0"
update-notifier@5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-5.1.0.tgz#4ab0d7c7f36a231dd7316cf7729313f0214d9ad9"
integrity sha512-ItnICHbeMh9GqUy31hFPrD1kcuZ3rpxDZbf4KUDavXwS0bW5m7SLbDQpGX3UYr072cbrF5hFUs3r5tUsPwjfHw==
dependencies:
boxen "^5.0.0"
chalk "^4.1.0"
configstore "^5.0.1"
has-yarn "^2.1.0"
import-lazy "^2.1.0"
is-ci "^2.0.0"
is-installed-globally "^0.4.0"
is-npm "^5.0.0"
is-yarn-global "^0.3.0"
latest-version "^5.1.0"
pupa "^2.1.1"
semver "^7.3.4"
semver-diff "^3.1.1"
xdg-basedir "^4.0.0"
update-notifier@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/update-notifier/-/update-notifier-3.0.1.tgz#78ecb68b915e2fd1be9f767f6e298ce87b736250"
@@ -13820,11 +13700,6 @@ xdg-basedir@^3.0.0:
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-3.0.0.tgz#496b2cc109eca8dbacfe2dc72b603c17c5870ad4"
integrity sha1-SWsswQnsqNus/i3HK2A8F8WHCtQ=
xdg-basedir@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13"
integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q==
xdg-portable@^7.0.0:
version "7.2.1"
resolved "https://registry.yarnpkg.com/xdg-portable/-/xdg-portable-7.2.1.tgz#4301ba0868b2cbc9de0c53b3699906adcc9d2560"