Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-12 04:22:14 +00:00)

Compare commits: @vercel/py ... @vercel/py (45 commits)
| SHA1 |
|---|
| 0c7b54edad |
| 6d42816395 |
| 6fe6d05a42 |
| 50a201f145 |
| 701a02ae9d |
| 39f7586621 |
| c4a39c8d29 |
| 3ac238cf08 |
| 8384813a0d |
| c4587de439 |
| d997dc4fbc |
| d15b90bd4d |
| 5b31297f0c |
| e232566cbe |
| 592689cad1 |
| 9b08e72f76 |
| bd0e10cfe7 |
| 28436ade60 |
| de0d2fba0b |
| e0900128d6 |
| 8d15f30579 |
| 960c66584c |
| 1c8f91031a |
| 68cb23c3cc |
| 94f6ae2595 |
| b92aeac84d |
| 00420b7a01 |
| a5128790d0 |
| ae9aa91f4f |
| d4cef69cc9 |
| 323f67c31a |
| 63c499a826 |
| ad436313e1 |
| c414288b2f |
| b07ff7431f |
| 79fde4475c |
| 855197c699 |
| fbd9080859 |
| b5c5b7b82c |
| 0a072ee850 |
| 0b56caba45 |
| ab3db60824 |
| f2f2ff2c67 |
| ba7dafff71 |
| 987fb4d4f7 |

8  .github/CONTRIBUTING.md  (vendored)
@@ -23,7 +23,7 @@ Make sure all the tests pass before making changes.

## Verifying your change

Once you are done with your changes (we even suggest doing it along the way), make sure all the test still pass by running:
Once you are done with your changes (we even suggest doing it along the way), make sure all the tests still pass by running:

```
yarn test-unit
@@ -64,7 +64,7 @@ Integration tests create deployments to your Vercel account using the `test` pro
x-now-trace=iad1]
```

In such cases you can visit the URL of the failed deployment and append `/_logs` so see the build error. In the case above, that would be https://test-8ashcdlew.vercel.app/_logs
In such cases, you can visit the URL of the failed deployment and append `/_logs` to see the build error. In the case above, that would be https://test-8ashcdlew.vercel.app/_logs

The logs of this deployment will contain the actual error which may help you to understand what went wrong.

@@ -82,11 +82,11 @@ nodeFileTrace(['path/to/entrypoint.js'], {
.then(e => console.error(e));
```

When you run this script, you'll see all imported files. If anything file is missing, the bug is in [@vercel/nft](https://github.com/vercel/nft) and not the Builder.
When you run this script, you'll see all the imported files. If anything file is missing, the bug is in [@vercel/nft](https://github.com/vercel/nft) and not the Builder.

## Deploy a Builder with existing project

Sometimes you want to test changes to a Builder against an existing project, maybe with `vercel dev` or an actual deployment. You can avoid publishing every Builder change to npm by uploading the Builder as a tarball.
Sometimes you want to test changes to a Builder against an existing project, maybe with `vercel dev` or actual deployment. You can avoid publishing every Builder change to npm by uploading the Builder as a tarball.

1. Change directory to the desired Builder `cd ./packages/node`
2. Run `yarn build` to compile typescript and other build steps
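
As a reference point for the tracing snippet quoted in the hunk above, a minimal standalone script might look like the following sketch (the entrypoint path is a placeholder, not a file in this repository):

```ts
import { nodeFileTrace } from '@vercel/nft';

async function main() {
  // Trace every file reachable from the entrypoint (placeholder path).
  const { fileList } = await nodeFileTrace(['path/to/entrypoint.js']);
  for (const file of fileList) {
    console.log(file); // each traced file, relative to the working directory
  }
}

main().catch(e => console.error(e));
```
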
10  .github/workflows/test-integration-cli.yml  (vendored)
@@ -18,11 +18,13 @@ jobs:
os: [ubuntu-latest]
node: [14]
runs-on: ${{ matrix.os }}
env:
TURBO_REMOTE_ONLY: true
TURBO_TEAM: vercel
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
steps:
- name: Conditionally set remote env
if: github.event.pull_request.head.repo.full_name == github.repository
run: |
echo "TURBO_REMOTE_ONLY=true" >> $GITHUB_ENV
echo "TURBO_TEAM=vercel" >> $GITHUB_ENV
echo "TURBO_TOKEN=${{ secrets.TURBO_TOKEN }}" >> $GITHUB_ENV
- uses: actions/setup-go@v2
with:
go-version: '1.13.15'
10  .github/workflows/test-unit.yml  (vendored)
@@ -18,11 +18,13 @@ jobs:
os: [ubuntu-latest, macos-latest, windows-latest]
node: [14]
runs-on: ${{ matrix.os }}
env:
TURBO_REMOTE_ONLY: true
TURBO_TEAM: vercel
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
steps:
- name: Conditionally set remote env
if: github.event.pull_request.head.repo.full_name == github.repository
run: |
echo "TURBO_REMOTE_ONLY=true" >> $GITHUB_ENV
echo "TURBO_TEAM=vercel" >> $GITHUB_ENV
echo "TURBO_TOKEN=${{ secrets.TURBO_TOKEN }}" >> $GITHUB_ENV
- uses: actions/setup-go@v2
with:
go-version: '1.13.15'
10  .github/workflows/test.yml  (vendored)
@@ -38,10 +38,6 @@ jobs:
runs-on: ${{ matrix.runner }}
name: ${{matrix.scriptName}} (${{matrix.packageName}}, ${{matrix.chunkNumber}}, ${{ matrix.runner }})
if: ${{ needs.setup.outputs['tests'] != '[]' }}
env:
TURBO_REMOTE_ONLY: true
TURBO_TEAM: vercel
TURBO_TOKEN: ${{ secrets.TURBO_TOKEN }}
needs:
- setup
strategy:
@@ -49,6 +45,12 @@ jobs:
matrix:
include: ${{ fromJson(needs.setup.outputs['tests']) }}
steps:
- name: Conditionally set remote env
if: github.event.pull_request.head.repo.full_name == github.repository
run: |
echo "TURBO_REMOTE_ONLY=true" >> $GITHUB_ENV
echo "TURBO_TEAM=vercel" >> $GITHUB_ENV
echo "TURBO_TOKEN=${{ secrets.TURBO_TOKEN }}" >> $GITHUB_ENV
- uses: actions/checkout@v2
with:
fetch-depth: 2
@@ -1,6 +1,6 @@
{
"compilerOptions": {
"target": "esnext",
"target": "ES2020",
"skipLibCheck": true,
"strict": false,
"forceConsistentCasingInFileNames": true,

@@ -22,8 +22,5 @@
"@types/react-dom": "^17.0.9",
"typescript": "^4.1.2"
},
"engines": {
"node": "14.x"
},
"sideEffects": false
}

@@ -1,7 +0,0 @@
{
"build": {
"env": {
"ENABLE_FILE_SYSTEM_API": "1"
}
}
}
1  examples/sveltekit/.gitignore  (vendored)
@@ -7,3 +7,4 @@ node_modules
.env.*
!.env.example
.vercel
.output
13  examples/sveltekit/.prettierignore  (new file)
@@ -0,0 +1,13 @@
.DS_Store
node_modules
/build
/.svelte-kit
/package
.env
.env.*
!.env.example

# Ignore files for PNPM, NPM and YARN
pnpm-lock.yaml
package-lock.json
yarn.lock
6  examples/sveltekit/.prettierrc  (new file)
@@ -0,0 +1,6 @@
{
"useTabs": false,
"singleQuote": true,
"trailingComma": "none",
"printWidth": 100
}
@@ -14,31 +14,29 @@ If you're seeing this, you've probably already done this step. Congrats!

```bash
# create a new project in the current directory
npm init svelte@next
npm init svelte

# create a new project in my-app
npm init svelte@next my-app
npm init svelte my-app
```

> Note: the `@next` is temporary

## Developing

Once you've created a project and installed dependencies with `npm install` (or `pnpm install` or `yarn`), start a development server:
Once you've created a project and installed dependencies with `pnpm install`, start a development server:

```bash
npm run dev
pnpm run dev

# or start the server and open the app in a new browser tab
npm run dev -- --open
pnpm run dev -- --open
```

## Building

This uses the adapter-auto for SvelteKit, which detects Vercel and runs adapter-vercel on your behalf.
To create a production version of your app:

```bash
npm run build
pnpm run build
```

> You can preview the built app with `npm run preview`, regardless of whether you installed an adapter. This should _not_ be used to serve your app in production.
You can preview the production build with `npm run preview`.
@@ -1,10 +1,13 @@
{
"extends": "./.svelte-kit/tsconfig.json",
"compilerOptions": {
"baseUrl": ".",
"paths": {
"$lib": ["src/lib"],
"$lib/*": ["src/lib/*"]
"allowJs": true,
"checkJs": true,
"esModuleInterop": true,
"forceConsistentCasingInFileNames": true,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": true,
"strict": true
}
},
"include": ["src/**/*.d.ts", "src/**/*.js", "src/**/*.svelte"]
}
1267  examples/sveltekit/package-lock.json  (generated; file diff suppressed because it is too large)
@@ -1,22 +1,29 @@
{
"private": true,
"name": "sveltekit",
"version": "0.0.1",
"scripts": {
"dev": "svelte-kit dev",
"build": "svelte-kit build",
"package": "svelte-kit package",
"preview": "svelte-kit preview"
"preview": "svelte-kit preview",
"prepare": "svelte-kit sync",
"check": "svelte-check --tsconfig ./jsconfig.json",
"check:watch": "svelte-check --tsconfig ./jsconfig.json --watch",
"lint": "prettier --check --plugin-search-dir=. .",
"format": "prettier --write --plugin-search-dir=. ."
},
"devDependencies": {
"@sveltejs/adapter-auto": "next",
"@sveltejs/kit": "next",
"svelte": "^3.46.0"
"@types/cookie": "^0.4.1",
"prettier": "^2.5.1",
"prettier-plugin-svelte": "^2.5.0",
"svelte": "^3.46.0",
"svelte-check": "^2.2.6",
"typescript": "~4.6.2"
},
"type": "module",
"dependencies": {
"@fontsource/fira-mono": "^4.5.0",
"@lukeed/uuid": "^2.0.0",
"cookie": "^0.4.1"
}
}
1633  examples/sveltekit/pnpm-lock.yaml  (generated, new file; file diff suppressed because it is too large)
15  examples/sveltekit/src/app.d.ts  (vendored, new file)
@@ -0,0 +1,15 @@
/// <reference types="@sveltejs/kit" />

// See https://kit.svelte.dev/docs/types#app
// for information about these interfaces
declare namespace App {
interface Locals {
userid: string;
}

// interface Platform {}

// interface Session {}

// interface Stuff {}
}
@@ -2,12 +2,11 @@
<html lang="en">
<head>
<meta charset="utf-8" />
<meta name="description" content="Svelte demo app" />
<link rel="icon" href="%svelte.assets%/favicon.png" />
<link rel="icon" href="%sveltekit.assets%/favicon.png" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
%svelte.head%
%sveltekit.head%
</head>
<body>
<div>%svelte.body%</div>
<div>%sveltekit.body%</div>
</body>
</html>
@@ -1,13 +1,13 @@
import cookie from 'cookie';
import { v4 as uuid } from '@lukeed/uuid';
import * as cookie from 'cookie';

/** @type {import('@sveltejs/kit').Handle} */
export const handle = async ({ event, resolve }) => {
const cookies = cookie.parse(event.request.headers.get('cookie') || '');
event.locals.userid = cookies.userid || uuid();
event.locals.userid = cookies['userid'] || crypto.randomUUID();

const response = await resolve(event);

if (!cookies.userid) {
if (!cookies['userid']) {
// if this is the first time the user has visited this app,
// set a cookie so that we recognise them when they return
response.headers.set(
@@ -7,6 +7,10 @@
$: displayed_count.set(count);
$: offset = modulo($displayed_count, 1);

/**
* @param {number} n
* @param {number} m
*/
function modulo(n, m) {
// handle negative numbers
return ((n % m) + m) % m;
@@ -50,6 +54,7 @@
justify-content: center;
border: 0;
background-color: transparent;
touch-action: manipulation;
color: var(--text-color);
font-size: 2rem;
}
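
For context on the `modulo` helper documented in the hunk above: JavaScript's `%` operator keeps the sign of the dividend, so the counter needs the double-modulo trick to wrap negative offsets back into range. A quick illustration:

```ts
const modulo = (n: number, m: number) => ((n % m) + m) % m;

console.log(-0.25 % 1);        // -0.25 (sign of the dividend is kept)
console.log(modulo(-0.25, 1)); //  0.75 (wrapped into [0, 1), as the counter expects)
```
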
@@ -2,9 +2,36 @@ import { invalidate } from '$app/navigation';

// this action (https://svelte.dev/tutorial/actions) allows us to
// progressively enhance a <form> that already works without JS
/**
* @param {HTMLFormElement} form
* @param {{
* pending?: ({ data, form }: { data: FormData; form: HTMLFormElement }) => void;
* error?: ({
* data,
* form,
* response,
* error
* }: {
* data: FormData;
* form: HTMLFormElement;
* response: Response | null;
* error: Error | null;
* }) => void;
* result?: ({
* data,
* form,
* response
* }: {
* data: FormData;
* response: Response;
* form: HTMLFormElement;
* }) => void;
* }} [opts]
*/
export function enhance(form, { pending, error, result } = {}) {
let current_token;

/** @param {SubmitEvent} e */
async function handle_submit(e) {
const token = (current_token = {});

@@ -37,7 +64,7 @@ export function enhance(form, { pending, error, result } = {}) {
console.error(await response.text());
}
} catch (e) {
if (error) {
if (error && e instanceof Error) {
error({ data, form, error: e, response: null });
} else {
throw e;

@@ -16,6 +16,7 @@

<svelte:head>
<title>About</title>
<meta name="description" content="About this app" />
</svelte:head>

<div class="content">
@@ -26,8 +27,7 @@
following into your command line and following the prompts:
</p>

<!-- TODO lose the @next! -->
<pre>npm init svelte@next</pre>
<pre>npm init svelte</pre>

<p>
The page you're looking at is purely static HTML, with no client-side interactivity needed.

@@ -8,6 +8,7 @@

<svelte:head>
<title>Home</title>
<meta name="description" content="Svelte demo app" />
</svelte:head>

<section>
@@ -11,6 +11,11 @@

const base = 'https://api.svelte.dev';

/**
* @param {string} method
* @param {string} resource
* @param {Record<string, unknown>} [data]
*/
export function api(method, resource, data) {
return fetch(`${base}/${resource}`, {
method,

@@ -1,5 +1,6 @@
import { api } from './_api';

/** @type {import('./__types').RequestHandler} */
export const get = async ({ locals }) => {
// locals.userid comes from src/hooks.js
const response = await api('get', `todos/${locals.userid}`);
@@ -27,6 +28,7 @@ export const get = async ({ locals }) => {
};
};

/** @type {import('./index').RequestHandler} */
export const post = async ({ request, locals }) => {
const form = await request.formData();

@@ -46,6 +48,7 @@ const redirect = {
}
};

/** @type {import('./index').RequestHandler} */
export const patch = async ({ request, locals }) => {
const form = await request.formData();

@@ -57,6 +60,7 @@ export const patch = async ({ request, locals }) => {
return redirect;
};

/** @type {import('./index').RequestHandler} */
export const del = async ({ request, locals }) => {
const form = await request.formData();

@@ -3,11 +3,23 @@
import { scale } from 'svelte/transition';
import { flip } from 'svelte/animate';

/**
* @typedef {{
* uid: string;
* created_at: Date;
* text: string;
* done: boolean;
* pending_delete: boolean;
* }} Todo
*/

/** @type {Todo[]} */
export let todos;
</script>

<svelte:head>
<title>Todos</title>
<meta name="description" content="A todo list app" />
</svelte:head>

<div class="todos">
@@ -31,7 +31,7 @@
"prettier": "2.6.2",
"ts-eager": "2.0.2",
"ts-jest": "28.0.0-next.1",
"turbo": "1.2.9"
"turbo": "1.2.14"
},
"scripts": {
"lerna": "lerna",
@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "3.1.1-canary.2",
"version": "4.1.0",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",
@@ -23,7 +23,7 @@
"@types/cross-spawn": "6.0.0",
"@types/end-of-stream": "^1.4.0",
"@types/fs-extra": "9.0.13",
"@types/glob": "^7.1.1",
"@types/glob": "7.2.0",
"@types/jest": "27.4.1",
"@types/js-yaml": "3.12.1",
"@types/ms": "0.7.31",
@@ -31,7 +31,7 @@
"@types/node-fetch": "^2.1.6",
"@types/semver": "6.0.0",
"@types/yazl": "2.4.2",
"@vercel/frameworks": "0.9.2-canary.0",
"@vercel/frameworks": "1.0.1",
"@vercel/ncc": "0.24.0",
"aggregate-error": "3.0.1",
"async-retry": "1.2.3",
@@ -40,7 +40,7 @@
"cross-spawn": "6.0.5",
"end-of-stream": "1.4.1",
"fs-extra": "10.0.0",
"glob": "7.1.3",
"glob": "8.0.3",
"into-stream": "5.0.0",
"js-yaml": "3.13.1",
"minimatch": "3.0.4",
85  packages/build-utils/src/fs/get-glob-fs.ts  (new file)
@@ -0,0 +1,85 @@
import fs from 'fs';
import { DetectorFilesystem } from '../detectors/filesystem';

type GlobFs = typeof fs;

function normalizePath(path: string) {
// on windows, this will return a path like
// D:/c/package.json
// since we abstract the filesystem, we need to remove windows specific info from the path
// and let the FS decide how to process the path
// D:/c/package.json => /c/package.json
return path.replace(/^[a-zA-Z]:/, '');
}

export function getGlobFs(_fs: DetectorFilesystem): GlobFs {
const readdir = (
path: fs.PathLike,
callback: (err: NodeJS.ErrnoException | null, files: string[]) => void
): void => {
_fs
.readdir(normalizePath(String(path)))
.then(stats =>
callback(
null,
stats.map(stat => stat.name)
)
)
.catch(err => callback(err, []));
};

const stat = (
path: fs.PathLike,
callback: (
err: NodeJS.ErrnoException | null,
stats: fs.Stats | null
) => void
): void => {
_fs
.isFile(normalizePath(String(path)))
.then(isPathAFile => {
callback(null, {
ino: 0,
mode: 0,
nlink: 0,
uid: 0,
gid: 0,
rdev: 0,
size: 0,
blksize: 0,
blocks: 0,
atimeMs: 0,
mtimeMs: 0,
ctimeMs: 0,
birthtimeMs: 0,
atime: new Date(),
mtime: new Date(),
ctime: new Date(),
birthtime: new Date(),
dev: 0,
isBlockDevice: () => false,
isCharacterDevice: () => false,
isDirectory: () => !isPathAFile,
isFIFO: () => false,
isFile: () => isPathAFile,
isSocket: () => false,
isSymbolicLink: () => false,
});
})
.catch(err => callback(err, null));
};

return new Proxy(fs, {
get(_target, prop) {
switch (prop) {
case 'readdir':
return readdir;
case 'lstat':
case 'stat':
return stat;
default:
throw new Error('Not Implemented');
}
},
});
}
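
A rough sketch of how this proxy is meant to be consumed, mirroring the call site in `get-workspace-package-paths.ts` further down; `detectorFs` stands in for whatever `DetectorFilesystem` implementation is in play:

```ts
import glob from 'glob';
import { getGlobFs } from './get-glob-fs';
import { DetectorFilesystem } from '../detectors/filesystem';

declare const detectorFs: DetectorFilesystem; // placeholder instance

// glob accepts a custom `fs` implementation; only readdir/stat/lstat are hit,
// which is exactly what the Proxy above provides.
glob('*/package.json', { cwd: '/', fs: getGlobFs(detectorFs) }, (err, matches) => {
  if (err) throw err;
  console.log(matches); // e.g. ['a/package.json', 'b/package.json']
});
```
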
@@ -3,6 +3,7 @@ import fs from 'fs-extra';
import path from 'path';
import Sema from 'async-sema';
import spawn from 'cross-spawn';
import { coerce, intersects, validRange } from 'semver';
import { SpawnOptions } from 'child_process';
import { deprecate } from 'util';
import debug from '../debug';
@@ -205,7 +206,9 @@ export function getSpawnOptions(

if (!meta.isDev) {
// Ensure that the selected Node version is at the beginning of the `$PATH`
opts.env.PATH = `/node${nodeVersion.major}/bin:${opts.env.PATH}`;
opts.env.PATH = `/node${nodeVersion.major}/bin${path.delimiter}${
opts.env.PATH || process.env.PATH
}`;
}

return opts;
@@ -217,9 +220,9 @@ export async function getNodeVersion(
config: Config = {},
meta: Meta = {}
): Promise<NodeVersion> {
const latest = getLatestNodeVersion();
if (meta && meta.isDev) {
// Use the system-installed version of `node` in PATH for `vercel dev`
const latest = getLatestNodeVersion();
return { ...latest, runtime: 'nodejs' };
}
const { packageJson } = await scanParentDirs(destPath, true);
@@ -227,10 +230,27 @@
let isAuto = true;
if (packageJson && packageJson.engines && packageJson.engines.node) {
const { node } = packageJson.engines;
if (nodeVersion && nodeVersion !== node && !meta.isDev) {
if (
nodeVersion &&
validRange(node) &&
!intersects(nodeVersion, node) &&
!meta.isDev
) {
console.warn(
`Warning: Due to "engines": { "node": "${node}" } in your \`package.json\` file, the Node.js Version defined in your Project Settings ("${nodeVersion}") will not apply. Learn More: http://vercel.link/node-version`
);
} else if (coerce(node)?.raw === node && !meta.isDev) {
console.warn(
`Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` with major.minor.patch, but only major Node.js Version can be selected. Learn More: http://vercel.link/node-version`
);
} else if (
validRange(node) &&
intersects(`${latest.major + 1}.x`, node) &&
!meta.isDev
) {
console.warn(
`Warning: Detected "engines": { "node": "${node}" } in your \`package.json\` that will automatically upgrade when a new major Node.js Version is released. Learn More: http://vercel.link/node-version`
);
}
nodeVersion = node;
isAuto = false;
@@ -433,13 +453,13 @@ export function getEnvForPackageManager({
(nodeVersion?.major || 0) < 16
) {
// Ensure that npm 7 is at the beginning of the `$PATH`
newEnv.PATH = `/node16/bin-npm7:${env.PATH}`;
newEnv.PATH = `/node16/bin-npm7${path.delimiter}${env.PATH}`;
console.log('Detected `package-lock.json` generated by npm 7...');
}
} else if (cliType === 'pnpm') {
if (typeof lockfileVersion === 'number' && lockfileVersion === 5.4) {
// Ensure that pnpm 7 is at the beginning of the `$PATH`
newEnv.PATH = `/pnpm7/node_modules/.bin:${env.PATH}`;
newEnv.PATH = `/pnpm7/node_modules/.bin${path.delimiter}${env.PATH}`;
console.log('Detected `pnpm-lock.yaml` generated by pnpm 7...');
}
} else {
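
The `:` to `path.delimiter` changes above are about Windows support. A minimal illustration of the difference (standard Node.js behavior, nothing Vercel-specific):

```ts
import { delimiter } from 'path';

// delimiter is ':' on Linux/macOS and ';' on Windows, so prepending a
// directory to PATH must use it instead of a hard-coded ':'.
const newPath = `/node16/bin-npm7${delimiter}${process.env.PATH ?? ''}`;
console.log(newPath);
```
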
@@ -117,5 +117,14 @@ export const isStaticRuntime = (name?: string): boolean => {
};

export { workspaceManagers } from './workspaces/workspace-managers';
export { getWorkspaces } from './workspaces/get-workspaces';
export {
getWorkspaces,
GetWorkspaceOptions,
Workspace,
WorkspaceType,
} from './workspaces/get-workspaces';
export {
getWorkspacePackagePaths,
GetWorkspacePackagePathsOptions,
} from './workspaces/get-workspace-package-paths';
export { monorepoManagers } from './monorepos/monorepo-managers';
@@ -82,7 +82,7 @@ export interface BuildOptions {
* is the Git Repository Root. This is only relevant for Monorepos.
* See https://vercel.com/blog/monorepos
*/
repoRootPath?: string;
repoRootPath: string;

/**
* An arbitrary object passed by the user in the build definition defined
@@ -123,7 +123,7 @@ export interface PrepareCacheOptions {
* is the Git Repository Root. This is only relevant for Monorepos.
* See https://vercel.com/blog/monorepos
*/
repoRootPath?: string;
repoRootPath: string;

/**
* An arbitrary object passed by the user in the build definition defined
@@ -295,6 +295,7 @@ export interface PackageJson {
readonly preferGlobal?: boolean;
readonly private?: boolean;
readonly publishConfig?: PackageJson.PublishConfig;
readonly packageManager?: string;
}

export interface NodeVersion {
@@ -427,7 +428,7 @@ export interface BuildResultV2Typical {
export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;

export interface BuildResultV3 {
output: Lambda;
output: Lambda | EdgeFunction;
}

export type BuildV2 = (options: BuildOptions) => Promise<BuildResultV2>;
@@ -0,0 +1,113 @@
import _path from 'path';
import yaml from 'js-yaml';
import glob from 'glob';
import { DetectorFilesystem } from '../detectors/filesystem';
import { Workspace } from './get-workspaces';
import { getGlobFs } from '../fs/get-glob-fs';
import { normalizePath } from '../fs/normalize-path';

const posixPath = _path.posix;

interface GetPackagePathOptions {
fs: DetectorFilesystem;
}

export interface GetWorkspacePackagePathsOptions extends GetPackagePathOptions {
fs: DetectorFilesystem;
workspace: Workspace;
}

export async function getWorkspacePackagePaths({
fs,
workspace,
}: GetWorkspacePackagePathsOptions): Promise<string[]> {
const { type, rootPath } = workspace;
const workspaceFs = fs.chdir(rootPath);

let results: string[] = [];

switch (type) {
case 'yarn':
case 'npm':
results = await getPackageJsonWorkspacePackagePaths({ fs: workspaceFs });
break;
case 'pnpm':
results = await getPnpmWorkspacePackagePaths({ fs: workspaceFs });
break;
default:
throw new Error(`Unknown workspace implementation: ${type}`);
}

return results.map(packagePath => {
return posixPath.join(rootPath, posixPath.dirname(packagePath));
});
}

type PackageJsonWithWorkspace = {
workspaces?:
| {
packages?: string[];
noHoist?: string[];
}
| string[];
};

type PnpmWorkspaces = {
packages?: string[];
};

async function getPackagePaths(
packages: string[],
fs: DetectorFilesystem
): Promise<string[]> {
return (
await Promise.all(
packages.map(
packageGlob =>
new Promise<string[]>((resolve, reject) => {
glob(
normalizePath(posixPath.join(packageGlob, 'package.json')),
{
cwd: '/',
fs: getGlobFs(fs),
},
(err, matches) => {
if (err) reject(err);
else resolve(matches);
}
);
})
)
)
).flat();
}

async function getPackageJsonWorkspacePackagePaths({
fs,
}: GetPackagePathOptions): Promise<string[]> {
const packageJsonAsBuffer = await fs.readFile('package.json');
const { workspaces } = JSON.parse(
packageJsonAsBuffer.toString()
) as PackageJsonWithWorkspace;

let packages: string[] = [];

if (Array.isArray(workspaces)) {
packages = workspaces;
} else {
packages = workspaces?.packages ?? [];
}

return getPackagePaths(packages, fs);
}

async function getPnpmWorkspacePackagePaths({
fs,
}: GetPackagePathOptions): Promise<string[]> {
const pnpmWorkspaceAsBuffer = await fs.readFile('pnpm-workspace.yaml');
const { packages = [] } = yaml.load(
pnpmWorkspaceAsBuffer.toString()
) as PnpmWorkspaces;

return getPackagePaths(packages, fs);
}
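
A minimal sketch of how these helpers compose; it mirrors the unit test added further down, and `fs` is assumed to be a `DetectorFilesystem` rooted at the repository:

```ts
import { getWorkspaces } from './get-workspaces';
import { getWorkspacePackagePaths } from './get-workspace-package-paths';
import { DetectorFilesystem } from '../detectors/filesystem';

declare const fs: DetectorFilesystem; // placeholder instance

async function listWorkspacePackages(): Promise<string[]> {
  const workspaces = await getWorkspaces({ fs });
  const paths = await Promise.all(
    workspaces.map(workspace => getWorkspacePackagePaths({ fs, workspace }))
  );
  return paths.flat(); // e.g. ['/a', '/b'] for an npm or yarn workspace
}
```
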
@@ -17,5 +17,13 @@ checkPkgOrThrow('exeggcute');

// This is to satisfy `@vercel/static-build` which needs a `dist` directory.
const { exec } = require('exeggcute');
exec('mkdir dist', __dirname);
exec('echo "node-env:RANDOMNESS_PLACEHOLDER" > dist/index.html', __dirname);
exec('mkdir dist', __dirname)
.then(() => {
exec(
'echo "node-env:RANDOMNESS_PLACEHOLDER" > dist/index.html',
__dirname
).then(() => {
console.log('Success');
});
})
.catch(console.error);

@@ -6,5 +6,10 @@ const b = require('./b');
a();
b();

exec('mkdir public', __dirname);
exec('echo "Hello, World!" > public/index.html', __dirname);
exec('mkdir public', __dirname)
.then(() => {
exec('echo "Hello, World!" > public/index.html', __dirname).then(() => {
console.log('Success');
});
})
.catch(console.error);
15  packages/build-utils/test/fixtures/25-multiple-lock-files-yarn/a/package.json  (vendored, new file)
@@ -0,0 +1,15 @@
{
"name": "a",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"debug": "^4.3.2"
}
}
15  packages/build-utils/test/fixtures/25-multiple-lock-files-yarn/b/package.json  (vendored, new file)
@@ -0,0 +1,15 @@
{
"name": "b",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"cowsay": "^1.5.0"
}
}
15  packages/build-utils/test/fixtures/26-multiple-lock-files-pnpm/a/package.json  (vendored, new file)
@@ -0,0 +1,15 @@
{
"name": "a",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"debug": "^4.3.2"
}
}
15  packages/build-utils/test/fixtures/26-multiple-lock-files-pnpm/b/package.json  (vendored, new file)
@@ -0,0 +1,15 @@
{
"name": "b",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"cowsay": "^1.5.0"
}
}
245  packages/build-utils/test/fixtures/26-multiple-lock-files-pnpm/pnpm-lock.yaml  (generated, vendored)
@@ -1,19 +1,260 @@
lockfileVersion: 5.3
lockfileVersion: 5.4

importers:

.:
specifiers:
once: ^1.4.0

dependencies:
once: 1.4.0

a:
specifiers:
debug: ^4.3.2
dependencies:
debug: 4.3.4

b:
specifiers:
cowsay: ^1.5.0
dependencies:
cowsay: 1.5.0

packages:

/ansi-regex/3.0.1:
resolution: {integrity: sha512-+O9Jct8wf++lXxxFc4hc8LsjaSq0HFzzL7cVsw8pRDIPdjKD2mT4ytDZlLuSBZ4cLKZFXIrMGO7DbQCtMJJMKw==}
engines: {node: '>=4'}
dev: false

/ansi-regex/5.0.1:
resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==}
engines: {node: '>=8'}
dev: false

/ansi-styles/4.3.0:
resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==}
engines: {node: '>=8'}
dependencies:
color-convert: 2.0.1
dev: false

/camelcase/5.3.1:
resolution: {integrity: sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==}
engines: {node: '>=6'}
dev: false

/cliui/6.0.0:
resolution: {integrity: sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==}
dependencies:
string-width: 4.2.3
strip-ansi: 6.0.1
wrap-ansi: 6.2.0
dev: false

/color-convert/2.0.1:
resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==}
engines: {node: '>=7.0.0'}
dependencies:
color-name: 1.1.4
dev: false

/color-name/1.1.4:
resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==}
dev: false

/cowsay/1.5.0:
resolution: {integrity: sha512-8Ipzr54Z8zROr/62C8f0PdhQcDusS05gKTS87xxdji8VbWefWly0k8BwGK7+VqamOrkv3eGsCkPtvlHzrhWsCA==}
engines: {node: '>= 4'}
hasBin: true
dependencies:
get-stdin: 8.0.0
string-width: 2.1.1
strip-final-newline: 2.0.0
yargs: 15.4.1
dev: false

/debug/4.3.4:
resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==}
engines: {node: '>=6.0'}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies:
ms: 2.1.2
dev: false

/decamelize/1.2.0:
resolution: {integrity: sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==}
engines: {node: '>=0.10.0'}
dev: false

/emoji-regex/8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
dev: false

/find-up/4.1.0:
resolution: {integrity: sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==}
engines: {node: '>=8'}
dependencies:
locate-path: 5.0.0
path-exists: 4.0.0
dev: false

/get-caller-file/2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
dev: false

/get-stdin/8.0.0:
resolution: {integrity: sha512-sY22aA6xchAzprjyqmSEQv4UbAAzRN0L2dQB0NlN5acTTK9Don6nhoc3eAbUnpZiCANAMfd/+40kVdKfFygohg==}
engines: {node: '>=10'}
dev: false

/is-fullwidth-code-point/2.0.0:
resolution: {integrity: sha512-VHskAKYM8RfSFXwee5t5cbN5PZeq1Wrh6qd5bkyiXIf6UQcN6w/A0eXM9r6t8d+GYOh+o6ZhiEnb88LN/Y8m2w==}
engines: {node: '>=4'}
dev: false

/is-fullwidth-code-point/3.0.0:
resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==}
engines: {node: '>=8'}
dev: false

/locate-path/5.0.0:
resolution: {integrity: sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==}
engines: {node: '>=8'}
dependencies:
p-locate: 4.1.0
dev: false

/ms/2.1.2:
resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==}
dev: false

/once/1.4.0:
resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
dependencies:
wrappy: 1.0.2
dev: false

/p-limit/2.3.0:
resolution: {integrity: sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==}
engines: {node: '>=6'}
dependencies:
p-try: 2.2.0
dev: false

/p-locate/4.1.0:
resolution: {integrity: sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==}
engines: {node: '>=8'}
dependencies:
p-limit: 2.3.0
dev: false

/p-try/2.2.0:
resolution: {integrity: sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==}
engines: {node: '>=6'}
dev: false

/path-exists/4.0.0:
resolution: {integrity: sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==}
engines: {node: '>=8'}
dev: false

/require-directory/2.1.1:
resolution: {integrity: sha1-jGStX9MNqxyXbiNE/+f3kqam30I=}
engines: {node: '>=0.10.0'}
dev: false

/require-main-filename/2.0.0:
resolution: {integrity: sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==}
dev: false

/set-blocking/2.0.0:
resolution: {integrity: sha1-BF+XgtARrppoA93TgrJDkrPYkPc=}
dev: false

/string-width/2.1.1:
resolution: {integrity: sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==}
engines: {node: '>=4'}
dependencies:
is-fullwidth-code-point: 2.0.0
strip-ansi: 4.0.0
dev: false

/string-width/4.2.3:
resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
engines: {node: '>=8'}
dependencies:
emoji-regex: 8.0.0
is-fullwidth-code-point: 3.0.0
strip-ansi: 6.0.1
dev: false

/strip-ansi/4.0.0:
resolution: {integrity: sha1-qEeQIusaw2iocTibY1JixQXuNo8=}
engines: {node: '>=4'}
dependencies:
ansi-regex: 3.0.1
dev: false

/strip-ansi/6.0.1:
resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
engines: {node: '>=8'}
dependencies:
ansi-regex: 5.0.1
dev: false

/strip-final-newline/2.0.0:
resolution: {integrity: sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==}
engines: {node: '>=6'}
dev: false

/which-module/2.0.0:
resolution: {integrity: sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=}
dev: false

/wrap-ansi/6.2.0:
resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==}
engines: {node: '>=8'}
dependencies:
ansi-styles: 4.3.0
string-width: 4.2.3
strip-ansi: 6.0.1
dev: false

/wrappy/1.0.2:
resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
dev: false

/y18n/4.0.3:
resolution: {integrity: sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==}
dev: false

/yargs-parser/18.1.3:
resolution: {integrity: sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==}
engines: {node: '>=6'}
dependencies:
camelcase: 5.3.1
decamelize: 1.2.0
dev: false

/yargs/15.4.1:
resolution: {integrity: sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==}
engines: {node: '>=8'}
dependencies:
cliui: 6.0.0
decamelize: 1.2.0
find-up: 4.1.0
get-caller-file: 2.0.5
require-directory: 2.1.1
require-main-filename: 2.0.0
set-blocking: 2.0.0
string-width: 4.2.3
which-module: 2.0.0
y18n: 4.0.3
yargs-parser: 18.1.3
dev: false
@@ -0,0 +1,6 @@
{
"private": true,
"engines": {
"node": "16.14.0"
}
}
@@ -0,0 +1,6 @@
{
"private": true,
"engines": {
"node": ">=16"
}
}
@@ -1,4 +1,5 @@
import assert from 'assert';
import { delimiter } from 'path';
import { getEnvForPackageManager } from '../src';

describe('Test `getEnvForPackageManager()`', () => {
@@ -34,7 +35,7 @@ describe('Test `getEnvForPackageManager()`', () => {
},
want: {
FOO: 'bar',
PATH: `/node16/bin-npm7:foo`,
PATH: `/node16/bin-npm7${delimiter}foo`,
},
},
{
@@ -97,7 +98,7 @@ describe('Test `getEnvForPackageManager()`', () => {
},
want: {
FOO: 'bar',
PATH: '/pnpm7/node_modules/.bin:foo',
PATH: `/pnpm7/node_modules/.bin${delimiter}foo`,
},
},
{
38  packages/build-utils/test/unit.get-workspaces-package-paths.test.ts  (vendored, new file)
@@ -0,0 +1,38 @@
import path from 'path';
import { getWorkspaces } from '../src/workspaces/get-workspaces';
import { getWorkspacePackagePaths } from '../src/workspaces/get-workspace-package-paths';
import { FixtureFilesystem } from './utils/fixture-filesystem';

describe.each<[string, string[]]>([
['21-npm-workspaces', ['/a', '/b']],
['23-pnpm-workspaces', ['/c', '/d']],
['27-yarn-workspaces', ['/a', '/b']],
['25-multiple-lock-files-yarn', ['/a', '/b']],
['26-multiple-lock-files-pnpm', ['/a', '/b']],
[
'29-nested-workspaces',
['/backend/c', '/backend/d', '/frontend/a', '/frontend/b'],
],
['22-pnpm', []],
])('`getWorkspacesPackagePaths()`', (fixturePath, packagePaths) => {
const testName =
packagePaths.length > 0
? `should detect ${packagePaths.join()} package${
packagePaths.length > 1 ? 's' : ''
} for ${fixturePath}`
: `should not detect any workspace for ${fixturePath}`;

it(testName, async () => {
const fixture = path.join(__dirname, 'fixtures', fixturePath);
const fs = new FixtureFilesystem(fixture);

const workspaces = await getWorkspaces({ fs });
const actualPackagePaths = (
await Promise.all(
workspaces.map(workspace => getWorkspacePackagePaths({ fs, workspace }))
)
).flat();

expect(actualPackagePaths).toEqual(packagePaths);
});
});
48  packages/build-utils/test/unit.test.ts  (vendored)
@@ -277,7 +277,45 @@ it('should prefer package.json engines over project setting from config and warn
]);
});

it('should warn when package.json engines is exact version', async () => {
expect(
await getNodeVersion(
path.join(__dirname, 'pkg-engine-node-exact'),
undefined,
{},
{}
)
).toHaveProperty('range', '16.x');
expect(warningMessages).toStrictEqual([
'Warning: Detected "engines": { "node": "16.14.0" } in your `package.json` with major.minor.patch, but only major Node.js Version can be selected. Learn More: http://vercel.link/node-version',
]);
});

it('should warn when package.json engines is greater than', async () => {
expect(
await getNodeVersion(
path.join(__dirname, 'pkg-engine-node-greaterthan'),
undefined,
{},
{}
)
).toHaveProperty('range', '16.x');
expect(warningMessages).toStrictEqual([
'Warning: Detected "engines": { "node": ">=16" } in your `package.json` that will automatically upgrade when a new major Node.js Version is released. Learn More: http://vercel.link/node-version',
]);
});

it('should not warn when package.json engines matches project setting from config', async () => {
expect(
await getNodeVersion(
path.join(__dirname, 'pkg-engine-node'),
undefined,
{ nodeVersion: '14' },
{}
)
).toHaveProperty('range', '14.x');
expect(warningMessages).toStrictEqual([]);

expect(
await getNodeVersion(
path.join(__dirname, 'pkg-engine-node'),
@@ -287,6 +325,16 @@ it('should not warn when package.json engines matches project setting from confi
)
).toHaveProperty('range', '14.x');
expect(warningMessages).toStrictEqual([]);

expect(
await getNodeVersion(
path.join(__dirname, 'pkg-engine-node'),
undefined,
{ nodeVersion: '<15' },
{}
)
).toHaveProperty('range', '14.x');
expect(warningMessages).toStrictEqual([]);
});

it('should get latest node version', async () => {
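
To make the new warnings concrete, these are the kinds of `engines` values the fixtures above exercise, restated here as illustrative data (the exact messages are the ones asserted in the tests):

```ts
// Hypothetical engines fields and the behavior the tests above assert:
const cases = [
  { engines: { node: '16.14.0' }, note: 'exact major.minor.patch: warns, resolves to 16.x' },
  { engines: { node: '>=16' }, note: 'open-ended range: warns about auto-upgrading majors' },
  { engines: { node: '14.x' }, note: 'range compatible with the project setting: no warning' },
];
console.table(cases);
```
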
@@ -2,7 +2,7 @@
"compilerOptions": {
"declaration": true,
"esModuleInterop": true,
"lib": ["esnext"],
"lib": ["ES2020"],
"module": "commonjs",
"moduleResolution": "node",
"noEmitOnError": true,
@@ -13,7 +13,7 @@
"outDir": "./dist",
"types": ["node", "jest"],
"strict": true,
"target": "es2019"
"target": "ES2020"
},
"include": ["src/**/*"],
"exclude": ["node_modules"]
@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "24.2.5-canary.2",
"version": "25.1.0",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -30,7 +30,6 @@
"scripts/preinstall.js"
],
"ava": {
"compileEnhancements": false,
"extensions": [
"ts"
],
@@ -40,19 +39,19 @@
]
},
"engines": {
"node": ">= 12"
"node": ">= 14"
},
"dependencies": {
"@vercel/build-utils": "3.1.1-canary.2",
"@vercel/go": "1.4.4-canary.2",
"@vercel/next": "2.8.67-canary.2",
"@vercel/node": "1.15.4-canary.2",
"@vercel/python": "2.3.4-canary.2",
"@vercel/redwood": "0.8.4-canary.2",
"@vercel/remix": "0.0.2-canary.2",
"@vercel/ruby": "1.3.7-canary.2",
"@vercel/static-build": "0.25.3-canary.2",
"update-notifier": "4.1.0"
"@vercel/build-utils": "4.1.0",
"@vercel/go": "2.0.1",
"@vercel/next": "3.0.1",
"@vercel/node": "2.1.0",
"@vercel/python": "3.0.1",
"@vercel/redwood": "1.0.1",
"@vercel/remix": "1.0.1",
"@vercel/ruby": "1.3.9",
"@vercel/static-build": "1.0.1",
"update-notifier": "5.1.0"
},
"devDependencies": {
"@alex_neo/jest-expect-message": "1.0.5",
@@ -95,8 +94,8 @@
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@types/yauzl-promise": "2.1.0",
"@vercel/client": "11.0.4-canary.2",
"@vercel/frameworks": "0.9.2-canary.0",
"@vercel/client": "12.0.1",
"@vercel/frameworks": "1.0.1",
"@vercel/ncc": "0.24.0",
"@zeit/fun": "0.11.2",
"@zeit/source-map-support": "0.6.2",
@@ -4,6 +4,7 @@ import dotenv from 'dotenv';
import { join, relative } from 'path';
import {
detectBuilders,
normalizePath,
Files,
FileFsRef,
PackageJson,
@@ -45,6 +46,7 @@ import {
writeBuildResult,
} from '../util/build/write-build-result';
import { importBuilders, BuilderWithPkg } from '../util/build/import-builders';
import { initCorepack, cleanupCorepack } from '../util/build/corepack';

type BuildResult = BuildResultV2 | BuildResultV3;

@@ -177,7 +179,7 @@ export default async function main(client: Client): Promise<number> {

// Get a list of source files
const files = (await getFiles(workPath, client)).map(f =>
relative(workPath, f)
normalizePath(relative(workPath, f))
);

const routesResult = getTransformedRoutes({ nowConfig: vercelConfig || {} });
@@ -265,19 +267,20 @@ export default async function main(client: Client): Promise<number> {
}

// Delete output directory from potential previous build
await fs.remove(OUTPUT_DIR);
const outputDir = join(cwd, OUTPUT_DIR);
await fs.remove(outputDir);

const buildStamp = stamp();

// Create fresh new output directory
await fs.mkdirp(OUTPUT_DIR);
await fs.mkdirp(outputDir);

const ops: Promise<Error | void>[] = [];

// Write the `detectedBuilders` result to output dir
ops.push(
fs.writeJSON(
join(OUTPUT_DIR, 'builds.json'),
join(outputDir, 'builds.json'),
{
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
target,
@@ -312,6 +315,10 @@ export default async function main(client: Client): Promise<number> {
// TODO: parallelize builds
const buildResults: Map<Builder, BuildResult> = new Map();
const overrides: PathOverride[] = [];
const repoRootPath = cwd;
const rootPackageJsonPath = repoRootPath || workPath;
const corepackShimDir = await initCorepack({ cwd, rootPackageJsonPath });

for (const build of builds) {
if (typeof build.src !== 'string') continue;

@@ -331,7 +338,6 @@ export default async function main(client: Client): Promise<number> {
framework: project.settings.framework,
nodeVersion: project.settings.nodeVersion,
};
const repoRootPath = cwd === workPath ? undefined : cwd;
const buildOptions: BuildOptions = {
files: filesMap,
entrypoint: build.src,
@@ -352,6 +358,7 @@ export default async function main(client: Client): Promise<number> {
// Start flushing the file outputs to the filesystem asynchronously
ops.push(
writeBuildResult(
outputDir,
buildResult,
build,
builder,
@@ -366,6 +373,10 @@ export default async function main(client: Client): Promise<number> {
);
}

if (corepackShimDir) {
cleanupCorepack(corepackShimDir);
}

// Wait for filesystem operations to complete
// TODO render progress bar?
let hadError = false;
@@ -379,7 +390,7 @@ export default async function main(client: Client): Promise<number> {
if (hadError) return 1;

// Merge existing `config.json` file into the one that will be produced
const configPath = join(OUTPUT_DIR, 'config.json');
const configPath = join(outputDir, 'config.json');
// TODO: properly type
const existingConfig = await readJSONFile<any>(configPath);
if (existingConfig instanceof CantParseJSONFile) {
@@ -437,7 +448,7 @@ export default async function main(client: Client): Promise<number> {
wildcard: mergedWildcard,
overrides: mergedOverrides,
};
await fs.writeJSON(join(OUTPUT_DIR, 'config.json'), config, { spaces: 2 });
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });

output.print(
`${prependEmoji(

@@ -1,4 +1,5 @@
import { resolve, join } from 'path';
import fs from 'fs-extra';

import DevServer from '../../util/dev/server';
import parseListen from '../../util/dev/parse-listen';
@@ -12,6 +13,7 @@ import setupAndLink from '../../util/link/setup-and-link';
import getSystemEnvValues from '../../util/env/get-system-env-values';
import { getCommandName } from '../../util/pkg-name';
import param from '../../util/output/param';
import { OUTPUT_DIR } from '../../util/build/write-build-result';

type Options = {
'--listen': string;
@@ -104,6 +106,15 @@ export default async function dev(
devCommand = process.env.VERCEL_DEV_COMMAND;
}

// If there is no Development Command, we must delete the
// v3 Build Output because it will incorrectly be detected by
// @vercel/static-build in BuildOutputV3.getBuildOutputDirectory()
if (!devCommand) {
output.log(`Removing ${OUTPUT_DIR}`);
const outputDir = join(cwd, OUTPUT_DIR);
await fs.remove(outputDir);
}

const devServer = new DevServer(cwd, {
output,
devCommand,
7  packages/cli/src/commands/env/index.ts  (vendored)
@@ -42,6 +42,13 @@ const help = () => {

${chalk.dim('Examples:')}

${chalk.gray(
'–'
)} Pull all Development Environment Variables down from the cloud

${chalk.cyan(`$ ${getPkgName()} env pull <file>`)}
${chalk.cyan(`$ ${getPkgName()} env pull .env.development.local`)}

${chalk.gray('–')} Add a new variable to multiple Environments

${chalk.cyan(`$ ${getPkgName()} env add <name>`)}
@@ -25,7 +25,7 @@ import {

const help = () => {
return console.log(`
${chalk.bold(`${logo} ${getPkgName()} pull`)} [path]
${chalk.bold(`${logo} ${getPkgName()} pull`)} [project-path]

${chalk.dim('Options:')}

@@ -42,25 +42,29 @@ const help = () => {

${chalk.dim('Examples:')}

${chalk.gray('–')} Pull the latest Project Settings from the cloud
${chalk.gray(
'–'
)} Pull the latest Environment Variables and Project Settings from the cloud
and stores them in \`.vercel/.env.\${target}.local\` and \`.vercel/project.json\` respectively.

${chalk.cyan(`$ ${getPkgName()} pull`)}
${chalk.cyan(`$ ${getPkgName()} pull ./path-to-project`)}
${chalk.cyan(`$ ${getPkgName()} pull --env .env.local`)}
${chalk.cyan(`$ ${getPkgName()} pull ./path-to-project --env .env.local`)}

${chalk.gray('–')} Pull specific environment's Project Settings from the cloud
${chalk.gray('–')} Pull for a specific environment

${chalk.cyan(
`$ ${getPkgName()} pull --environment=${getEnvTargetPlaceholder()}`
)}

${chalk.gray(
'If you want to download environment variables to a specific file, use `vercel env pull` instead.'
)}
`);
};

function processArgs(client: Client) {
return getArgs(client.argv.slice(2), {
'--yes': Boolean,
'--env': String, // deprecated
'--environment': String,
'--debug': Boolean,
'-d': '--debug',
@@ -58,6 +58,7 @@ const isCanary = pkg.version.includes('canary');
const notifier = updateNotifier({
pkg,
distTag: isCanary ? 'canary' : 'latest',
updateCheckInterval: 1000 * 60 * 60 * 24 * 7, // 1 week
});

const VERCEL_DIR = getGlobalPathConfig();
82 changes: packages/cli/src/util/build/corepack.ts (new file)
@@ -0,0 +1,82 @@
import { delimiter, join } from 'path';
import { PackageJson, spawnAsync } from '@vercel/build-utils';
import fs from 'fs-extra';
import { CantParseJSONFile } from '../errors-ts';
import { VERCEL_DIR } from '../projects/link';
import readJSONFile from '../read-json-file';

export async function initCorepack({
  cwd,
  rootPackageJsonPath,
}: {
  cwd: string;
  rootPackageJsonPath: string;
}): Promise<string | null> {
  if (process.env.ENABLE_EXPERIMENTAL_COREPACK !== '1') {
    // Since corepack is experimental, we need to exit early
    // unless the user explicitly enables it with the env var.
    return null;
  }
  const pkg = await readJSONFile<PackageJson>(
    join(rootPackageJsonPath, 'package.json')
  );
  if (pkg instanceof CantParseJSONFile) {
    console.warn(
      'Warning: Could not enable corepack because package.json is invalid JSON'
    );
  } else if (!pkg?.packageManager) {
    console.warn(
      'Warning: Could not enable corepack because package.json is missing "packageManager" property'
    );
  } else {
    console.log(
      `Detected ENABLE_EXPERIMENTAL_COREPACK=1 and "${pkg.packageManager}" in package.json`
    );
    const corepackRootDir = join(cwd, VERCEL_DIR, 'cache', 'corepack');
    const corepackHomeDir = join(corepackRootDir, 'home');
    const corepackShimDir = join(corepackRootDir, 'shim');
    await fs.mkdirp(corepackHomeDir);
    await fs.mkdirp(corepackShimDir);
    process.env.COREPACK_HOME = corepackHomeDir;
    process.env.PATH = `${corepackShimDir}${delimiter}${process.env.PATH}`;
    process.env.DEBUG = process.env.DEBUG
      ? `corepack,${process.env.DEBUG}`
      : 'corepack';
    const pkgManagerName = pkg.packageManager.split('@')[0];
    // We must explicitly call `corepack enable npm` since `corepack enable`
    // doesn't work with npm. See https://github.com/nodejs/corepack/pull/24
    // Also, `corepack enable` is too broad and will change the version of
    // yarn & pnpm even though those versions are not specified by the user.
    // See https://github.com/nodejs/corepack#known-good-releases
    // Finally, we use `--install-directory` so we can cache the result to
    // reuse for subsequent builds. See `@vercel/vc-build` for `prepareCache`.
    await spawnAsync(
      'corepack',
      ['enable', pkgManagerName, '--install-directory', corepackShimDir],
      {
        prettyCommand: `corepack enable ${pkgManagerName}`,
      }
    );
    return corepackShimDir;
  }
  return null;
}

export function cleanupCorepack(corepackShimDir: string) {
  if (process.env.COREPACK_HOME) {
    delete process.env.COREPACK_HOME;
  }
  if (process.env.PATH) {
    process.env.PATH = process.env.PATH.replace(
      `${corepackShimDir}${delimiter}`,
      ''
    );
  }
  if (process.env.DEBUG) {
    if (process.env.DEBUG === 'corepack') {
      delete process.env.DEBUG;
    } else {
      process.env.DEBUG = process.env.DEBUG.replace('corepack,', '');
    }
  }
}
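For orientation, here is a minimal sketch of how these two helpers could be wrapped around a build step. The `runBuild` callback and the relative import path are hypothetical stand-ins, not part of this diff:

```ts
import { initCorepack, cleanupCorepack } from './corepack';

async function buildWithCorepack(cwd: string, runBuild: () => Promise<void>) {
  // Returns null unless ENABLE_EXPERIMENTAL_COREPACK=1 is set and
  // package.json declares a "packageManager" field.
  const corepackShimDir = await initCorepack({ cwd, rootPackageJsonPath: cwd });
  try {
    await runBuild();
  } finally {
    // Restore PATH, COREPACK_HOME and DEBUG to their previous values.
    if (corepackShimDir) {
      cleanupCorepack(corepackShimDir);
    }
  }
}
```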
@@ -106,9 +106,10 @@ export async function resolveBuilders(
|
||||
// If `pkgPath` wasn't found in `.vercel/builders` then try as a CLI local
|
||||
// dependency. `require.resolve()` will throw if the Builder is not a CLI
|
||||
// dep, in which case we'll install it into `.vercel/builders`.
|
||||
pkgPath = require.resolve(`${name}/package.json`, {
|
||||
// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
|
||||
pkgPath = eval('require').resolve(`${name}/package.json`, {
|
||||
paths: [__dirname],
|
||||
});
|
||||
}) as string;
|
||||
builderPkg = await readJSON(pkgPath);
|
||||
}
|
||||
|
||||
@@ -148,7 +149,9 @@ export async function resolveBuilders(
|
||||
// TODO: handle `parsed.type === 'tag'` ("latest" vs. anything else?)
|
||||
|
||||
const path = join(dirname(pkgPath), builderPkg.main || 'index.js');
|
||||
const builder = require(path);
|
||||
|
||||
// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
|
||||
const builder = eval('require')(path);
|
||||
|
||||
builders.set(spec, {
|
||||
builder,
|
||||
|
||||
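The `eval('require')` indirection exists because the bundled CLI rewrites literal `require(...)` calls to `__webpack_require__`, which cannot resolve Builders installed at runtime into `.vercel/builders`. A rough sketch of the pattern, with a hypothetical Builder package name:

```ts
const name = 'txt-builder'; // hypothetical Builder package name

// A literal `require(...)` here would be rewritten to `__webpack_require__`
// by the bundler and would fail for packages installed after the CLI was
// compiled. eval('require') keeps a reference to Node's real require.
const dynamicRequire = eval('require') as NodeRequire;
const pkgPath = dynamicRequire.resolve(`${name}/package.json`, {
  paths: [__dirname],
});
const builderPkg = dynamicRequire(pkgPath);
console.log(builderPkg.main); // e.g. "index.js"
```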
@@ -23,6 +23,7 @@ import { VERCEL_DIR } from '../projects/link';
|
||||
export const OUTPUT_DIR = join(VERCEL_DIR, 'output');
|
||||
|
||||
export async function writeBuildResult(
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV2 | BuildResultV3,
|
||||
build: Builder,
|
||||
builder: BuilderV2 | BuilderV3,
|
||||
@@ -31,9 +32,13 @@ export async function writeBuildResult(
|
||||
) {
|
||||
const { version } = builder;
|
||||
if (version === 2) {
|
||||
return writeBuildResultV2(buildResult as BuildResultV2, cleanUrls);
|
||||
return writeBuildResultV2(
|
||||
outputDir,
|
||||
buildResult as BuildResultV2,
|
||||
cleanUrls
|
||||
);
|
||||
} else if (version === 3) {
|
||||
return writeBuildResultV3(buildResult as BuildResultV3, build);
|
||||
return writeBuildResultV3(outputDir, buildResult as BuildResultV3, build);
|
||||
}
|
||||
throw new Error(
|
||||
`Unsupported Builder version \`${version}\` from "${builderPkg.name}"`
|
||||
@@ -67,11 +72,12 @@ export interface PathOverride {
|
||||
* the filesystem.
|
||||
*/
|
||||
async function writeBuildResultV2(
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV2,
|
||||
cleanUrls?: boolean
|
||||
) {
|
||||
if ('buildOutputPath' in buildResult) {
|
||||
await mergeBuilderOutput(buildResult);
|
||||
await mergeBuilderOutput(outputDir, buildResult);
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -79,16 +85,16 @@ async function writeBuildResultV2(
|
||||
const overrides: Record<string, PathOverride> = {};
|
||||
for (const [path, output] of Object.entries(buildResult.output)) {
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(output, path, lambdas);
|
||||
await writeLambda(outputDir, output, path, lambdas);
|
||||
} else if (isPrerender(output)) {
|
||||
await writeLambda(output.lambda, path, lambdas);
|
||||
await writeLambda(outputDir, output.lambda, path, lambdas);
|
||||
|
||||
// Write the fallback file alongside the Lambda directory
|
||||
let fallback = output.fallback;
|
||||
if (fallback) {
|
||||
const ext = getFileExtension(fallback);
|
||||
const fallbackName = `${path}.prerender-fallback${ext}`;
|
||||
const fallbackPath = join(OUTPUT_DIR, 'functions', fallbackName);
|
||||
const fallbackPath = join(outputDir, 'functions', fallbackName);
|
||||
const stream = fallback.toStream();
|
||||
await pipe(
|
||||
stream,
|
||||
@@ -101,7 +107,7 @@ async function writeBuildResultV2(
|
||||
}
|
||||
|
||||
const prerenderConfigPath = join(
|
||||
OUTPUT_DIR,
|
||||
outputDir,
|
||||
'functions',
|
||||
`${path}.prerender-config.json`
|
||||
);
|
||||
@@ -112,9 +118,9 @@ async function writeBuildResultV2(
|
||||
};
|
||||
await fs.writeJSON(prerenderConfigPath, prerenderConfig, { spaces: 2 });
|
||||
} else if (isFile(output)) {
|
||||
await writeStaticFile(output, path, overrides, cleanUrls);
|
||||
await writeStaticFile(outputDir, output, path, overrides, cleanUrls);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(output, path);
|
||||
await writeEdgeFunction(outputDir, output, path);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unsupported output type: "${(output as any).type}" for ${path}`
|
||||
@@ -128,15 +134,24 @@ async function writeBuildResultV2(
|
||||
* Writes the output from the `build()` return value of a v3 Builder to
|
||||
* the filesystem.
|
||||
*/
|
||||
async function writeBuildResultV3(buildResult: BuildResultV3, build: Builder) {
|
||||
async function writeBuildResultV3(
|
||||
outputDir: string,
|
||||
buildResult: BuildResultV3,
|
||||
build: Builder
|
||||
) {
|
||||
const { output } = buildResult;
|
||||
if (isLambda(output)) {
|
||||
const src = build.src!;
|
||||
const src = build.src;
|
||||
if (typeof src !== 'string') {
|
||||
throw new Error(`Expected "build.src" to be a string`);
|
||||
}
|
||||
const ext = extname(src);
|
||||
const path = build.config?.zeroConfig
|
||||
? src.substring(0, src.length - ext.length)
|
||||
: src;
|
||||
await writeLambda(output, path);
|
||||
if (isLambda(output)) {
|
||||
await writeLambda(outputDir, output, path);
|
||||
} else if (isEdgeFunction(output)) {
|
||||
await writeEdgeFunction(outputDir, output, path);
|
||||
} else {
|
||||
throw new Error(
|
||||
`Unsupported output type: "${(output as any).type}" for ${build.src}`
|
||||
@@ -154,6 +169,7 @@ async function writeBuildResultV3(buildResult: BuildResultV3, build: Builder) {
|
||||
* @param overrides Record of override configuration when a File is renamed or has other metadata
|
||||
*/
|
||||
async function writeStaticFile(
|
||||
outputDir: string,
|
||||
file: File,
|
||||
path: string,
|
||||
overrides: Record<string, PathOverride>,
|
||||
@@ -191,7 +207,7 @@ async function writeStaticFile(
|
||||
overrides[fsPath] = override;
|
||||
}
|
||||
|
||||
const dest = join(OUTPUT_DIR, 'static', fsPath);
|
||||
const dest = join(outputDir, 'static', fsPath);
|
||||
await fs.mkdirp(dirname(dest));
|
||||
|
||||
// TODO: handle (or skip) symlinks?
|
||||
@@ -205,8 +221,12 @@ async function writeStaticFile(
|
||||
* @param edgeFunction The `EdgeFunction` instance
|
||||
* @param path The URL path where the `EdgeFunction` can be accessed from
|
||||
*/
|
||||
async function writeEdgeFunction(edgeFunction: EdgeFunction, path: string) {
|
||||
const dest = join(OUTPUT_DIR, 'functions', `${path}.func`);
|
||||
async function writeEdgeFunction(
|
||||
outputDir: string,
|
||||
edgeFunction: EdgeFunction,
|
||||
path: string
|
||||
) {
|
||||
const dest = join(outputDir, 'functions', `${path}.func`);
|
||||
|
||||
await fs.mkdirp(dest);
|
||||
const ops: Promise<any>[] = [];
|
||||
@@ -235,11 +255,12 @@ async function writeEdgeFunction(edgeFunction: EdgeFunction, path: string) {
|
||||
* @param lambdas (optional) Map of `Lambda` instances that have previously been written
|
||||
*/
|
||||
async function writeLambda(
|
||||
outputDir: string,
|
||||
lambda: Lambda,
|
||||
path: string,
|
||||
lambdas?: Map<Lambda, string>
|
||||
) {
|
||||
const dest = join(OUTPUT_DIR, 'functions', `${path}.func`);
|
||||
const dest = join(outputDir, 'functions', `${path}.func`);
|
||||
|
||||
// If the `lambda` has already been written to the filesystem at a different
|
||||
// location then create a symlink to the previous location instead of copying
|
||||
@@ -248,7 +269,7 @@ async function writeLambda(
|
||||
if (existingLambdaPath) {
|
||||
const destDir = dirname(dest);
|
||||
const targetDest = join(
|
||||
OUTPUT_DIR,
|
||||
outputDir,
|
||||
'functions',
|
||||
`${existingLambdaPath}.func`
|
||||
);
|
||||
@@ -312,14 +333,17 @@ async function writeLambda(
|
||||
* `.vercel/output` directory that was specified by the Builder into the
|
||||
* `vc build` output directory.
|
||||
*/
|
||||
async function mergeBuilderOutput(buildResult: BuildResultBuildOutput) {
|
||||
const absOutputDir = resolve(OUTPUT_DIR);
|
||||
async function mergeBuilderOutput(
|
||||
outputDir: string,
|
||||
buildResult: BuildResultBuildOutput
|
||||
) {
|
||||
const absOutputDir = resolve(outputDir);
|
||||
if (absOutputDir === buildResult.buildOutputPath) {
|
||||
// `.vercel/output` dir is already in the correct location,
|
||||
// so no need to do anything
|
||||
return;
|
||||
}
|
||||
await fs.copy(buildResult.buildOutputPath, OUTPUT_DIR);
|
||||
await fs.copy(buildResult.buildOutputPath, outputDir);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
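Taken together, these writers populate the Build Output directory that `vc build` emits. Roughly, assuming the default `.vercel/output` location (the `api/foo` names are placeholders; the actual contents depend on which Builders ran):

```
.vercel/output/
  builds.json                        list of Builders that were executed
  config.json                        build output configuration (e.g. "version": 3)
  static/                            files written by writeStaticFile()
  functions/
    api/foo.func/                    written by writeLambda() or writeEdgeFunction()
    api/foo.prerender-config.json    prerender metadata
    api/foo.prerender-fallback.html  fallback file written alongside the Lambda
```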
@@ -142,6 +142,7 @@ export async function executeBuild(
|
||||
files,
|
||||
entrypoint,
|
||||
workPath,
|
||||
repoRootPath: workPath,
|
||||
config,
|
||||
meta: {
|
||||
isDev: true,
|
||||
|
||||
@@ -1735,6 +1735,7 @@ export default class DevServer {
|
||||
entrypoint: match.entrypoint,
|
||||
workPath,
|
||||
config: match.config || {},
|
||||
repoRootPath: this.cwd,
|
||||
meta: {
|
||||
isDev: true,
|
||||
requestPath,
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { resolve } from 'path';
|
||||
import _glob, { IOptions as GlobOptions } from 'glob';
|
||||
import fs from 'fs-extra';
|
||||
import { getVercelIgnore } from '@vercel/client';
|
||||
import uniqueStrings from './unique-strings';
|
||||
@@ -21,14 +20,6 @@ function flatten(
|
||||
return res;
|
||||
}
|
||||
|
||||
async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
_glob(pattern, options, (err, files) => {
|
||||
err ? reject(err) : resolve(files);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Transform relative paths into absolutes,
|
||||
* and maintains absolutes as such.
|
||||
@@ -65,15 +56,16 @@ interface StaticFilesOptions {
|
||||
export async function staticFiles(
|
||||
path: string,
|
||||
{ output, src }: StaticFilesOptions
|
||||
) {
|
||||
): Promise<string[]> {
|
||||
const { debug, time } = output;
|
||||
let files: string[] = [];
|
||||
|
||||
// The package.json `files` whitelist still
|
||||
// honors ignores: https://docs.npmjs.com/files/package.json#files
|
||||
const source = src || '.';
|
||||
// Convert all filenames into absolute paths
|
||||
const search = await glob(source, { cwd: path, absolute: true, dot: true });
|
||||
|
||||
// Ensure that `path` is an absolute path
|
||||
const search = resolve(path, source);
|
||||
|
||||
// Compile list of ignored patterns and files
|
||||
const { ig } = await getVercelIgnore(path);
|
||||
@@ -104,7 +96,7 @@ export async function staticFiles(
|
||||
// Locate files
|
||||
files = await time(
|
||||
`Locating files ${path}`,
|
||||
explode(search, {
|
||||
explode([search], {
|
||||
accepts,
|
||||
output,
|
||||
})
|
||||
@@ -164,7 +156,7 @@ async function explode(
|
||||
const all = await fs.readdir(file);
|
||||
/* eslint-disable no-use-before-define */
|
||||
const recursive = many(all.map(subdir => asAbsolute(subdir, file)));
|
||||
return (recursive as any) as Promise<string | null>;
|
||||
return recursive as any as Promise<string | null>;
|
||||
/* eslint-enable no-use-before-define */
|
||||
}
|
||||
if (!s.isFile()) {
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
* @param {Array} arr Input array that potentially has duplicates
|
||||
* @returns {Array} An array of the unique values in `arr`
|
||||
*/
|
||||
export default (arr: string[]) => {
|
||||
export default (arr: string[]): string[] => {
|
||||
const len = arr.length;
|
||||
const res: string[] = [];
|
||||
const o: { [key: string]: string | number } = {};
|
||||
|
||||
2 changes: packages/cli/test/fixtures/unit/commands/build/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
!/*/.vercel
/*/.vercel/output

1 change: packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/.gitignore (vendored, new file)
@@ -0,0 +1 @@
!node_modules

15 changes: packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/node_modules/edge-function/builder.js (generated, vendored, new file)
@@ -0,0 +1,15 @@
const { EdgeFunction } = require('@vercel/build-utils');

exports.version = 3;

exports.build = async ({ entrypoint, files }) => {
  const output = new EdgeFunction({
    name: entrypoint,
    deploymentTarget: 'v8-worker',
    entrypoint,
    files: {
      [entrypoint]: files[entrypoint]
    },
  });
  return { output };
};

6 changes: packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/builders/node_modules/edge-function/package.json (generated, vendored, new file)
@@ -0,0 +1,6 @@
{
  "name": "edge-function",
  "private": true,
  "version": "0.0.0",
  "main": "builder.js"
}

7 changes: packages/cli/test/fixtures/unit/commands/build/edge-function/.vercel/project.json (vendored, new file)
@@ -0,0 +1,7 @@
{
  "orgId": ".",
  "projectId": ".",
  "settings": {
    "framework": null
  }
}

5 changes: packages/cli/test/fixtures/unit/commands/build/edge-function/api/edge.js (vendored, new file)
@@ -0,0 +1,5 @@
export const config = {
  runtime: 'experimental-edge',
};

export default req => new Response('from edge');

7 changes: packages/cli/test/fixtures/unit/commands/build/edge-function/vercel.json (vendored, new file)
@@ -0,0 +1,7 @@
{
  "functions": {
    "api/*.js": {
      "runtime": "edge-function@0.0.0"
    }
  }
}

7 changes: packages/cli/test/fixtures/unit/commands/build/node/.vercel/project.json (vendored, new file)
@@ -0,0 +1,7 @@
{
  "orgId": ".",
  "projectId": ".",
  "settings": {
    "framework": null
  }
}

1 change: packages/cli/test/fixtures/unit/commands/build/node/api/es6.js (vendored, new file)
@@ -0,0 +1 @@
export default (req, res) => res.end('Vercel');

1 change: packages/cli/test/fixtures/unit/commands/build/node/api/index.js (vendored, new file)
@@ -0,0 +1 @@
module.exports = (req, res) => res.end('Vercel');

1 change: packages/cli/test/fixtures/unit/commands/build/node/api/mjs.mjs (vendored, new file)
@@ -0,0 +1 @@
export default (req, res) => res.end('Vercel');

3 changes: packages/cli/test/fixtures/unit/commands/build/node/api/typescript.ts (vendored, new file)
@@ -0,0 +1,3 @@
import { IncomingMessage, ServerResponse } from 'http';

export default (req: IncomingMessage, res: ServerResponse) => res.end('Vercel');

7 changes: packages/cli/test/fixtures/unit/commands/build/static/.vercel/project.json (vendored, new file)
@@ -0,0 +1,7 @@
{
  "orgId": ".",
  "projectId": ".",
  "settings": {
    "framework": null
  }
}

1 change: packages/cli/test/fixtures/unit/commands/build/static/index.html (vendored, new file)
@@ -0,0 +1 @@
<h1>Vercel</h1>

@@ -0,0 +1 @@
!node_modules

14 changes: packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/builders/node_modules/txt-builder/index.js (generated, vendored, new file)
@@ -0,0 +1,14 @@
const { Lambda } = require('@vercel/build-utils');

exports.version = 3;

exports.build = async ({ entrypoint, files }) => {
  const output = new Lambda({
    files: {
      [entrypoint]: files[entrypoint]
    },
    runtime: 'provided',
    handler: entrypoint
  });
  return { output };
};

6 changes: packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/builders/node_modules/txt-builder/package.json (generated, vendored, new file)
@@ -0,0 +1,6 @@
{
  "name": "txt-builder",
  "private": true,
  "version": "0.0.0",
  "main": "index.js"
}

7 changes: packages/cli/test/fixtures/unit/commands/build/third-party-builder/.vercel/project.json (vendored, new file)
@@ -0,0 +1,7 @@
{
  "orgId": ".",
  "projectId": ".",
  "settings": {
    "framework": null
  }
}

1 change: packages/cli/test/fixtures/unit/commands/build/third-party-builder/api/foo.txt (vendored, new file)
@@ -0,0 +1 @@
Text file

7 changes: packages/cli/test/fixtures/unit/commands/build/third-party-builder/vercel.json (vendored, new file)
@@ -0,0 +1,7 @@
{
  "functions": {
    "api/*.txt": {
      "runtime": "txt-builder@0.0.0"
    }
  }
}
@@ -6,5 +6,10 @@ const b = require('./b');
|
||||
a();
|
||||
b();
|
||||
|
||||
exec('mkdir public', __dirname);
|
||||
exec('echo "Hello, World!" > public/index.html', __dirname);
|
||||
exec('mkdir public', __dirname)
|
||||
.then(() => {
|
||||
exec('echo "Hello, World!" > public/index.html', __dirname).then(() => {
|
||||
console.log('Success');
|
||||
});
|
||||
})
|
||||
.catch(console.error);
|
||||
|
||||
@@ -19,7 +19,7 @@ const getRevertAliasConfigFile = () => {
|
||||
],
|
||||
});
|
||||
};
|
||||
module.exports = async function prepare(session, binaryPath) {
|
||||
module.exports = async function prepare(session, binaryPath, tmpFixturesDir) {
|
||||
const spec = {
|
||||
'static-single-file': {
|
||||
'first.png': getImageFile(session, { size: 30 }),
|
||||
@@ -426,16 +426,72 @@ module.exports = async function prepare(session, binaryPath) {
|
||||
projectId: 'QmRoBYhejkkmssotLZr8tWgewPdPcjYucYUNERFbhJrRNi',
|
||||
}),
|
||||
},
|
||||
'vc-build-static-build': {
|
||||
'.vercel/project.json': JSON.stringify({
|
||||
orgId: '.',
|
||||
projectId: '.',
|
||||
settings: {
|
||||
framework: null,
|
||||
},
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
scripts: {
|
||||
build: 'mkdir -p public && echo hi > public/index.txt',
|
||||
},
|
||||
}),
|
||||
},
|
||||
'vc-build-corepack-npm': {
|
||||
'.vercel/project.json': JSON.stringify({
|
||||
orgId: '.',
|
||||
projectId: '.',
|
||||
settings: {
|
||||
framework: null,
|
||||
},
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
private: true,
|
||||
packageManager: 'npm@8.1.0',
|
||||
scripts: {
|
||||
build: 'mkdir -p public && npm --version > public/index.txt',
|
||||
},
|
||||
}),
|
||||
},
|
||||
'vc-build-corepack-pnpm': {
|
||||
'.vercel/project.json': JSON.stringify({
|
||||
orgId: '.',
|
||||
projectId: '.',
|
||||
settings: {
|
||||
framework: null,
|
||||
},
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
private: true,
|
||||
packageManager: 'pnpm@7.1.0',
|
||||
scripts: {
|
||||
build: 'mkdir -p public && pnpm --version > public/index.txt',
|
||||
},
|
||||
}),
|
||||
},
|
||||
'vc-build-corepack-yarn': {
|
||||
'.vercel/project.json': JSON.stringify({
|
||||
orgId: '.',
|
||||
projectId: '.',
|
||||
settings: {
|
||||
framework: null,
|
||||
},
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
private: true,
|
||||
packageManager: 'yarn@2.4.3',
|
||||
scripts: {
|
||||
build: 'mkdir -p public && yarn --version > public/index.txt',
|
||||
},
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
for (const [typeName, needed] of Object.entries(spec)) {
|
||||
const directory = join(
|
||||
__dirname,
|
||||
'..',
|
||||
'fixtures',
|
||||
'integration',
|
||||
typeName
|
||||
);
|
||||
const directory = join(tmpFixturesDir, typeName);
|
||||
|
||||
await mkdirp(directory);
|
||||
|
||||
|
||||
147 changes: packages/cli/test/integration.js (vendored)
@@ -4,7 +4,7 @@ import { URL, parse as parseUrl } from 'url';
|
||||
import test from 'ava';
|
||||
import semVer from 'semver';
|
||||
import { Readable } from 'stream';
|
||||
import { homedir } from 'os';
|
||||
import { homedir, tmpdir } from 'os';
|
||||
import _execa from 'execa';
|
||||
import XDGAppPaths from 'xdg-app-paths';
|
||||
import fetch from 'node-fetch';
|
||||
@@ -31,7 +31,7 @@ function execa(file, args, options) {
|
||||
}
|
||||
|
||||
function fixture(name) {
|
||||
const directory = path.join(__dirname, 'fixtures', 'integration', name);
|
||||
const directory = path.join(tmpFixturesDir, name);
|
||||
const config = path.join(directory, 'project.json');
|
||||
|
||||
// We need to remove it, otherwise we can't re-use fixtures
|
||||
@@ -146,6 +146,7 @@ let email;
|
||||
let contextName;
|
||||
|
||||
let tmpDir;
|
||||
let tmpFixturesDir = path.join(tmpdir(), 'tmp-fixtures');
|
||||
|
||||
let globalDir = XDGAppPaths('com.vercel.cli').dataDirs()[0];
|
||||
|
||||
@@ -327,7 +328,7 @@ async function setupProject(process, projectName, overrides) {
|
||||
test.before(async () => {
|
||||
try {
|
||||
await createUser();
|
||||
await prepareFixtures(contextName, binaryPath);
|
||||
await prepareFixtures(contextName, binaryPath, tmpFixturesDir);
|
||||
} catch (err) {
|
||||
console.log('Failed `test.before`');
|
||||
console.log(err);
|
||||
@@ -335,6 +336,8 @@ test.before(async () => {
|
||||
});
|
||||
|
||||
test.after.always(async () => {
|
||||
delete process.env.ENABLE_EXPERIMENTAL_COREPACK;
|
||||
|
||||
if (loginApiServer) {
|
||||
// Stop mock server
|
||||
loginApiServer.close();
|
||||
@@ -349,6 +352,11 @@ test.after.always(async () => {
|
||||
// Remove config directory entirely
|
||||
tmpDir.removeCallback();
|
||||
}
|
||||
|
||||
if (tmpFixturesDir) {
|
||||
console.log('removing tmpFixturesDir', tmpFixturesDir);
|
||||
fs.removeSync(tmpFixturesDir);
|
||||
}
|
||||
});
|
||||
|
||||
test('default command should prompt login with empty auth.json', async t => {
|
||||
@@ -390,6 +398,99 @@ test('login', async t => {
|
||||
t.is(auth.token, token);
|
||||
});
|
||||
|
||||
test('[vc build] should build project with corepack and select npm@8.1.0', async t => {
|
||||
process.env.ENABLE_EXPERIMENTAL_COREPACK = '1';
|
||||
const directory = fixture('vc-build-corepack-npm');
|
||||
const before = await _execa('npm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
const output = await execute(['build'], { cwd: directory });
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
t.regex(output.stderr, /Build Completed/gm);
|
||||
const after = await _execa('npm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
// Ensure global npm didn't change
|
||||
t.is(before.stdout, after.stdout);
|
||||
// Ensure version is correct
|
||||
t.is(
|
||||
await fs.readFile(
|
||||
path.join(directory, '.vercel/output/static/index.txt'),
|
||||
'utf8'
|
||||
),
|
||||
'8.1.0\n'
|
||||
);
|
||||
// Ensure corepack will be cached
|
||||
const contents = fs.readdirSync(
|
||||
path.join(directory, '.vercel/cache/corepack')
|
||||
);
|
||||
t.deepEqual(contents, ['home', 'shim']);
|
||||
});
|
||||
|
||||
test('[vc build] should build project with corepack and select pnpm@7.1.0', async t => {
|
||||
process.env.ENABLE_EXPERIMENTAL_COREPACK = '1';
|
||||
const directory = fixture('vc-build-corepack-pnpm');
|
||||
const before = await _execa('pnpm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
const output = await execute(['build'], { cwd: directory });
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
t.regex(output.stderr, /Build Completed/gm);
|
||||
const after = await _execa('pnpm', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
// Ensure global pnpm didn't change
|
||||
t.is(before.stdout, after.stdout);
|
||||
// Ensure version is correct
|
||||
t.is(
|
||||
await fs.readFile(
|
||||
path.join(directory, '.vercel/output/static/index.txt'),
|
||||
'utf8'
|
||||
),
|
||||
'7.1.0\n'
|
||||
);
|
||||
// Ensure corepack will be cached
|
||||
const contents = fs.readdirSync(
|
||||
path.join(directory, '.vercel/cache/corepack')
|
||||
);
|
||||
t.deepEqual(contents, ['home', 'shim']);
|
||||
});
|
||||
|
||||
test('[vc build] should build project with corepack and select yarn@2.4.3', async t => {
|
||||
process.env.ENABLE_EXPERIMENTAL_COREPACK = '1';
|
||||
const directory = fixture('vc-build-corepack-yarn');
|
||||
const before = await _execa('yarn', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
const output = await execute(['build'], { cwd: directory });
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
t.regex(output.stderr, /Build Completed/gm);
|
||||
const after = await _execa('yarn', ['--version'], {
|
||||
cwd: directory,
|
||||
reject: false,
|
||||
});
|
||||
// Ensure global yarn didn't change
|
||||
t.is(before.stdout, after.stdout);
|
||||
// Ensure version is correct
|
||||
t.is(
|
||||
await fs.readFile(
|
||||
path.join(directory, '.vercel/output/static/index.txt'),
|
||||
'utf8'
|
||||
),
|
||||
'2.4.3\n'
|
||||
);
|
||||
// Ensure corepack will be cached
|
||||
const contents = fs.readdirSync(
|
||||
path.join(directory, '.vercel/cache/corepack')
|
||||
);
|
||||
t.deepEqual(contents, ['home', 'shim']);
|
||||
});
|
||||
|
||||
test('default command should deploy directory', async t => {
|
||||
const projectDir = fixture('deploy-default-with-sub-directory');
|
||||
const target = 'output';
|
||||
@@ -1507,7 +1608,7 @@ test('try to purchase a domain', async t => {
|
||||
|
||||
const { stderr, stdout, exitCode } = await execa(
|
||||
binaryPath,
|
||||
['domains', 'buy', `${session}-test.org`, ...defaultArgs],
|
||||
['domains', 'buy', `${session}-test.com`, ...defaultArgs],
|
||||
{
|
||||
reject: false,
|
||||
input: stream,
|
||||
@@ -1522,10 +1623,9 @@ test('try to purchase a domain', async t => {
|
||||
console.log(exitCode);
|
||||
|
||||
t.is(exitCode, 1);
|
||||
t.true(
|
||||
stderr.includes(
|
||||
`Error! Could not purchase domain. Please add a payment method using \`vercel billing add\`.`
|
||||
)
|
||||
t.regex(
|
||||
stderr,
|
||||
/Error! Could not purchase domain\. Please add a payment method using/
|
||||
);
|
||||
});
|
||||
|
||||
@@ -1537,7 +1637,7 @@ test('try to transfer-in a domain with "--code" option', async t => {
|
||||
'transfer-in',
|
||||
'--code',
|
||||
'xyz',
|
||||
`${session}-test.org`,
|
||||
`${session}-test.com`,
|
||||
...defaultArgs,
|
||||
],
|
||||
{
|
||||
@@ -1551,7 +1651,7 @@ test('try to transfer-in a domain with "--code" option', async t => {
|
||||
|
||||
t.true(
|
||||
stderr.includes(
|
||||
`Error! The domain "${session}-test.org" is not transferable.`
|
||||
`Error! The domain "${session}-test.com" is not transferable.`
|
||||
)
|
||||
);
|
||||
t.is(exitCode, 1);
|
||||
@@ -3782,3 +3882,30 @@ test('[vc link] should support the `--project` flag', async t => {
|
||||
formatOutput(output)
|
||||
);
|
||||
});
|
||||
|
||||
test('[vc build] should build project with `@vercel/static-build`', async t => {
|
||||
const directory = fixture('vc-build-static-build');
|
||||
const output = await execute(['build'], { cwd: directory });
|
||||
t.is(output.exitCode, 0);
|
||||
t.true(output.stderr.includes('Build Completed in .vercel/output'));
|
||||
|
||||
t.is(
|
||||
await fs.readFile(
|
||||
path.join(directory, '.vercel/output/static/index.txt'),
|
||||
'utf8'
|
||||
),
|
||||
'hi\n'
|
||||
);
|
||||
|
||||
const config = await fs.readJSON(
|
||||
path.join(directory, '.vercel/output/config.json')
|
||||
);
|
||||
t.is(config.version, 3);
|
||||
|
||||
const builds = await fs.readJSON(
|
||||
path.join(directory, '.vercel/output/builds.json')
|
||||
);
|
||||
t.is(builds.target, 'preview');
|
||||
t.is(builds.builds[0].src, 'package.json');
|
||||
t.is(builds.builds[0].use, '@vercel/static-build');
|
||||
});
|
||||
|
||||
240 changes: packages/cli/test/unit/commands/build.test.ts (new file)
@@ -0,0 +1,240 @@
|
||||
import ms from 'ms';
|
||||
import fs from 'fs-extra';
|
||||
import { join } from 'path';
|
||||
import { client } from '../../mocks/client';
|
||||
import build from '../../../src/commands/build';
|
||||
|
||||
jest.setTimeout(ms('1 minute'));
|
||||
|
||||
const fixture = (name: string) =>
|
||||
join(__dirname, '../../fixtures/unit/commands/build', name);
|
||||
|
||||
describe('build', () => {
|
||||
const originalCwd = process.cwd();
|
||||
|
||||
it('should build with `@vercel/static`', async () => {
|
||||
const cwd = fixture('static');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/static" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
src: '**',
|
||||
use: '@vercel/static',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory contains static files
|
||||
const files = await fs.readdir(join(output, 'static'));
|
||||
expect(files.sort()).toEqual(['index.html']);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should build with `@vercel/node`', async () => {
|
||||
const cwd = fixture('node');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/node" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/es6.js',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/index.js',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/mjs.mjs',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
{
|
||||
require: '@vercel/node',
|
||||
apiVersion: 3,
|
||||
use: '@vercel/node',
|
||||
src: 'api/typescript.ts',
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory is empty
|
||||
const hasStaticFiles = await fs.pathExists(join(output, 'static'));
|
||||
expect(
|
||||
hasStaticFiles,
|
||||
'Expected ".vercel/output/static" to not exist'
|
||||
).toEqual(false);
|
||||
|
||||
// "functions/api" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions/api'));
|
||||
expect(functions.sort()).toEqual([
|
||||
'es6.func',
|
||||
'index.func',
|
||||
'mjs.func',
|
||||
'typescript.func',
|
||||
]);
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should build with 3rd party Builder', async () => {
|
||||
const cwd = fixture('third-party-builder');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/node" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'preview',
|
||||
builds: [
|
||||
{
|
||||
require: 'txt-builder',
|
||||
apiVersion: 3,
|
||||
use: 'txt-builder@0.0.0',
|
||||
src: 'api/foo.txt',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
functions: {
|
||||
'api/*.txt': {
|
||||
runtime: 'txt-builder@0.0.0',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
use: '@vercel/static',
|
||||
src: '!{api/**,package.json}',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory is empty
|
||||
const hasStaticFiles = await fs.pathExists(join(output, 'static'));
|
||||
expect(
|
||||
hasStaticFiles,
|
||||
'Expected ".vercel/output/static" to not exist'
|
||||
).toEqual(false);
|
||||
|
||||
// "functions/api" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions/api'));
|
||||
expect(functions.sort()).toEqual(['foo.func']);
|
||||
|
||||
const vcConfig = await fs.readJSON(
|
||||
join(output, 'functions/api/foo.func/.vc-config.json')
|
||||
);
|
||||
expect(vcConfig).toMatchObject({
|
||||
handler: 'api/foo.txt',
|
||||
runtime: 'provided',
|
||||
environment: {},
|
||||
});
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
|
||||
it('should serialize `EdgeFunction` output in version 3 Builder', async () => {
|
||||
const cwd = fixture('edge-function');
|
||||
const output = join(cwd, '.vercel/output');
|
||||
try {
|
||||
process.chdir(cwd);
|
||||
client.setArgv('build', '--prod');
|
||||
const exitCode = await build(client);
|
||||
expect(exitCode).toEqual(0);
|
||||
|
||||
// `builds.json` says that "@vercel/node" was run
|
||||
const builds = await fs.readJSON(join(output, 'builds.json'));
|
||||
expect(builds).toMatchObject({
|
||||
target: 'production',
|
||||
builds: [
|
||||
{
|
||||
require: 'edge-function',
|
||||
apiVersion: 3,
|
||||
use: 'edge-function@0.0.0',
|
||||
src: 'api/edge.js',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
functions: {
|
||||
'api/*.js': {
|
||||
runtime: 'edge-function@0.0.0',
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
require: '@vercel/static',
|
||||
apiVersion: 2,
|
||||
use: '@vercel/static',
|
||||
src: '!{api/**,package.json}',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
// "static" directory is empty
|
||||
const hasStaticFiles = await fs.pathExists(join(output, 'static'));
|
||||
expect(
|
||||
hasStaticFiles,
|
||||
'Expected ".vercel/output/static" to not exist'
|
||||
).toEqual(false);
|
||||
|
||||
// "functions/api" directory has output Functions
|
||||
const functions = await fs.readdir(join(output, 'functions/api'));
|
||||
expect(functions.sort()).toEqual(['edge.func']);
|
||||
|
||||
const vcConfig = await fs.readJSON(
|
||||
join(output, 'functions/api/edge.func/.vc-config.json')
|
||||
);
|
||||
expect(vcConfig).toMatchObject({
|
||||
runtime: 'edge',
|
||||
name: 'api/edge.js',
|
||||
deploymentTarget: 'v8-worker',
|
||||
entrypoint: 'api/edge.js',
|
||||
});
|
||||
} finally {
|
||||
process.chdir(originalCwd);
|
||||
delete process.env.__VERCEL_BUILD_RUNNING;
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -10,7 +10,7 @@ import {
|
||||
import vercelNextPkg from '@vercel/next/package.json';
|
||||
import vercelNodePkg from '@vercel/node/package.json';
|
||||
|
||||
jest.setTimeout(ms('20 seconds'));
|
||||
jest.setTimeout(ms('30 seconds'));
|
||||
|
||||
describe('importBuilders()', () => {
|
||||
it('should import built-in Builders', async () => {
|
||||
|
||||
@@ -18,8 +18,15 @@ const getStaticFiles = async (dir: string) => {
|
||||
|
||||
const normalizeWindowsPaths = (files: string[]) => {
|
||||
if (process.platform === 'win32') {
|
||||
const prefix = 'D:/a/vercel/vercel/packages/cli/test/fixtures/unit/';
|
||||
return files.map(f => f.replace(/\\/g, '/').slice(prefix.length));
|
||||
// On GitHub Actions "f" is an absolute path that looks like:
// "D:/a/vercel/vercel/packages/cli/test/fixtures/unit/"
// but on other OSes it is a relative path, so we normalize here.
|
||||
const prefix = 'packages/cli/test/fixtures/unit/';
|
||||
return files.map(f => {
|
||||
const normal = f.replace(/\\/g, '/');
|
||||
const i = normal.indexOf(prefix);
|
||||
return normal.slice(i + prefix.length);
|
||||
});
|
||||
}
|
||||
return files;
|
||||
};
|
||||
|
||||
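For instance, under the CI path layout described in the comment, a Windows path reduces to the same relative form the other platforms already report (the file path below is an invented example):

```ts
const prefix = 'packages/cli/test/fixtures/unit/';
const f =
  'D:\\a\\vercel\\vercel\\packages\\cli\\test\\fixtures\\unit\\commands\\build\\static\\index.html';
const normal = f.replace(/\\/g, '/');
const i = normal.indexOf(prefix);
console.log(normal.slice(i + prefix.length)); // "commands/build/static/index.html"
```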
@@ -5,10 +5,10 @@
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"moduleResolution": "node",
|
||||
"module": "commonjs",
|
||||
"target": "es2019",
|
||||
"target": "ES2020",
|
||||
"esModuleInterop": true,
|
||||
"allowJs": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"resolveJsonModule": true,
|
||||
"sourceMap": true,
|
||||
"outDir": "./dist",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "11.0.4-canary.2",
|
||||
"version": "12.0.1",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -20,7 +20,7 @@
|
||||
"test-unit": "yarn test tests/unit.*test.*"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
"node": ">= 14"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/async-retry": "1.4.1",
|
||||
@@ -42,7 +42,7 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "3.1.1-canary.2",
|
||||
"@vercel/build-utils": "4.1.0",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "3.0.0",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"compilerOptions": {
|
||||
"declaration": true,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"outDir": "dist",
|
||||
@@ -12,7 +12,7 @@
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"strict": true,
|
||||
"target": "es2019"
|
||||
"target": "ES2020"
|
||||
},
|
||||
"include": ["./src"]
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "0.9.2-canary.0",
|
||||
"version": "1.0.1",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
@@ -21,7 +21,7 @@
|
||||
"@types/js-yaml": "3.12.1",
|
||||
"@types/node": "12.0.4",
|
||||
"@types/node-fetch": "2.5.8",
|
||||
"@vercel/routing-utils": "1.13.3",
|
||||
"@vercel/routing-utils": "1.13.4",
|
||||
"ajv": "6.12.2",
|
||||
"typescript": "4.3.4"
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"compilerOptions": {
|
||||
"declaration": true,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
@@ -13,7 +13,7 @@
|
||||
"outDir": "./dist",
|
||||
"types": ["node", "jest"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
"target": "ES2020"
|
||||
},
|
||||
"include": ["src/*.ts"],
|
||||
"exclude": ["node_modules"]
|
||||
|
||||
@@ -2,7 +2,7 @@ import tar from 'tar';
|
||||
import execa from 'execa';
|
||||
import fetch from 'node-fetch';
|
||||
import { mkdirp, pathExists, readFile } from 'fs-extra';
|
||||
import { join } from 'path';
|
||||
import { join, delimiter } from 'path';
|
||||
import stringArgv from 'string-argv';
|
||||
import { debug } from '@vercel/build-utils';
|
||||
const versionMap = new Map([
|
||||
@@ -121,7 +121,7 @@ export async function createGo(
|
||||
) {
|
||||
const binPath = join(getGoDir(workPath), 'bin');
|
||||
debug(`Adding ${binPath} to PATH`);
|
||||
const path = `${binPath}:${process.env.PATH}`;
|
||||
const path = `${binPath}${delimiter}${process.env.PATH}`;
|
||||
const env: { [key: string]: string } = {
|
||||
...process.env,
|
||||
PATH: path,
|
||||
|
||||
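The switch from a hard-coded `:` to `delimiter` matters for Windows, where PATH entries are separated by `;`. A minimal sketch of the same pattern (the Go install location is a placeholder):

```ts
import { delimiter, join } from 'path';

// ':' on POSIX, ';' on Windows
const binPath = join('/tmp/go', 'bin'); // placeholder Go install location
const PATH = `${binPath}${delimiter}${process.env.PATH ?? ''}`;
console.log(PATH.split(delimiter)[0]); // the Go bin directory comes first
```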
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/go",
|
||||
"version": "1.4.4-canary.2",
|
||||
"version": "2.0.1",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
|
||||
@@ -25,7 +25,7 @@
|
||||
"@types/fs-extra": "^5.0.5",
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "^4.0.0",
|
||||
"@vercel/build-utils": "3.1.1-canary.2",
|
||||
"@vercel/build-utils": "4.1.0",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"async-retry": "1.3.1",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"lib": ["ES2020"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
@@ -13,6 +13,6 @@
|
||||
"noImplicitThis": false,
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "es2018"
|
||||
"target": "ES2020"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "2.8.67-canary.2",
|
||||
"version": "3.0.1",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -45,9 +45,9 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "3.1.1-canary.2",
|
||||
"@vercel/build-utils": "4.1.0",
|
||||
"@vercel/nft": "0.19.1",
|
||||
"@vercel/routing-utils": "1.13.3",
|
||||
"@vercel/routing-utils": "1.13.4",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
"cheerio": "1.0.0-rc.10",
|
||||
|
||||
@@ -2,23 +2,6 @@ const KIB = 1024;
|
||||
const MIB = 1024 * KIB;
|
||||
|
||||
/**
|
||||
* The limit after compression. It has to be kibibytes instead of kilobytes
|
||||
* See https://github.com/cloudflare/wrangler/blob/8907b12add3d70ee21ac597b69cd66f6807571f4/src/wranglerjs/output.rs#L44
|
||||
* The maximum size of a *compressed* edge function.
|
||||
*/
|
||||
const EDGE_FUNCTION_SCRIPT_SIZE_LIMIT = MIB;
|
||||
|
||||
/**
|
||||
* This safety buffer must cover the size of our whole runtime layer compressed
|
||||
* plus some extra space to allow it to grow in the future. At the time of
|
||||
* writing this comment the compressed size size is ~7KiB so 20KiB should
|
||||
* be more than enough.
|
||||
*/
|
||||
const EDGE_FUNCTION_SCRIPT_SIZE_BUFFER = 20 * KIB;
|
||||
|
||||
/**
|
||||
* The max size we allow for compressed user code is the compressed script
|
||||
* limit minus the compressed safety buffer. We must check this limit after
|
||||
* compressing the user code.
|
||||
*/
|
||||
export const EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT =
|
||||
EDGE_FUNCTION_SCRIPT_SIZE_LIMIT - EDGE_FUNCTION_SCRIPT_SIZE_BUFFER;
|
||||
export const EDGE_FUNCTION_SIZE_LIMIT = MIB;
|
||||
|
||||
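In effect the check now allows the full compressed megabyte rather than the previous limit minus the 20 KiB safety buffer. The numbers, using the constants above:

```ts
const KIB = 1024;
const MIB = 1024 * KIB;

const oldLimit = MIB - 20 * KIB; // 1,028,096 bytes of gzipped user code
const newLimit = MIB;            // 1,048,576 bytes of gzipped user code
console.log(newLimit - oldLimit); // 20,480 bytes of extra headroom
```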
@@ -3,7 +3,7 @@ import { readFile } from 'fs-extra';
|
||||
import { ConcatSource, Source } from 'webpack-sources';
|
||||
import { fileToSource, raw, sourcemapped } from '../sourcemapped';
|
||||
import { join } from 'path';
|
||||
import { EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT } from './constants';
|
||||
import { EDGE_FUNCTION_SIZE_LIMIT } from './constants';
|
||||
import zlib from 'zlib';
|
||||
import { promisify } from 'util';
|
||||
import bytes from 'pretty-bytes';
|
||||
@@ -74,11 +74,11 @@ function getWasmImportStatements(wasm: { name: string }[] = []) {
|
||||
|
||||
async function validateScript(content: string) {
|
||||
const gzipped = await gzip(content);
|
||||
if (gzipped.length > EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT) {
|
||||
if (gzipped.length > EDGE_FUNCTION_SIZE_LIMIT) {
|
||||
throw new Error(
|
||||
`Exceeds maximum edge function script size: ${bytes(
|
||||
gzipped.length
|
||||
)} / ${bytes(EDGE_FUNCTION_USER_SCRIPT_SIZE_LIMIT)}`
|
||||
)} / ${bytes(EDGE_FUNCTION_SIZE_LIMIT)}`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -56,6 +56,7 @@ import {
|
||||
getExportStatus,
|
||||
getFilesMapFromReasons,
|
||||
getImagesManifest,
|
||||
getMiddlewareManifest,
|
||||
getNextConfig,
|
||||
getPageLambdaGroups,
|
||||
getPrerenderManifest,
|
||||
@@ -77,6 +78,7 @@ import {
|
||||
updateRouteSrc,
|
||||
validateEntrypoint,
|
||||
} from './utils';
|
||||
import assert from 'assert';
|
||||
|
||||
export const version = 2;
|
||||
export const htmlContentType = 'text/html; charset=utf-8';
|
||||
@@ -130,10 +132,11 @@ function getRealNextVersion(entryPath: string): string | false {
|
||||
// First try to resolve the `next` dependency and get the real version from its
|
||||
// package.json. This allows the builder to be used with frameworks like Blitz that
|
||||
// bundle Next but where Next isn't in the project root's package.json
|
||||
const nextVersion: string = require(resolveFrom(
|
||||
entryPath,
|
||||
'next/package.json'
|
||||
)).version;
|
||||
|
||||
// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
|
||||
const nextVersion: string = eval('require')(
|
||||
resolveFrom(entryPath, 'next/package.json')
|
||||
).version;
|
||||
console.log(`Detected Next.js version: ${nextVersion}`);
|
||||
return nextVersion;
|
||||
} catch (_ignored) {
|
||||
@@ -320,7 +323,7 @@ export const build: BuildV2 = async ({
|
||||
(nodeVersion?.major || 0) < 16
|
||||
) {
|
||||
// Ensure that npm 7 is at the beginning of the `$PATH`
|
||||
env.PATH = `/node16/bin-npm7:${env.PATH}`;
|
||||
env.PATH = `/node16/bin-npm7${path.delimiter}${env.PATH}`;
|
||||
console.log('Detected `package-lock.json` generated by npm 7...');
|
||||
}
|
||||
}
|
||||
@@ -1000,7 +1003,11 @@ export const build: BuildV2 = async ({
|
||||
buildId,
|
||||
'pages'
|
||||
);
|
||||
const pages = await glob('**/!(_middleware).js', pagesDir);
|
||||
const pages = await getServerlessPages({
|
||||
pagesDir,
|
||||
entryPath,
|
||||
outputDirectory,
|
||||
});
|
||||
const launcherPath = path.join(__dirname, 'legacy-launcher.js');
|
||||
const launcherData = await readFile(launcherPath, 'utf8');
|
||||
|
||||
@@ -1073,7 +1080,11 @@ export const build: BuildV2 = async ({
|
||||
'pages'
|
||||
);
|
||||
|
||||
const pages = await glob('**/!(_middleware).js', pagesDir);
|
||||
const pages = await getServerlessPages({
|
||||
pagesDir,
|
||||
entryPath,
|
||||
outputDirectory,
|
||||
});
|
||||
const isApiPage = (page: string) =>
|
||||
page
|
||||
.replace(/\\/g, '/')
|
||||
@@ -2574,3 +2585,32 @@ export const prepareCache: PrepareCache = async ({
|
||||
debug('Cache file manifest produced');
|
||||
return cache;
|
||||
};
|
||||
|
||||
async function getServerlessPages(params: {
|
||||
pagesDir: string;
|
||||
entryPath: string;
|
||||
outputDirectory: string;
|
||||
}) {
|
||||
const [pages, middlewareManifest] = await Promise.all([
|
||||
glob('**/!(_middleware).js', params.pagesDir),
|
||||
getMiddlewareManifest(params.entryPath, params.outputDirectory),
|
||||
]);
|
||||
|
||||
// Edge Functions are not considered Serverless Functions
|
||||
for (const edgeFunctionFile of Object.keys(
|
||||
middlewareManifest?.functions ?? {}
|
||||
)) {
|
||||
// `getStaticProps` expects `Prerender` output, which is a Serverless Function
// and not an Edge Function. Therefore we only remove API endpoints for now, as they
|
||||
// don't have `getStaticProps`.
|
||||
//
|
||||
// Context: https://github.com/vercel/vercel/pull/7905#discussion_r890213165
|
||||
assert(
|
||||
edgeFunctionFile.startsWith('/api/'),
|
||||
`Only API endpoints are currently supported for Edge endpoints.`
|
||||
);
|
||||
delete pages[edgeFunctionFile.slice(1) + '.js'];
|
||||
}
|
||||
|
||||
return pages;
|
||||
}
|
||||
|
||||
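To make the filtering concrete, a small self-contained example with invented page and manifest entries (the real code operates on glob results and the middleware manifest):

```ts
// Hypothetical inputs for illustration only.
const pages: Record<string, unknown> = {
  'index.js': {},
  'api/edge.js': {},
  'api/node.js': {},
};
const middlewareManifest = { functions: { '/api/edge': {} } };

for (const edgeFunctionFile of Object.keys(middlewareManifest.functions ?? {})) {
  // '/api/edge' becomes 'api/edge.js', matching the glob key.
  delete pages[edgeFunctionFile.slice(1) + '.js'];
}

console.log(Object.keys(pages)); // ['index.js', 'api/node.js']
```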
@@ -54,6 +54,7 @@ import prettyBytes from 'pretty-bytes';
|
||||
|
||||
// related PR: https://github.com/vercel/next.js/pull/30046
|
||||
const CORRECT_NOT_FOUND_ROUTES_VERSION = 'v12.0.1';
|
||||
const CORRECT_MIDDLEWARE_ORDER_VERSION = 'v12.1.7-canary.29';
|
||||
|
||||
export async function serverBuild({
|
||||
dynamicPages,
|
||||
@@ -131,6 +132,10 @@ export async function serverBuild({
|
||||
nextVersion,
|
||||
CORRECT_NOT_FOUND_ROUTES_VERSION
|
||||
);
|
||||
const isCorrectMiddlewareOrder = semver.gte(
|
||||
nextVersion,
|
||||
CORRECT_MIDDLEWARE_ORDER_VERSION
|
||||
);
|
||||
let hasStatic500 = !!staticPages[path.join(entryDirectory, '500')];
|
||||
|
||||
if (lambdaPageKeys.length === 0) {
|
||||
@@ -788,6 +793,7 @@ export async function serverBuild({
|
||||
entryPath,
|
||||
outputDirectory,
|
||||
routesManifest,
|
||||
isCorrectMiddlewareOrder,
|
||||
});
|
||||
|
||||
const dynamicRoutes = await getDynamicRoutes(
|
||||
@@ -1025,6 +1031,10 @@ export async function serverBuild({
|
||||
|
||||
...redirects,
|
||||
|
||||
// middleware comes directly after redirects but before
|
||||
// beforeFiles rewrites as middleware is not a "file" route
|
||||
...(isCorrectMiddlewareOrder ? middleware.staticRoutes : []),
|
||||
|
||||
...beforeFilesRewrites,
|
||||
|
||||
// Make sure to 404 for the /404 path itself
|
||||
@@ -1067,7 +1077,10 @@ export async function serverBuild({
|
||||
},
|
||||
]),
|
||||
|
||||
...middleware.staticRoutes,
|
||||
// while middleware was in beta the order came right before
|
||||
// handle: 'filesystem' we maintain this for older versions
|
||||
// to prevent a local/deploy mismatch
|
||||
...(!isCorrectMiddlewareOrder ? middleware.staticRoutes : []),
|
||||
|
||||
// Next.js page lambdas, `static/` folder, reserved assets, and `public/`
|
||||
// folder
|
||||
|
||||
@@ -244,9 +244,7 @@ export async function getRoutesManifest(
|
||||
});
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const routesManifest: RoutesManifest = require(pathRoutesManifest);
|
||||
|
||||
const routesManifest: RoutesManifest = await fs.readJSON(pathRoutesManifest);
|
||||
// remove temporary array based routeKeys from v1/v2 of routes
|
||||
// manifest since it can result in invalid routes
|
||||
for (const route of routesManifest.dataRoutes || []) {
|
||||
@@ -368,10 +366,10 @@ export async function getDynamicRoutes(
|
||||
let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;
|
||||
|
||||
try {
|
||||
({ getRouteRegex, getSortedRoutes } = require(resolveFrom(
|
||||
entryPath,
|
||||
'next-server/dist/lib/router/utils'
|
||||
)));
|
||||
// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
|
||||
({ getRouteRegex, getSortedRoutes } = eval('require')(
|
||||
resolveFrom(entryPath, 'next-server/dist/lib/router/utils')
|
||||
));
|
||||
if (typeof getRouteRegex !== 'function') {
|
||||
getRouteRegex = undefined;
|
||||
}
|
||||
@@ -379,10 +377,10 @@ export async function getDynamicRoutes(
|
||||
|
||||
if (!getRouteRegex || !getSortedRoutes) {
|
||||
try {
|
||||
({ getRouteRegex, getSortedRoutes } = require(resolveFrom(
|
||||
entryPath,
|
||||
'next/dist/next-server/lib/router/utils'
|
||||
)));
|
||||
// NOTE: `eval('require')` is necessary to avoid bad transpilation to `__webpack_require__`
|
||||
({ getRouteRegex, getSortedRoutes } = eval('require')(
|
||||
resolveFrom(entryPath, 'next/dist/next-server/lib/router/utils')
|
||||
));
|
||||
if (typeof getRouteRegex !== 'function') {
|
||||
getRouteRegex = undefined;
|
||||
}
|
||||
@@ -536,9 +534,7 @@ export async function getImagesManifest(
|
||||
return undefined;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-var-requires
|
||||
const imagesManifest: NextImagesManifest = require(pathImagesManifest);
|
||||
return imagesManifest;
|
||||
return fs.readJson(pathImagesManifest);
|
||||
}
|
||||
|
||||
type FileMap = { [page: string]: FileFsRef };
|
||||
@@ -2128,12 +2124,11 @@ export {
|
||||
interface MiddlewareManifest {
|
||||
version: 1;
|
||||
sortedMiddleware: string[];
|
||||
middleware: {
|
||||
[page: string]: MiddlewareInfo;
|
||||
};
|
||||
middleware: { [page: string]: EdgeFunctionInfo };
|
||||
functions?: { [page: string]: EdgeFunctionInfo };
|
||||
}
|
||||
|
||||
interface MiddlewareInfo {
|
||||
interface EdgeFunctionInfo {
|
||||
env: string[];
|
||||
files: string[];
|
||||
name: string;
|
||||
@@ -2146,25 +2141,45 @@ export async function getMiddlewareBundle({
   entryPath,
   outputDirectory,
   routesManifest,
+  isCorrectMiddlewareOrder,
 }: {
   entryPath: string;
   outputDirectory: string;
   routesManifest: RoutesManifest;
+  isCorrectMiddlewareOrder: boolean;
 }) {
   const middlewareManifest = await getMiddlewareManifest(
     entryPath,
     outputDirectory
   );
+  const sortedFunctions = [
+    ...(!middlewareManifest
+      ? []
+      : middlewareManifest.sortedMiddleware.map(key => ({
+          key,
+          edgeFunction: middlewareManifest?.middleware[key],
+          type: 'middleware' as const,
+        }))),
+
+    ...Object.entries(middlewareManifest?.functions ?? {}).map(
+      ([key, edgeFunction]) => {
+        return {
+          key,
+          edgeFunction,
+          type: 'function' as const,
+        };
+      }
+    ),
+  ];

-  if (middlewareManifest && middlewareManifest?.sortedMiddleware.length > 0) {
+  if (middlewareManifest && sortedFunctions.length > 0) {
     const workerConfigs = await Promise.all(
-      middlewareManifest.sortedMiddleware.map(async key => {
-        const middleware = middlewareManifest.middleware[key];
+      sortedFunctions.map(async ({ key, edgeFunction, type }) => {
         try {
           const wrappedModuleSource = await getNextjsEdgeFunctionSource(
-            middleware.files,
+            edgeFunction.files,
             {
-              name: middleware.name,
+              name: edgeFunction.name,
               staticRoutes: routesManifest.staticRoutes,
               dynamicRoutes: routesManifest.dynamicRoutes.filter(
                 r => !('isMiddleware' in r)
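The hunk above folds both kinds of edge workers into a single `sortedFunctions` list. Stripped to its essence (the type and helper below are invented for the sketch), the combination works like this:

```ts
// Sketch only: middleware entries keep the order given by sortedMiddleware,
// edge function entries are appended from the `functions` map, and a
// discriminant `type` tags each entry for later branching.
type EdgeEntry = { key: string; type: 'middleware' | 'function' };

function collectEdgeEntries(manifest: {
  sortedMiddleware: string[];
  functions?: Record<string, unknown>;
}): EdgeEntry[] {
  return [
    ...manifest.sortedMiddleware.map(key => ({ key, type: 'middleware' as const })),
    ...Object.keys(manifest.functions ?? {}).map(key => ({ key, type: 'function' as const })),
  ];
}
```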
@@ -2175,18 +2190,19 @@ export async function getMiddlewareBundle({
               },
             },
             path.resolve(entryPath, outputDirectory),
-            middleware.wasm
+            edgeFunction.wasm
           );

           return {
-            page: middlewareManifest.middleware[key].page,
+            type,
+            page: edgeFunction.page,
             edgeFunction: (() => {
               const { source, map } = wrappedModuleSource.sourceAndMap();
               const transformedMap = stringifySourceMap(
                 transformSourceMap(map)
               );

-              const wasmFiles = (middleware.wasm ?? []).reduce(
+              const wasmFiles = (edgeFunction.wasm ?? []).reduce(
                 (acc: Files, { filePath, name }) => {
                   const fullFilePath = path.join(
                     entryPath,
@@ -2205,7 +2221,7 @@ export async function getMiddlewareBundle({

               return new EdgeFunction({
                 deploymentTarget: 'v8-worker',
-                name: middleware.name,
+                name: edgeFunction.name,
                 files: {
                   'index.js': new FileBlob({
                     data: source,
@@ -2222,13 +2238,10 @@ export async function getMiddlewareBundle({
                   ...wasmFiles,
                 },
                 entrypoint: 'index.js',
-                envVarsInUse: middleware.env,
+                envVarsInUse: edgeFunction.env,
               });
             })(),
-            routeSrc: getRouteSrc(
-              middlewareManifest.middleware[key],
-              routesManifest
-            ),
+            routeSrc: getRouteSrc(edgeFunction, routesManifest),
           };
         } catch (e: any) {
           e.message = `Can't build edge function ${key}: ${e.message}`;
@@ -2249,15 +2262,23 @@ export async function getMiddlewareBundle({

     for (const worker of workerConfigs.values()) {
       const edgeFile = worker.edgeFunction.name;
-      worker.edgeFunction.name = edgeFile.replace(/^pages\//, '');
-      source.edgeFunctions[edgeFile] = worker.edgeFunction;
-      const route = {
+      const shortPath = edgeFile.replace(/^pages\//, '');
+      worker.edgeFunction.name = shortPath;
+      source.edgeFunctions[shortPath] = worker.edgeFunction;
+      const route: Route = {
         continue: true,
-        override: true,
-        middlewarePath: edgeFile,
         src: worker.routeSrc,
       };

+      if (worker.type === 'function') {
+        route.dest = shortPath;
+      } else {
+        route.middlewarePath = shortPath;
+        if (isCorrectMiddlewareOrder) {
+          route.override = true;
+        }
+      }
+
       if (routesManifest.version > 3 && isDynamicRoute(worker.page)) {
         source.dynamicRouteMap.set(worker.page, route);
       } else {
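The route built for each worker now differs by entry type: edge API functions get a `dest`, middleware keeps `middlewarePath`, and `override` is only applied under the corrected ordering. A standalone sketch of that branching (the type and function below are invented):

```ts
// Sketch of the branching above, outside the builder's data structures.
type WorkerRoute = {
  continue: boolean;
  src: string;
  dest?: string;
  middlewarePath?: string;
  override?: boolean;
};

function buildWorkerRoute(
  type: 'middleware' | 'function',
  shortPath: string,
  src: string,
  isCorrectMiddlewareOrder: boolean
): WorkerRoute {
  const route: WorkerRoute = { continue: true, src };
  if (type === 'function') {
    route.dest = shortPath;
  } else {
    route.middlewarePath = shortPath;
    if (isCorrectMiddlewareOrder) {
      route.override = true;
    }
  }
  return route;
}
```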
@@ -2280,7 +2301,7 @@ export async function getMiddlewareBundle({
 * location. If the manifest can't be found it will resolve to
 * undefined.
 */
-async function getMiddlewareManifest(
+export async function getMiddlewareManifest(
   entryPath: string,
   outputDirectory: string
 ): Promise<MiddlewareManifest | undefined> {
@@ -2299,8 +2320,7 @@ async function getMiddlewareManifest(
     return;
   }

-  // eslint-disable-next-line @typescript-eslint/no-var-requires
-  return require(middlewareManifestPath);
+  return fs.readJSON(middlewareManifestPath);
 }

 /**
@@ -2313,7 +2333,7 @@ async function getMiddlewareManifest(
 * @returns A regexp string for the middleware route.
 */
 function getRouteSrc(
-  { regexp, page }: MiddlewareInfo,
+  { regexp, page }: EdgeFunctionInfo,
   { basePath = '', i18n }: RoutesManifest
 ): string {
   if (page === '/') {
@@ -4,6 +4,8 @@ const cheerio = require('cheerio');
 const { check, deployAndTest } = require('../../utils');
 const fetch = require('../../../../../test/lib/deployment/fetch-retry');

+const ABSOLUTE_URL_PATTERN = /^https?:\/\//i;
+
 async function checkForChange(url, initialValue, hardError) {
   return check(
     async () => {
@@ -32,6 +34,13 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
   it('should deploy and pass probe checks', async () => {
     const info = await deployAndTest(__dirname);
     Object.assign(ctx, info);
+
+    if (!ABSOLUTE_URL_PATTERN.test(ctx.deploymentUrl)) {
+      const details = JSON.stringify(ctx);
+      throw new Error(
+        `Deployment did not result in an absolute deploymentUrl: ${details}`
+      );
+    }
   });

   it('should revalidate content properly from /', async () => {
8
packages/next/test/fixtures/00-middleware-nested/index.test.js
vendored
Normal file
@@ -0,0 +1,8 @@
+const path = require('path');
+const { deployAndTest } = require('../../utils');
+
+describe(`${__dirname.split(path.sep).pop()}`, () => {
+  it('should deploy and pass probe checks', async () => {
+    await deployAndTest(__dirname);
+  });
+});
Some files were not shown because too many files have changed in this diff.