Compare commits

..

7 Commits

Author SHA1 Message Date
Javi Velasco
b27501c281 wip 2024-02-06 09:58:28 +01:00
dvoytenko
3f29798a5a strip to .next, report more failures 2024-01-31 16:17:45 +01:00
dvoytenko
5358a00611 Use absolute paths for require 2024-01-31 16:17:45 +01:00
Javi Velasco
70db54f59b Add measures 2024-01-31 16:17:45 +01:00
Javi Velasco
aafdd66b79 Use require 2024-01-31 16:17:45 +01:00
Javi Velasco
ca59387aac Add common and rest of chunks to the server launcher template 2024-01-31 16:17:45 +01:00
Javi Velasco
fb5eac2fbc Add logic to add common and rest of chunks 2024-01-31 16:17:45 +01:00
541 changed files with 3642 additions and 41882 deletions

View File

@@ -0,0 +1,5 @@
---
"@vercel/next": patch
---
Fix rewrite RSC handling with trailingSlash

View File

@@ -38,7 +38,7 @@ packages/static-build/test/cache-fixtures
packages/redwood/test/fixtures
# remix
packages/remix/test/fixtures-*
packages/remix/test/fixtures
# gatsby-plugin-vercel-analytics
packages/gatsby-plugin-vercel-analytics

View File

@@ -1,79 +0,0 @@
version: 2
updates:
- schedule:
interval: 'daily'
open-pull-requests-limit: 1
reviewers:
- 'trek'
- 'TooTallNate'
- 'EndangeredMassa'
commit-message:
prefix: '[framework-fixtures]'
package-ecosystem: 'npm'
directory: /packages/static-build/test/fixtures/angular-v17
allow:
- dependency-name: '@angular*'
ignore:
- dependency-name: '@angular*'
update-types:
['version-update:semver-major', 'version-update:semver-patch']
groups:
core:
patterns:
- '@angular*'
update-types:
- 'minor'
- schedule:
interval: 'daily'
open-pull-requests-limit: 1
reviewers:
- 'trek'
- 'TooTallNate'
- 'EndangeredMassa'
commit-message:
prefix: '[framework-fixtures]'
package-ecosystem: 'npm'
directory: /packages/static-build/test/fixtures/astro-v4
allow:
- dependency-name: 'astro*'
ignore:
- dependency-name: 'astro*'
update-types:
['version-update:semver-major', 'version-update:semver-patch']
groups:
core:
patterns:
- 'astro*'
update-types:
- 'minor'
- schedule:
interval: 'daily'
open-pull-requests-limit: 1
reviewers:
- 'trek'
- 'TooTallNate'
- 'EndangeredMassa'
commit-message:
prefix: '[framework-fixtures]'
package-ecosystem: 'npm'
directory: /packages/static-build/test/fixtures/hydrogen-v2023
allow:
- dependency-name: '@remix-run*'
- dependency-name: '@shopify*'
ignore:
- dependency-name: '@remix-run*'
update-types:
['version-update:semver-major', 'version-update:semver-patch']
- dependency-name: '@shopify*'
update-types:
['version-update:semver-major', 'version-update:semver-patch']
groups:
core:
patterns:
- '@remix-run*'
- '@shopify*'
update-types:
- 'minor'

View File

@@ -29,7 +29,6 @@ turbo-cache-key.json
packages/*/dist
packages/*/node_modules
packages/**/test/fixtures
packages/**/test/fixtures-*
packages/**/test/dev/fixtures
packages/**/test/build-fixtures
packages/**/test/cache-fixtures

View File

@@ -27,6 +27,3 @@ end
# Performance-booster for watching directories on Windows
gem "wdm", "~> 0.1.1", :platforms => [:mingw, :x64_mingw, :mswin]
# Webrick not installed by default in Ruby 3.0+
gem "webrick"

View File

@@ -1,20 +1,20 @@
GEM
remote: https://rubygems.org/
specs:
addressable (2.8.6)
public_suffix (>= 2.0.2, < 6.0)
addressable (2.7.0)
public_suffix (>= 2.0.2, < 5.0)
colorator (1.1.0)
concurrent-ruby (1.2.3)
em-websocket (0.5.3)
concurrent-ruby (1.1.8)
em-websocket (0.5.2)
eventmachine (>= 0.12.9)
http_parser.rb (~> 0)
http_parser.rb (~> 0.6.0)
eventmachine (1.2.7)
ffi (1.16.3)
ffi (1.14.2)
forwardable-extended (2.6.0)
http_parser.rb (0.8.0)
i18n (1.14.1)
http_parser.rb (0.6.0)
i18n (1.8.9)
concurrent-ruby (~> 1.0)
jekyll (4.2.2)
jekyll (4.2.0)
addressable (~> 2.4)
colorator (~> 1.0)
em-websocket (~> 0.5)
@@ -29,20 +29,20 @@ GEM
rouge (~> 3.0)
safe_yaml (~> 1.0)
terminal-table (~> 2.0)
jekyll-feed (0.17.0)
jekyll-feed (0.15.1)
jekyll (>= 3.7, < 5.0)
jekyll-sass-converter (2.2.0)
jekyll-sass-converter (2.1.0)
sassc (> 2.0.1, < 3.0)
jekyll-seo-tag (2.8.0)
jekyll-seo-tag (2.7.1)
jekyll (>= 3.8, < 5.0)
jekyll-watch (2.2.1)
listen (~> 3.0)
kramdown (2.4.0)
kramdown (2.3.0)
rexml
kramdown-parser-gfm (1.1.0)
kramdown (~> 2.0)
liquid (4.0.4)
listen (3.9.0)
liquid (4.0.3)
listen (3.4.1)
rb-fsevent (~> 0.10, >= 0.10.3)
rb-inotify (~> 0.9, >= 0.9.10)
mercenary (0.4.0)
@@ -52,22 +52,21 @@ GEM
jekyll-seo-tag (~> 2.1)
pathutil (0.16.2)
forwardable-extended (~> 2.6)
public_suffix (5.0.4)
rb-fsevent (0.11.2)
public_suffix (4.0.6)
rb-fsevent (0.10.4)
rb-inotify (0.10.1)
ffi (~> 1.0)
rexml (3.2.6)
rouge (3.30.0)
rexml (3.2.4)
rouge (3.26.0)
safe_yaml (1.0.5)
sassc (2.4.0)
ffi (~> 1.9)
terminal-table (2.0.0)
unicode-display_width (~> 1.1, >= 1.1.1)
unicode-display_width (1.8.0)
webrick (1.8.1)
unicode-display_width (1.7.0)
PLATFORMS
ruby
x86_64-linux
DEPENDENCIES
jekyll (~> 4.2.0)
@@ -76,7 +75,6 @@ DEPENDENCIES
tzinfo (~> 1.2)
tzinfo-data
wdm (~> 0.1.1)
webrick
BUNDLED WITH
2.5.6
2.2.4

View File

@@ -1,6 +1,6 @@
source 'https://rubygems.org'
gem 'middleman', '~> 4.5'
gem 'middleman-autoprefixer', '~> 3.0'
gem 'middleman', '~> 4.2'
gem 'middleman-autoprefixer', '~> 2.7'
gem 'tzinfo-data', platforms: [:mswin, :mingw, :jruby, :x64_mingw]
gem 'wdm', '~> 0.1', platforms: [:mswin, :mingw, :x64_mingw]

View File

@@ -1,60 +1,59 @@
GEM
remote: https://rubygems.org/
specs:
activesupport (7.0.8.1)
activesupport (5.2.4.5)
concurrent-ruby (~> 1.0, >= 1.0.2)
i18n (>= 1.6, < 2)
minitest (>= 5.1)
tzinfo (~> 2.0)
addressable (2.8.6)
public_suffix (>= 2.0.2, < 6.0)
autoprefixer-rails (10.4.16.0)
execjs (~> 2)
backports (3.24.1)
i18n (>= 0.7, < 2)
minitest (~> 5.1)
tzinfo (~> 1.1)
addressable (2.7.0)
public_suffix (>= 2.0.2, < 5.0)
autoprefixer-rails (9.8.6.5)
execjs
backports (3.20.2)
coffee-script (2.4.1)
coffee-script-source
execjs
coffee-script-source (1.12.2)
concurrent-ruby (1.2.3)
contracts (0.16.1)
dotenv (3.1.0)
concurrent-ruby (1.1.8)
contracts (0.13.0)
dotenv (2.7.6)
erubis (2.7.0)
execjs (2.9.1)
fast_blank (1.0.1)
fastimage (2.3.0)
ffi (1.16.3)
haml (6.3.0)
temple (>= 0.8.2)
thor
execjs (2.7.0)
fast_blank (1.0.0)
fastimage (2.2.2)
ffi (1.14.2)
haml (5.2.1)
temple (>= 0.8.0)
tilt
hamster (3.0.0)
concurrent-ruby (~> 1.0)
hashie (3.6.0)
i18n (1.6.0)
i18n (0.9.5)
concurrent-ruby (~> 1.0)
kramdown (2.4.0)
kramdown (2.3.0)
rexml
listen (3.9.0)
rb-fsevent (~> 0.10, >= 0.10.3)
rb-inotify (~> 0.9, >= 0.9.10)
listen (3.0.8)
rb-fsevent (~> 0.9, >= 0.9.4)
rb-inotify (~> 0.9, >= 0.9.7)
memoist (0.16.2)
middleman (4.5.1)
middleman (4.3.11)
coffee-script (~> 2.2)
haml (>= 4.0.5)
kramdown (>= 2.3.0)
middleman-cli (= 4.5.1)
middleman-core (= 4.5.1)
middleman-autoprefixer (3.0.0)
autoprefixer-rails (~> 10.0)
middleman-core (>= 4.0.0)
middleman-cli (4.5.1)
thor (>= 0.17.0, < 1.3.0)
middleman-core (4.5.1)
activesupport (>= 6.1, < 7.1)
addressable (~> 2.4)
middleman-cli (= 4.3.11)
middleman-core (= 4.3.11)
middleman-autoprefixer (2.10.1)
autoprefixer-rails (~> 9.1)
middleman-core (>= 3.3.3)
middleman-cli (4.3.11)
thor (>= 0.17.0, < 2.0)
middleman-core (4.3.11)
activesupport (>= 4.2, < 6.0)
addressable (~> 2.3)
backports (~> 3.6)
bundler (~> 2.0)
contracts (~> 0.13, < 0.17)
bundler
contracts (~> 0.13.0)
dotenv
erubis
execjs (~> 2.0)
@@ -62,52 +61,48 @@ GEM
fastimage (~> 2.0)
hamster (~> 3.0)
hashie (~> 3.4)
i18n (~> 1.6.0)
listen (~> 3.0)
i18n (~> 0.9.0)
listen (~> 3.0.0)
memoist (~> 0.14)
padrino-helpers (~> 0.15.0)
padrino-helpers (~> 0.13.0)
parallel
rack (>= 1.4.5, < 3)
sassc (~> 2.0)
servolux
tilt (~> 2.0.9)
toml
uglifier (~> 3.0)
webrick
minitest (5.22.2)
padrino-helpers (0.15.3)
i18n (>= 0.6.7, < 2)
padrino-support (= 0.15.3)
minitest (5.14.3)
padrino-helpers (0.13.3.4)
i18n (~> 0.6, >= 0.6.7)
padrino-support (= 0.13.3.4)
tilt (>= 1.4.1, < 3)
padrino-support (0.15.3)
parallel (1.24.0)
parslet (2.0.0)
public_suffix (5.0.4)
rack (2.2.8.1)
rb-fsevent (0.11.2)
padrino-support (0.13.3.4)
activesupport (>= 3.1)
parallel (1.20.1)
public_suffix (4.0.6)
rack (2.2.3)
rb-fsevent (0.10.4)
rb-inotify (0.10.1)
ffi (~> 1.0)
rexml (3.2.6)
rexml (3.2.4)
sassc (2.4.0)
ffi (~> 1.9)
servolux (0.13.0)
temple (0.10.3)
thor (1.2.2)
tilt (2.0.11)
toml (0.3.0)
parslet (>= 1.8.0, < 3.0.0)
tzinfo (2.0.6)
concurrent-ruby (~> 1.0)
temple (0.8.2)
thor (1.1.0)
thread_safe (0.3.6)
tilt (2.0.10)
tzinfo (1.2.9)
thread_safe (~> 0.1)
uglifier (3.2.0)
execjs (>= 0.3.0, < 3)
webrick (1.8.1)
PLATFORMS
x86_64-linux
DEPENDENCIES
middleman (~> 4.5)
middleman-autoprefixer (~> 3.0)
middleman (~> 4.2)
middleman-autoprefixer (~> 2.7)
tzinfo-data
wdm (~> 0.1)

File diff suppressed because it is too large Load Diff

View File

@@ -11,7 +11,7 @@
"dependencies": {
"react": "^18",
"react-dom": "^18",
"next": "14.1.2"
"next": "14.1.0"
},
"devDependencies": {
"typescript": "^5",
@@ -22,6 +22,6 @@
"postcss": "^8",
"tailwindcss": "^3.3.0",
"eslint": "^8",
"eslint-config-next": "14.1.2"
"eslint-config-next": "14.1.0"
}
}

View File

@@ -9,7 +9,7 @@
},
"devDependencies": {
"@types/jest": "27.4.1",
"@vercel/frameworks": "3.0.0"
"@vercel/frameworks": "2.0.6"
},
"version": null
}

View File

@@ -1,5 +1,7 @@
node_modules
/.cache
/build
/public/build
.env
.vercel

View File

@@ -1,13 +1,20 @@
import { cssBundleHref } from "@remix-run/css-bundle";
import {
Links,
LiveReload,
Meta,
Outlet,
Scripts,
ScrollRestoration,
} from "@remix-run/react";
import { Analytics } from "@vercel/analytics/react";
import type { LinksFunction } from "@vercel/remix";
export function Layout({ children }: { children: React.ReactNode }) {
export const links: LinksFunction = () => [
...(cssBundleHref ? [{ rel: "stylesheet", href: cssBundleHref }] : []),
];
export default function App() {
return (
<html lang="en">
<head>
@@ -17,15 +24,12 @@ export function Layout({ children }: { children: React.ReactNode }) {
<Links />
</head>
<body>
{children}
<Outlet />
<ScrollRestoration />
<Scripts />
<LiveReload />
<Analytics />
</body>
</html>
);
}
export default function App() {
return <Outlet />;
}

View File

@@ -4,29 +4,29 @@
"sideEffects": false,
"type": "module",
"scripts": {
"build": "remix vite:build",
"dev": "remix vite:dev",
"build": "remix build",
"dev": "remix dev --manual",
"start": "remix-serve ./build/index.js",
"typecheck": "tsc"
},
"dependencies": {
"@remix-run/node": "^2.8.0",
"@remix-run/react": "^2.8.0",
"@remix-run/server-runtime": "^2.8.0",
"@vercel/analytics": "^1.2.2",
"@vercel/remix": "^2.8.0",
"isbot": "^4",
"@remix-run/css-bundle": "^2.0.0",
"@remix-run/node": "^2.0.0",
"@remix-run/react": "^2.0.0",
"@remix-run/serve": "^2.0.0",
"@vercel/analytics": "^1.0.2",
"@vercel/remix": "^2.0.0",
"isbot": "^3.6.8",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@remix-run/dev": "^2.8.0",
"@remix-run/eslint-config": "^2.8.0",
"@remix-run/dev": "^2.0.0",
"@remix-run/eslint-config": "^2.0.0",
"@types/react": "^18.2.20",
"@types/react-dom": "^18.2.7",
"eslint": "^8.38.0",
"typescript": "^5.1.6",
"vite": "^5.1.0",
"vite-tsconfig-paths": "^4.2.1"
"typescript": "^5.1.6"
},
"engines": {
"node": ">=18.0.0"

View File

@@ -1,25 +1,22 @@
{
"include": ["**/*.ts", "**/*.tsx"],
"include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
"compilerOptions": {
"lib": ["DOM", "DOM.Iterable", "ES2022"],
"types": ["@vercel/remix", "node", "vite/client"],
"isolatedModules": true,
"esModuleInterop": true,
"jsx": "react-jsx",
"module": "ESNext",
"moduleResolution": "Bundler",
"resolveJsonModule": true,
"target": "ES2022",
"strict": true,
"allowJs": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"baseUrl": ".",
"paths": {
"~/*": ["./app/*"]
},
// Vite takes care of building everything, not tsc.
// Remix takes care of building everything in `remix build`.
"noEmit": true
}
}

View File

@@ -1,11 +0,0 @@
import { vitePlugin as remix } from "@remix-run/dev";
import { installGlobals } from "@remix-run/node";
import { defineConfig } from "vite";
import { vercelPreset } from '@vercel/remix/vite';
import tsconfigPaths from "vite-tsconfig-paths";
installGlobals();
export default defineConfig({
plugins: [remix({ presets: [vercelPreset()] }), tsconfigPaths()],
});

View File

@@ -1,26 +1,5 @@
# @vercel-internals/types
## 1.0.24
### Patch Changes
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
- @vercel/build-utils@7.7.1
## 1.0.23
### Patch Changes
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
- @vercel/build-utils@7.7.0
## 1.0.22
### Patch Changes
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
- @vercel/build-utils@7.6.0
## 1.0.21
### Patch Changes

View File

@@ -1,7 +1,7 @@
{
"private": true,
"name": "@vercel-internals/types",
"version": "1.0.24",
"version": "1.0.21",
"types": "index.d.ts",
"main": "index.d.ts",
"files": [
@@ -10,7 +10,7 @@
"dependencies": {
"@types/node": "14.14.31",
"@vercel-internals/constants": "1.0.4",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"@vercel/routing-utils": "3.1.0"
},
"devDependencies": {

View File

@@ -33,7 +33,7 @@
"source-map-support": "0.5.12",
"ts-eager": "2.0.2",
"ts-jest": "29.1.0",
"turbo": "1.12.4",
"turbo": "1.11.3",
"typescript": "4.9.5"
},
"scripts": {

View File

@@ -1,31 +1,5 @@
# @vercel/build-utils
## 7.7.1
### Patch Changes
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
## 7.7.0
### Minor Changes
- Revert "Revert "Default ruby to only currently supported version (3.2.0)"" ([#11137](https://github.com/vercel/vercel/pull/11137))
## 7.6.0
### Minor Changes
- Revert "Default ruby to only currently supported version (3.2.0)" ([#11135](https://github.com/vercel/vercel/pull/11135))
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
- [build-utils] change default package manager when no lockfile detected from `yarn` to `npm` (gated behind feature flag) ([#11131](https://github.com/vercel/vercel/pull/11131))
### Patch Changes
- Update internal type for variants ([#11111](https://github.com/vercel/vercel/pull/11111))
## 7.5.1
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "7.7.1",
"version": "7.5.1",
"license": "Apache-2.0",
"main": "./dist/index.js",
"types": "./dist/index.d.js",

View File

@@ -302,7 +302,7 @@ export async function scanParentDirs(
});
let lockfilePath: string | undefined;
let lockfileVersion: number | undefined;
let cliType: CliType;
let cliType: CliType = 'yarn';
const [hasYarnLock, packageLockJson, pnpmLockYaml, bunLockBin] =
await Promise.all([
@@ -338,12 +338,6 @@ export async function scanParentDirs(
lockfilePath = bunLockPath;
// TODO: read "bun-lockfile-format-v0"
lockfileVersion = 0;
} else {
if (process.env.VERCEL_ENABLE_NPM_DEFAULT === '1') {
cliType = 'npm';
} else {
cliType = 'yarn';
}
}
const packageJsonPath = pkgJsonPath || undefined;
@@ -420,14 +414,6 @@ export async function runNpmInstall(
destPath
);
if (!packageJsonPath) {
debug(
`Skipping dependency installation because no package.json was found for ${destPath}`
);
runNpmInstallSema.release();
return false;
}
// Only allow `runNpmInstall()` to run once per `package.json`
// when doing a default install (no additional args)
if (meta && packageJsonPath && args.length === 0) {

View File

@@ -14,7 +14,7 @@ export const functionsSchema = {
},
memory: {
minimum: 128,
maximum: 3009,
maximum: 3008,
},
maxDuration: {
type: 'number',

View File

@@ -440,9 +440,7 @@ export interface Cron {
schedule: string;
}
/**
* @deprecated Replaced by Variants. Remove once fully replaced.
*/
// TODO: Proper description once complete
export interface Flag {
key: string;
defaultValue?: unknown;
@@ -473,9 +471,7 @@ export interface BuildResultV2Typical {
framework?: {
version: string;
};
/** @deprecated Replaced by Variants. Remove once fully replaced. */
flags?: Flag[];
variants?: Record<string, VariantDefinition>;
}
export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;
@@ -495,28 +491,3 @@ export type ShouldServe = (
export type StartDevServer = (
options: StartDevServerOptions
) => Promise<StartDevServerResult>;
/**
* TODO: The following types will eventually be exported by a more
* relevant package.
*/
type VariantJSONArray = ReadonlyArray<VariantJSONValue>;
type VariantJSONValue =
| string
| boolean
| number
| null
| VariantJSONArray
| { [key: string]: VariantJSONValue };
type VariantOption = {
value: VariantJSONValue;
label?: string;
};
export interface VariantDefinition {
options?: VariantOption[];
origin?: string;
description?: string;
}

View File

@@ -1,4 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1

View File

@@ -1,4 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1

View File

@@ -1,4 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1

View File

@@ -1,9 +0,0 @@
{
"private": true,
"scripts": {
"build": "mkdir -p public && (printf \"npm version: \" && npm -v) > public/index.txt"
},
"dependencies": {
"sharp": "0.33.2"
}
}

View File

@@ -1,3 +0,0 @@
{
"probes": [{ "path": "/", "mustContain": "npm version: 8" }]
}

View File

@@ -1,7 +0,0 @@
{
"build": {
"env": {
"VERCEL_ENABLE_NPM_DEFAULT": "1"
}
}
}

View File

@@ -23,6 +23,8 @@ const skipFixtures: string[] = [
'23-pnpm-workspaces',
'41-nx-monorepo',
'42-npm-workspace-with-nx',
'jekyll-v4',
'middleman-v4',
];
// eslint-disable-next-line no-restricted-syntax

View File

@@ -576,52 +576,6 @@ it(
ms('1m')
);
it('should return cliType npm when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is set', async () => {
const originalRepoLockfilePath = path.join(
__dirname,
'..',
'..',
'..',
'pnpm-lock.yaml'
);
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
await fs.remove(originalRepoLockfilePath);
try {
process.env.VERCEL_ENABLE_NPM_DEFAULT = '1';
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
const result = await scanParentDirs(fixture);
expect(result.cliType).toEqual('npm');
expect(result.lockfileVersion).toEqual(undefined);
expect(result.lockfilePath).toEqual(undefined);
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
} finally {
delete process.env.VERCEL_ENABLE_NPM_DEFAULT;
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
}
});
it('should return cliType yarn when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is not set', async () => {
const originalRepoLockfilePath = path.join(
__dirname,
'..',
'..',
'..',
'pnpm-lock.yaml'
);
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
await fs.remove(originalRepoLockfilePath);
try {
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
const result = await scanParentDirs(fixture);
expect(result.cliType).toEqual('yarn');
expect(result.lockfileVersion).toEqual(undefined);
expect(result.lockfilePath).toEqual(undefined);
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
} finally {
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
}
});
it('should return cliType bun and correct lock file for bun v1', async () => {
const fixture = path.join(__dirname, 'fixtures', '31-bun-v1-with-yarn-lock');
const result = await scanParentDirs(fixture);

View File

@@ -1,63 +1,5 @@
# vercel
## 33.5.4
### Patch Changes
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
- Updated dependencies [[`b1d8b83ab`](https://github.com/vercel/vercel/commit/b1d8b83abbf23a3485aedb490992d0a3bf44573f), [`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f), [`20237d4f7`](https://github.com/vercel/vercel/commit/20237d4f7b55b0697b57db15636c11204cb0dc39), [`f8fab639b`](https://github.com/vercel/vercel/commit/f8fab639bf49a60389b8d0b7b265a737c17b4ae1), [`6ed0fe6fb`](https://github.com/vercel/vercel/commit/6ed0fe6fb1e487545a790ff5b9fc691cf625f005)]:
- @vercel/next@4.1.4
- @vercel/build-utils@7.7.1
- @vercel/remix-builder@2.1.1
- @vercel/static-build@2.4.1
- @vercel/node@3.0.21
## 33.5.3
### Patch Changes
- Updated dependencies [[`c2d99855e`](https://github.com/vercel/vercel/commit/c2d99855ea6132380434ed29643120680f95fad7), [`1333071a3`](https://github.com/vercel/vercel/commit/1333071a3a2d324679327bfdd4e872f8fd3521c6)]:
- @vercel/next@4.1.3
- @vercel/remix-builder@2.1.0
## 33.5.2
### Patch Changes
- Updated dependencies [[`e109e3325`](https://github.com/vercel/vercel/commit/e109e3325ab5299da0903034175fabe72d486a4e), [`d17abf463`](https://github.com/vercel/vercel/commit/d17abf463acabf9e1e43b91200f18efd34e91f62), [`644721a90`](https://github.com/vercel/vercel/commit/644721a90da8cf98414d272be9da0a821a2ce217), [`ea0e9aeae`](https://github.com/vercel/vercel/commit/ea0e9aeaec8ddddb5a726be0d252df9cdbd84808), [`e318a0eea`](https://github.com/vercel/vercel/commit/e318a0eea55c9b8536b0874f66cfd03aca6f0adf), [`1fee87e76`](https://github.com/vercel/vercel/commit/1fee87e76f18d2f5e5524247cfce615fa1832e49), [`bfc01fd98`](https://github.com/vercel/vercel/commit/bfc01fd98f760a008d0d2e6c52b5216503b44b75), [`7910f2f30`](https://github.com/vercel/vercel/commit/7910f2f3070ff69742e845e795d4db77d598c181), [`440ef3ba9`](https://github.com/vercel/vercel/commit/440ef3ba98af8f05e7714c86c67c36dbda11e85c)]:
- @vercel/remix-builder@2.0.20
- @vercel/next@4.1.2
- @vercel/node@3.0.20
- @vercel/redwood@2.0.8
## 33.5.1
### Patch Changes
- build: upgrade edge-runtime ([#11148](https://github.com/vercel/vercel/pull/11148))
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab), [`10e200e0b`](https://github.com/vercel/vercel/commit/10e200e0bf8f692b6740e098e0572b4e7de83850), [`678ebbe52`](https://github.com/vercel/vercel/commit/678ebbe5255766656bf2dddc574e86b2999f11c8)]:
- @vercel/build-utils@7.7.0
- @vercel/static-build@2.4.0
- @vercel/node@3.0.19
## 33.5.0
### Minor Changes
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
### Patch Changes
- Updated dependencies [[`c32a909af`](https://github.com/vercel/vercel/commit/c32a909afcedf0ee55777d5dcaecc0c8383dd8c8), [`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`d21bb9f87`](https://github.com/vercel/vercel/commit/d21bb9f87e1d837666fe8104d4e199b2590725d6), [`4027a1833`](https://github.com/vercel/vercel/commit/4027a1833718a92be74b2b3c5a4df23745d19a36), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c), [`3bad73401`](https://github.com/vercel/vercel/commit/3bad73401b4ec1f61e515965732cde8dcc052b17)]:
- @vercel/next@4.1.1
- @vercel/node@3.0.18
- @vercel/redwood@2.0.7
- @vercel/remix-builder@2.0.19
- @vercel/build-utils@7.6.0
- @vercel/static-build@2.3.0
## 33.4.1
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "33.5.4",
"version": "33.4.1",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -31,22 +31,22 @@
"node": ">= 16"
},
"dependencies": {
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"@vercel/fun": "1.1.0",
"@vercel/go": "3.0.5",
"@vercel/hydrogen": "1.0.2",
"@vercel/next": "4.1.4",
"@vercel/node": "3.0.21",
"@vercel/next": "4.1.0",
"@vercel/node": "3.0.17",
"@vercel/python": "4.1.1",
"@vercel/redwood": "2.0.8",
"@vercel/remix-builder": "2.1.1",
"@vercel/redwood": "2.0.6",
"@vercel/remix-builder": "2.0.18",
"@vercel/ruby": "2.0.5",
"@vercel/static-build": "2.4.1",
"@vercel/static-build": "2.2.0",
"chokidar": "3.3.1"
},
"devDependencies": {
"@alex_neo/jest-expect-message": "1.0.5",
"@edge-runtime/node-utils": "2.3.0",
"@edge-runtime/node-utils": "2.2.2",
"@next/env": "11.1.2",
"@sentry/node": "5.5.0",
"@sindresorhus/slugify": "0.11.0",
@@ -88,11 +88,11 @@
"@types/yauzl-promise": "2.1.0",
"@vercel-internals/constants": "1.0.4",
"@vercel-internals/get-package-json": "1.0.0",
"@vercel-internals/types": "1.0.24",
"@vercel/client": "13.1.4",
"@vercel-internals/types": "1.0.21",
"@vercel/client": "13.1.1",
"@vercel/error-utils": "2.0.2",
"@vercel/frameworks": "3.0.0",
"@vercel/fs-detectors": "5.2.1",
"@vercel/frameworks": "2.0.6",
"@vercel/fs-detectors": "5.1.6",
"@vercel/routing-utils": "3.1.0",
"ajv": "6.12.2",
"alpha-sort": "2.0.1",

View File

@@ -22,7 +22,6 @@ import {
Cron,
validateNpmrc,
Flag,
VariantDefinition,
} from '@vercel/build-utils';
import {
detectBuilders,
@@ -96,9 +95,7 @@ interface BuildOutputConfig {
version: string;
};
crons?: Cron[];
/** @deprecated Replaced by Variants. Remove once fully replaced. */
flags?: Flag[];
variants?: Record<string, VariantDefinition>;
}
/**
@@ -681,13 +678,10 @@ async function doBuild(
overrides: mergedOverrides,
framework,
crons: mergedCrons,
/** @deprecated Replaced by Variants. Remove once fully replaced. */
flags: mergedFlags,
};
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });
await writeVariantsJson(client, buildResults.values(), outputDir);
const relOutputDir = relative(cwd, outputDir);
output.print(
`${prependEmoji(
@@ -832,51 +826,6 @@ function mergeFlags(
});
}
/**
* Takes the build output and writes all the variants into the `variants.json`
* file. It'll skip variants that already exist.
*/
async function writeVariantsJson(
{ output }: Client,
buildResults: Iterable<BuildResult | BuildOutputConfig>,
outputDir: string
): Promise<void> {
const variantsFilePath = join(outputDir, 'variants.json');
let hasVariants = true;
const variants = (await fs.readJSON(variantsFilePath).catch(error => {
if (error.code === 'ENOENT') {
hasVariants = false;
return { definitions: {} };
}
throw error;
})) as { definitions: Record<string, VariantDefinition> };
for (const result of buildResults) {
if (!('variants' in result) || !result.variants) continue;
for (const [key, defintion] of Object.entries(result.variants)) {
if (result.variants[key]) {
output.warn(
`The variant "${key}" was found multiple times. Only its first occurrence will be considered.`
);
continue;
}
hasVariants = true;
variants.definitions[key] = defintion;
}
}
// Only create the file when there are variants to write,
// or when the file already exists.
if (hasVariants) {
await fs.writeJSON(variantsFilePath, variants, { spaces: 2 });
}
}
async function writeBuildJson(buildsJson: BuildsManifest, outputDir: string) {
await fs.writeJSON(join(outputDir, 'builds.json'), buildsJson, { spaces: 2 });
}

View File

@@ -367,7 +367,7 @@ export async function executeBuild(
Code: { ZipFile },
Handler: asset.handler,
Runtime: asset.runtime,
MemorySize: asset.memory || 3009,
MemorySize: asset.memory || 3008,
Environment: {
Variables: {
...vercelConfig.env,

View File

@@ -1175,7 +1175,7 @@ test('render build errors', async () => {
const output = await execCli(binaryPath, [deploymentPath, '--yes']);
expect(output.exitCode, formatOutput(output)).toBe(1);
expect(output.stderr).toMatch(/Command "npm run build" exited with 1/gm);
expect(output.stderr).toMatch(/Command "yarn run build" exited with 1/gm);
});
test('invalid deployment, projects and alias names', async () => {

View File

@@ -253,12 +253,12 @@ describe('validateConfig', () => {
const error = validateConfig({
functions: {
'api/test.js': {
memory: 3010,
memory: 3009,
},
},
});
expect(error!.message).toEqual(
"Invalid vercel.json - `functions['api/test.js'].memory` should be <= 3009."
"Invalid vercel.json - `functions['api/test.js'].memory` should be <= 3008."
);
expect(error!.link).toEqual(
'https://vercel.com/docs/concepts/projects/project-configuration#functions'

View File

@@ -1,26 +1,5 @@
# @vercel/client
## 13.1.4
### Patch Changes
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
- @vercel/build-utils@7.7.1
## 13.1.3
### Patch Changes
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
- @vercel/build-utils@7.7.0
## 13.1.2
### Patch Changes
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
- @vercel/build-utils@7.6.0
## 13.1.1
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/client",
"version": "13.1.4",
"version": "13.1.1",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"homepage": "https://vercel.com",
@@ -37,7 +37,7 @@
"typescript": "4.9.5"
},
"dependencies": {
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"@vercel/error-utils": "2.0.2",
"@vercel/routing-utils": "3.1.0",
"@zeit/fetch": "5.2.0",

View File

@@ -1,11 +1,5 @@
# @vercel/frameworks
## 3.0.0
### Major Changes
- Make "remix" framework preset supersede "vite" ([#11031](https://github.com/vercel/vercel/pull/11031))
## 2.0.6
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/frameworks",
"version": "3.0.0",
"version": "2.0.6",
"main": "./dist/frameworks.js",
"types": "./dist/frameworks.d.ts",
"files": [

View File

@@ -202,14 +202,11 @@ export const frameworks = [
description: 'A new Remix app — the result of running `npx create-remix`.',
website: 'https://remix.run',
sort: 6,
supersedes: ['hydrogen', 'vite'],
supersedes: 'hydrogen',
useRuntime: { src: 'package.json', use: '@vercel/remix-builder' },
ignoreRuntimes: ['@vercel/node'],
detectors: {
some: [
{
matchPackage: '@remix-run/dev',
},
{
path: 'remix.config.js',
},
@@ -1737,7 +1734,7 @@ export const frameworks = [
tagline: 'React framework for headless commerce',
description: 'React framework for headless commerce',
website: 'https://hydrogen.shopify.dev',
supersedes: ['vite'],
supersedes: 'vite',
useRuntime: { src: 'package.json', use: '@vercel/hydrogen' },
envPrefix: 'PUBLIC_',
detectors: {

View File

@@ -220,7 +220,7 @@ export interface Framework {
*/
defaultVersion?: string;
/**
* Array of slugs for other framework presets which this framework supersedes.
* Slug of another framework preset in which this framework supersedes.
*/
supersedes?: string[];
supersedes?: string;
}

View File

@@ -199,7 +199,7 @@ const Schema = {
dependency: { type: 'string' },
cachePattern: { type: 'string' },
defaultVersion: { type: 'string' },
supersedes: { type: 'array', items: { type: 'string' } },
supersedes: { type: 'string' },
},
},
};

View File

@@ -1,22 +1,5 @@
# @vercel/fs-detectors
## 5.2.1
### Patch Changes
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
## 5.2.0
### Minor Changes
- Make "remix" framework preset supersede "vite" ([#11031](https://github.com/vercel/vercel/pull/11031))
### Patch Changes
- Updated dependencies [[`1333071a3`](https://github.com/vercel/vercel/commit/1333071a3a2d324679327bfdd4e872f8fd3521c6)]:
- @vercel/frameworks@3.0.0
## 5.1.6
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/fs-detectors",
"version": "5.2.1",
"version": "5.1.6",
"description": "Vercel filesystem detectors",
"main": "./dist/index.js",
"types": "./dist/index.d.ts",
@@ -22,7 +22,7 @@
},
"dependencies": {
"@vercel/error-utils": "2.0.2",
"@vercel/frameworks": "3.0.0",
"@vercel/frameworks": "2.0.6",
"@vercel/routing-utils": "3.1.0",
"glob": "8.0.3",
"js-yaml": "4.1.0",
@@ -37,7 +37,7 @@
"@types/minimatch": "3.0.5",
"@types/node": "14.18.33",
"@types/semver": "7.3.10",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"jest-junit": "16.0.0",
"typescript": "4.9.5"
}

View File

@@ -612,11 +612,11 @@ function validateFunctions({ functions = {} }: Options) {
if (
func.memory !== undefined &&
(func.memory < 128 || func.memory > 3009)
(func.memory < 128 || func.memory > 3008)
) {
return {
code: 'invalid_function_memory',
message: 'Functions must have a memory value between 128 and 3009',
message: 'Functions must have a memory value between 128 and 3008',
};
}

View File

@@ -143,9 +143,7 @@ function removeSupersededFramework(
const framework = matches[index];
if (framework) {
if (framework.supersedes) {
for (const slug of framework.supersedes) {
removeSupersededFramework(matches, slug);
}
removeSupersededFramework(matches, framework.supersedes);
}
matches.splice(index, 1);
}
@@ -156,9 +154,7 @@ export function removeSupersededFrameworks(
) {
for (const match of matches.slice()) {
if (match?.supersedes) {
for (const slug of match.supersedes) {
removeSupersededFramework(matches, slug);
}
removeSupersededFramework(matches, match.supersedes);
}
}
}

View File

@@ -166,12 +166,12 @@ describe('removeSupersededFrameworks()', () => {
const matches = [
{ slug: 'storybook' },
{ slug: 'vite' },
{ slug: 'hydrogen', supersedes: ['vite'] },
{ slug: 'hydrogen', supersedes: 'vite' },
];
removeSupersededFrameworks(matches);
expect(matches).toEqual([
{ slug: 'storybook' },
{ slug: 'hydrogen', supersedes: ['vite'] },
{ slug: 'hydrogen', supersedes: 'vite' },
]);
});
@@ -179,13 +179,13 @@ describe('removeSupersededFrameworks()', () => {
const matches = [
{ slug: 'storybook' },
{ slug: 'vite' },
{ slug: 'hydrogen', supersedes: ['vite'] },
{ slug: 'remix', supersedes: ['hydrogen'] },
{ slug: 'hydrogen', supersedes: 'vite' },
{ slug: 'remix', supersedes: 'hydrogen' },
];
removeSupersededFrameworks(matches);
expect(matches).toEqual([
{ slug: 'storybook' },
{ slug: 'remix', supersedes: ['hydrogen'] },
{ slug: 'remix', supersedes: 'hydrogen' },
]);
});
});
@@ -442,20 +442,6 @@ describe('detectFramework()', () => {
expect(await detectFramework({ fs, frameworkList })).toBe('storybook');
});
it('Should detect Remix + Vite as `remix`', async () => {
const fs = new VirtualFilesystem({
'vite.config.ts': '',
'package.json': JSON.stringify({
dependencies: {
'@remix-run/dev': 'latest',
vite: 'latest',
},
}),
});
expect(await detectFramework({ fs, frameworkList })).toBe('remix');
});
});
describe('detectFrameworks()', () => {
@@ -511,23 +497,6 @@ describe('detectFrameworks()', () => {
expect(slugs).toEqual(['nextjs', 'storybook']);
});
it('Should detect Remix + Vite as `remix`', async () => {
const fs = new VirtualFilesystem({
'vite.config.ts': '',
'package.json': JSON.stringify({
dependencies: {
'@remix-run/dev': 'latest',
vite: 'latest',
},
}),
});
const slugs = (await detectFrameworks({ fs, frameworkList })).map(
f => f.slug
);
expect(slugs).toEqual(['remix']);
});
it('Should detect "hydrogen" template as `hydrogen`', async () => {
const fs = new LocalFileSystemDetector(join(EXAMPLES_DIR, 'hydrogen'));

View File

@@ -1,28 +1,5 @@
# @vercel/gatsby-plugin-vercel-builder
## 2.0.19
### Patch Changes
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
- @vercel/build-utils@7.7.1
## 2.0.18
### Patch Changes
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
- @vercel/build-utils@7.7.0
## 2.0.17
### Patch Changes
- [gatsby-plugin-vercel-builder] use --keep-names esbuild flag ([#11117](https://github.com/vercel/vercel/pull/11117))
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
- @vercel/build-utils@7.6.0
## 2.0.16
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/gatsby-plugin-vercel-builder",
"version": "2.0.19",
"version": "2.0.16",
"main": "dist/index.js",
"files": [
"dist",
@@ -20,7 +20,7 @@
},
"dependencies": {
"@sinclair/typebox": "0.25.24",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"@vercel/routing-utils": "3.1.0",
"esbuild": "0.14.47",
"etag": "1.8.1",

View File

@@ -35,9 +35,6 @@ export const writeHandler = async ({
platform: 'node',
bundle: true,
minify: true,
// prevents renaming edge cases from causing failures like:
// https://github.com/node-fetch/node-fetch/issues/784
keepNames: true,
define: {
'process.env.NODE_ENV': "'production'",
vercel_pathPrefix: JSON.stringify(prefix),

View File

@@ -29,7 +29,7 @@
"@types/node-fetch": "^2.3.0",
"@types/tar": "6.1.5",
"@types/yauzl-promise": "2.1.0",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"async-retry": "1.3.3",
"execa": "^1.0.0",
"fs-extra": "^7.0.0",

View File

@@ -26,7 +26,7 @@
"devDependencies": {
"@types/jest": "27.5.1",
"@types/node": "14.18.33",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"execa": "3.2.0",
"fs-extra": "11.1.0",
"jest-junit": "16.0.0"

View File

@@ -1,43 +1,5 @@
# @vercel/next
## 4.1.4
### Patch Changes
- Enable partial prerendering support for pre-generated pages ([#11183](https://github.com/vercel/vercel/pull/11183))
## 4.1.3
### Patch Changes
- Fix manifest with experimental flag ([#11192](https://github.com/vercel/vercel/pull/11192))
## 4.1.2
### Patch Changes
- Update experimental bundle flag ([#11187](https://github.com/vercel/vercel/pull/11187))
- [next] Add flag for experimental grouping ([#11177](https://github.com/vercel/vercel/pull/11177))
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
- fix: missing experimental field ([#11184](https://github.com/vercel/vercel/pull/11184))
## 4.1.1
### Patch Changes
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
- Load common chunks on module initialization ([#11126](https://github.com/vercel/vercel/pull/11126))
- Fix index normalizing for app outputs ([#11099](https://github.com/vercel/vercel/pull/11099))
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
- Fix rewrite RSC handling with trailingSlash ([#11107](https://github.com/vercel/vercel/pull/11107))
## 4.1.0
### Minor Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/next",
"version": "4.1.4",
"version": "4.1.0",
"license": "Apache-2.0",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
@@ -23,7 +23,7 @@
"dist"
],
"dependencies": {
"@vercel/nft": "0.26.4"
"@vercel/nft": "0.26.2"
},
"devDependencies": {
"@types/aws-lambda": "8.10.19",
@@ -40,7 +40,7 @@
"@types/semver": "6.0.0",
"@types/text-table": "0.2.1",
"@types/webpack-sources": "3.2.0",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"@vercel/routing-utils": "3.1.0",
"async-sema": "3.0.1",
"buffer-crc32": "0.2.13",

View File

@@ -511,7 +511,7 @@ export const build: BuildV2 = async ({
entryPath,
outputDirectory
);
const omittedPrerenderRoutes: ReadonlySet<string> = new Set(
const omittedPrerenderRoutes = new Set(
Object.keys(prerenderManifest.omittedRoutes)
);
@@ -1142,10 +1142,6 @@ export const build: BuildV2 = async ({
appPathRoutesManifest,
});
/**
* This is a detection for preview mode that's required for the pages
* router.
*/
const canUsePreviewMode = Object.keys(pages).some(page =>
isApiPage(pages[page].fsPath)
);
@@ -1320,22 +1316,6 @@ export const build: BuildV2 = async ({
}
}
/**
* All of the routes that have `experimentalPPR` enabled.
*/
const experimentalPPRRoutes = new Set<string>();
for (const [route, { experimentalPPR }] of [
...Object.entries(prerenderManifest.staticRoutes),
...Object.entries(prerenderManifest.blockingFallbackRoutes),
...Object.entries(prerenderManifest.fallbackRoutes),
...Object.entries(prerenderManifest.omittedRoutes),
]) {
if (!experimentalPPR) continue;
experimentalPPRRoutes.add(route);
}
if (requiredServerFilesManifest) {
if (!routesManifest) {
throw new Error(
@@ -1391,7 +1371,6 @@ export const build: BuildV2 = async ({
hasIsr404Page,
hasIsr500Page,
variantsManifest,
experimentalPPRRoutes,
});
}
@@ -1904,18 +1883,17 @@ export const build: BuildV2 = async ({
);
}
dynamicRoutes = await getDynamicRoutes({
dynamicRoutes = await getDynamicRoutes(
entryPath,
entryDirectory,
dynamicPages,
isDev: false,
false,
routesManifest,
omittedRoutes: omittedPrerenderRoutes,
omittedPrerenderRoutes,
canUsePreviewMode,
bypassToken: prerenderManifest.bypassToken || '',
isServerMode,
experimentalPPRRoutes,
}).then(arr =>
prerenderManifest.bypassToken || '',
isServerMode
).then(arr =>
localizeDynamicRoutes(
arr,
dynamicPrefix,
@@ -1934,18 +1912,17 @@ export const build: BuildV2 = async ({
// we need to include the prerenderManifest.omittedRoutes here
// for the page to be able to be matched in the lambda for preview mode
const completeDynamicRoutes = await getDynamicRoutes({
const completeDynamicRoutes = await getDynamicRoutes(
entryPath,
entryDirectory,
dynamicPages,
isDev: false,
false,
routesManifest,
omittedRoutes: undefined,
undefined,
canUsePreviewMode,
bypassToken: prerenderManifest.bypassToken || '',
isServerMode,
experimentalPPRRoutes,
}).then(arr =>
prerenderManifest.bypassToken || '',
isServerMode
).then(arr =>
arr.map(route => {
route.src = route.src.replace('^', `^${dynamicPrefix}`);
return route;
@@ -2142,33 +2119,22 @@ export const build: BuildV2 = async ({
appPathRoutesManifest,
isSharedLambdas,
canUsePreviewMode,
omittedPrerenderRoutes,
});
await Promise.all(
Object.keys(prerenderManifest.staticRoutes).map(route =>
prerenderRoute(route, {})
)
Object.keys(prerenderManifest.staticRoutes).forEach(route =>
prerenderRoute(route, { isBlocking: false, isFallback: false })
);
await Promise.all(
Object.keys(prerenderManifest.fallbackRoutes).map(route =>
prerenderRoute(route, { isFallback: true })
)
Object.keys(prerenderManifest.fallbackRoutes).forEach(route =>
prerenderRoute(route, { isBlocking: false, isFallback: true })
);
await Promise.all(
Object.keys(prerenderManifest.blockingFallbackRoutes).map(route =>
prerenderRoute(route, { isBlocking: true })
)
Object.keys(prerenderManifest.blockingFallbackRoutes).forEach(route =>
prerenderRoute(route, { isBlocking: true, isFallback: false })
);
if (static404Page && canUsePreviewMode) {
await Promise.all(
Array.from(omittedPrerenderRoutes).map(route =>
prerenderRoute(route, { isOmitted: true })
)
);
omittedPrerenderRoutes.forEach(route => {
prerenderRoute(route, { isOmitted: true });
});
}
// We still need to use lazyRoutes if the dataRoutes field

View File

@@ -47,12 +47,11 @@ import {
UnwrapPromise,
getOperationType,
FunctionsConfigManifestV1,
VariantsManifestLegacy,
VariantsManifest,
RSC_CONTENT_TYPE,
RSC_PREFETCH_SUFFIX,
normalizePrefetches,
CreateLambdaFromPseudoLayersOptions,
getPostponeResumePathname,
} from './utils';
import {
nodeFileTrace,
@@ -71,30 +70,6 @@ const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';
const EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION = 'v12.2.0';
const CORRECTED_MANIFESTS_VERSION = 'v12.2.0';
// Ideally this should be in a Next.js manifest so we can change it in
// the future but this also allows us to improve existing versions
const PRELOAD_CHUNKS = {
APP_ROUTER_PAGES: [
'.next/server/webpack-runtime.js',
'next/dist/client/components/action-async-storage.external.js',
'next/dist/client/components/request-async-storage.external.js',
'next/dist/client/components/static-generation-async-storage.external.js',
'next/dist/compiled/next-server/app-page.runtime.prod.js',
],
APP_ROUTER_HANDLER: [
'.next/server/webpack-runtime.js',
'next/dist/compiled/next-server/app-route.runtime.prod.js',
],
PAGES_ROUTER_PAGES: [
'.next/server/webpack-runtime.js',
'next/dist/compiled/next-server/pages.runtime.prod.js',
],
PAGES_ROUTER_API: [
'.next/server/webpack-api-runtime.js',
'next/dist/compiled/next-server/pages-api.runtime.prod.js',
],
};
// related PR: https://github.com/vercel/next.js/pull/52997
// and https://github.com/vercel/next.js/pull/56318
const BUNDLED_SERVER_NEXT_VERSION = 'v13.5.4';
@@ -143,7 +118,6 @@ export async function serverBuild({
lambdaCompressedByteLimit,
requiredServerFilesManifest,
variantsManifest,
experimentalPPRRoutes,
}: {
appPathRoutesManifest?: Record<string, string>;
dynamicPages: string[];
@@ -153,7 +127,7 @@ export async function serverBuild({
pagesDir: string;
baseDir: string;
canUsePreviewMode: boolean;
omittedPrerenderRoutes: ReadonlySet<string>;
omittedPrerenderRoutes: Set<string>;
localePrefixed404: boolean;
staticPages: { [key: string]: FileFsRef };
lambdaAppPaths: { [key: string]: FileFsRef };
@@ -184,15 +158,10 @@ export async function serverBuild({
imagesManifest?: NextImagesManifest;
prerenderManifest: NextPrerenderedRoutes;
requiredServerFilesManifest: NextRequiredServerFilesManifest;
variantsManifest: VariantsManifestLegacy | null;
experimentalPPRRoutes: ReadonlySet<string>;
variantsManifest: VariantsManifest | null;
}): Promise<BuildResult> {
lambdaPages = Object.assign({}, lambdaPages, lambdaAppPaths);
const experimentalAllowBundling = Boolean(
process.env.NEXT_EXPERIMENTAL_FUNCTION_BUNDLING
);
const lambdas: { [key: string]: Lambda } = {};
const prerenders: { [key: string]: Prerender } = {};
const lambdaPageKeys = Object.keys(lambdaPages);
@@ -273,11 +242,10 @@ export async function serverBuild({
nextVersion,
CORRECT_MIDDLEWARE_ORDER_VERSION
);
// experimental bundling prevents filtering manifests
// as we don't know what to filter by at this stage
const isCorrectManifests =
!experimentalAllowBundling &&
semver.gte(nextVersion, CORRECTED_MANIFESTS_VERSION);
const isCorrectManifests = semver.gte(
nextVersion,
CORRECTED_MANIFESTS_VERSION
);
let hasStatic500 = !!staticPages[path.posix.join(entryDirectory, '500')];
@@ -356,7 +324,19 @@ export async function serverBuild({
internalPages.push('404.js');
}
const prerenderRoutes: ReadonlySet<string> = new Set<string>([
const experimentalPPRRoutes = new Set<string>();
for (const [route, { experimentalPPR }] of [
...Object.entries(prerenderManifest.staticRoutes),
...Object.entries(prerenderManifest.blockingFallbackRoutes),
...Object.entries(prerenderManifest.fallbackRoutes),
]) {
if (!experimentalPPR) continue;
experimentalPPRRoutes.add(route);
}
const prerenderRoutes = new Set<string>([
...(canUsePreviewMode ? omittedPrerenderRoutes : []),
...Object.keys(prerenderManifest.blockingFallbackRoutes),
...Object.keys(prerenderManifest.fallbackRoutes),
@@ -764,7 +744,7 @@ export async function serverBuild({
.filter(Boolean) as string[];
let traceResult: NodeFileTraceResult | undefined;
let parentFilesMap: ReadonlyMap<string, Set<string>> | undefined;
let parentFilesMap: Map<string, Set<string>> | undefined;
if (pathsToTrace.length > 0) {
traceResult = await nodeFileTrace(pathsToTrace, {
@@ -879,7 +859,6 @@ export async function serverBuild({
const pageExtensions = requiredServerFilesManifest.config?.pageExtensions;
const pageLambdaGroups = await getPageLambdaGroups({
experimentalAllowBundling,
entryPath: projectDir,
config,
functionsConfigManifest,
@@ -901,7 +880,6 @@ export async function serverBuild({
}
const appRouterLambdaGroups = await getPageLambdaGroups({
experimentalAllowBundling,
entryPath: projectDir,
config,
functionsConfigManifest,
@@ -920,7 +898,6 @@ export async function serverBuild({
});
const appRouteHandlersLambdaGroups = await getPageLambdaGroups({
experimentalAllowBundling,
entryPath: projectDir,
config,
functionsConfigManifest,
@@ -1024,12 +1001,54 @@ export async function serverBuild({
for (const group of combinedGroups) {
const groupPageFiles: { [key: string]: PseudoFile } = {};
const filesCounter = new Map<string, number>();
const filesMap = new Map<string, string>();
for (const page of [...group.pages, ...internalPages]) {
const pageFileName = path.normalize(
path.relative(baseDir, lambdaPages[page].fsPath)
);
groupPageFiles[pageFileName] = compressedPages[page];
const traceFileRef = getBuildTraceFile(getOriginalPagePath(page));
if (traceFileRef && !internalPages.includes(page)) {
const traceFile = await fs.readFile(traceFileRef.fsPath, 'utf8');
const { files } = JSON.parse(traceFile) as { files: string[] };
const pagePath = path.join(
appDir && Boolean(lambdaAppPaths[page]) ? appDir : pagesDir,
getOriginalPagePath(page)
);
const pageDir = path.dirname(pagePath);
filesCounter.set(pagePath, 1);
let relPath0 = `./${path.relative(path.resolve(baseDir), pagePath)}`;
if (relPath0.startsWith('./apps/vercel-site/')) {
relPath0 = relPath0.replace('./apps/vercel-site/', './');
}
filesMap.set(pagePath, relPath0);
// filesMap.set(pagePath, pagePath);
files.forEach((file: string) => {
const absolutePath = path.join(pageDir, file);
const count = filesCounter.get(absolutePath) || 0;
filesCounter.set(absolutePath, count + 1);
let relPath = path.relative(
path.resolve(baseDir),
path.resolve(absolutePath)
);
if (!relPath.startsWith('..')) {
relPath = './' + relPath;
}
if (relPath.startsWith('./apps/vercel-site/')) {
relPath = relPath.replace('./apps/vercel-site/', './');
}
filesMap.set(absolutePath, relPath);
// filesMap.set(absolutePath, absolutePath);
});
}
}
const updatedManifestFiles: { [name: string]: FileBlob } = {};
@@ -1091,58 +1110,41 @@ export async function serverBuild({
}
}
let launcherData = group.isAppRouter ? appLauncher : launcher;
let preloadChunks: string[] = [];
const commonFiles = new Set<string>();
const restFiles = new Set<string>();
if (process.env.VERCEL_NEXT_PRELOAD_COMMON === '1') {
const nextPackageDir = path.dirname(
resolveFrom(projectDir, 'next/package.json')
);
if (group.isPages) {
preloadChunks = PRELOAD_CHUNKS.PAGES_ROUTER_PAGES;
} else if (group.isApiLambda) {
preloadChunks = PRELOAD_CHUNKS.PAGES_ROUTER_API;
} else if (group.isAppRouter && !group.isAppRouteHandler) {
preloadChunks = PRELOAD_CHUNKS.APP_ROUTER_PAGES;
} else if (group.isAppRouteHandler) {
preloadChunks = PRELOAD_CHUNKS.APP_ROUTER_HANDLER;
}
const normalizedPreloadChunks: string[] = [];
for (const preloadChunk of preloadChunks) {
const absoluteChunk = preloadChunk.startsWith('.next')
? path.join(projectDir, preloadChunk)
: path.join(nextPackageDir, '..', preloadChunk);
// ensure the chunks are actually in this layer
if (
group.pseudoLayer[
path.join('.', path.relative(baseDir, absoluteChunk))
]
) {
normalizedPreloadChunks.push(
// relative files need to be prefixed with ./ for require
preloadChunk.startsWith('.next')
? `./${preloadChunk}`
: preloadChunk
);
for (const [file, count] of filesCounter) {
const relative = filesMap.get(file)!;
if (
file.endsWith('.js') ||
file.endsWith('.mjs') ||
file.endsWith('.json') ||
file.endsWith('.cjs')
) {
if (count === group.pages.length) {
commonFiles.add(relative);
} else {
restFiles.add(relative);
}
}
if (normalizedPreloadChunks.length > 0) {
launcherData = launcherData.replace(
'// @preserve next-server-preload-target',
normalizedPreloadChunks
.map(name => `require('${name}');`)
.join('\n')
);
}
}
let launcherContent = group.isAppRouter ? appLauncher : launcher;
launcherContent = launcherContent.replace(
'const commonChunks = __COMMON_CHUNKS__',
`const commonChunks = ${JSON.stringify(
Array.from(commonFiles.values())
)};`
);
launcherContent = launcherContent.replace(
'const restChunks = __REST_CHUNKS__',
`const restChunks = ${JSON.stringify(Array.from(restFiles.values()))};`
);
const launcherFiles: { [name: string]: FileFsRef | FileBlob } = {
[path.join(path.relative(baseDir, projectDir), '___next_launcher.cjs')]:
new FileBlob({ data: launcherData }),
new FileBlob({ data: launcherContent }),
};
const operationType = getOperationType({ group, prerenderManifest });
@@ -1162,7 +1164,6 @@ export async function serverBuild({
maxDuration: group.maxDuration,
isStreaming: group.isStreaming,
nextVersion,
experimentalAllowBundling,
};
const lambda = await createLambdaFromPseudoLayers(options);
@@ -1176,7 +1177,7 @@ export async function serverBuild({
// lambda for the page for revalidation.
let revalidate: NodejsLambda | undefined;
if (isPPR) {
if (!options.isStreaming) {
if (isPPR && !options.isStreaming) {
throw new Error("Invariant: PPR lambda isn't streaming");
}
@@ -1188,28 +1189,24 @@ export async function serverBuild({
});
}
for (const pageFilename of group.pages) {
// This is the name of the page, where the root is `index`.
const pageName = pageFilename.replace(/\.js$/, '');
// This is the name of the page prefixed with a `/`, where the root is
// `/index`.
const pagePath = path.posix.join('/', pageName);
// This is the routable pathname for the page, where the root is `/`.
const pagePathname = pagePath === '/index' ? '/' : pagePath;
let isPrerender = prerenderRoutes.has(pagePathname);
for (const page of group.pages) {
const pageNoExt = page.replace(/\.js$/, '');
let isPrerender = prerenderRoutes.has(
path.join('/', pageNoExt === 'index' ? '' : pageNoExt)
);
if (!isPrerender && routesManifest?.i18n) {
isPrerender = routesManifest.i18n.locales.some(locale => {
return prerenderRoutes.has(
path.join('/', locale, pageName === 'index' ? '' : pageName)
path.join('/', locale, pageNoExt === 'index' ? '' : pageNoExt)
);
});
}
let outputName = path.posix.join(entryDirectory, pageName);
let outputName = normalizeIndexOutput(
path.posix.join(entryDirectory, pageNoExt),
true
);
// If this is a PPR page, then we should prefix the output name.
if (isPPR) {
@@ -1217,56 +1214,24 @@ export async function serverBuild({
throw new Error("Invariant: PPR lambda isn't set");
}
// Assign the revalidate lambda to the output name. That's used to
// perform the initial static shell render.
// Get the get the base path prefixed route, without the index
// normalization.
outputName = path.posix.join(entryDirectory, pageNoExt);
lambdas[outputName] = revalidate;
// If this isn't an omitted page, then we should add the link from the
// page to the postpone resume lambda.
if (!omittedPrerenderRoutes.has(pagePathname)) {
const key = getPostponeResumePathname(entryDirectory, pageName);
lambdas[key] = lambda;
// We want to add the `experimentalStreamingLambdaPath` to this
// output.
experimentalStreamingLambdaPaths.set(outputName, key);
} else {
// As this is an omitted page, we should generate the experimental
// partial prerendering resume route for each of these routes that
// support partial prerendering. This is because the routes that
// haven't been omitted will have rewrite rules in place to rewrite
// the original request `/blog/my-slug` to the dynamic path
// `/blog/[slug]?nxtPslug=my-slug`.
for (const [
routePathname,
{ srcRoute, experimentalPPR },
] of Object.entries(prerenderManifest.staticRoutes)) {
// If the srcRoute doesn't match or this doesn't support
// experimental partial prerendering, then we can skip this route.
if (srcRoute !== pagePathname || !experimentalPPR) continue;
// If this route is the same as the page route, then we can skip
// it, because we've already added the lambda to the output.
if (routePathname === pagePathname) continue;
const key = getPostponeResumePathname(
entryDirectory,
routePathname
);
lambdas[key] = lambda;
outputName = path.posix.join(entryDirectory, routePathname);
experimentalStreamingLambdaPaths.set(outputName, key);
}
}
const pprOutputName = path.posix.join(
entryDirectory,
'/_next/postponed/resume',
pageNoExt
);
lambdas[pprOutputName] = lambda;
// We want to add the `experimentalStreamingLambdaPath` to this
// output.
experimentalStreamingLambdaPaths.set(outputName, pprOutputName);
continue;
}
if (!group.isAppRouter && !group.isAppRouteHandler) {
outputName = normalizeIndexOutput(outputName, true);
}
// we add locale prefixed outputs for SSR pages,
// this is handled in onPrerenderRoute for SSG pages
if (
@@ -1274,7 +1239,7 @@ export async function serverBuild({
!isPrerender &&
!group.isAppRouter &&
(!isCorrectLocaleAPIRoutes ||
!(pageName === 'api' || pageName.startsWith('api/')))
!(pageNoExt === 'api' || pageNoExt.startsWith('api/')))
) {
for (const locale of i18n.locales) {
lambdas[
@@ -1282,7 +1247,7 @@ export async function serverBuild({
path.posix.join(
entryDirectory,
locale,
pageName === 'index' ? '' : pageName
pageNoExt === 'index' ? '' : pageNoExt
),
true
)
@@ -1315,7 +1280,6 @@ export async function serverBuild({
hasPages404: routesManifest.pages404,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
omittedPrerenderRoutes,
});
await Promise.all(
@@ -1323,13 +1287,11 @@ export async function serverBuild({
prerenderRoute(route, {})
)
);
await Promise.all(
Object.keys(prerenderManifest.fallbackRoutes).map(route =>
prerenderRoute(route, { isFallback: true })
)
);
await Promise.all(
Object.keys(prerenderManifest.blockingFallbackRoutes).map(route =>
prerenderRoute(route, { isBlocking: true })
@@ -1338,9 +1300,9 @@ export async function serverBuild({
if (static404Page && canUsePreviewMode) {
await Promise.all(
Array.from(omittedPrerenderRoutes).map(route =>
prerenderRoute(route, { isOmitted: true })
)
[...omittedPrerenderRoutes].map(route => {
return prerenderRoute(route, { isOmitted: true });
})
);
}
@@ -1349,7 +1311,6 @@ export async function serverBuild({
if (routesManifest?.i18n) {
route = normalizeLocalePath(route, routesManifest.i18n.locales).pathname;
}
delete lambdas[
normalizeIndexOutput(
path.posix.join('./', entryDirectory, route === '/' ? '/index' : route),
@@ -1373,19 +1334,19 @@ export async function serverBuild({
middleware.staticRoutes.length > 0 &&
semver.gte(nextVersion, NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION);
const dynamicRoutes = await getDynamicRoutes({
const dynamicRoutes = await getDynamicRoutes(
entryPath,
entryDirectory,
dynamicPages,
isDev: false,
false,
routesManifest,
omittedRoutes: omittedPrerenderRoutes,
omittedPrerenderRoutes,
canUsePreviewMode,
bypassToken: prerenderManifest.bypassToken || '',
isServerMode: true,
dynamicMiddlewareRouteMap: middleware.dynamicRouteMap,
experimentalPPRRoutes,
}).then(arr =>
prerenderManifest.bypassToken || '',
true,
middleware.dynamicRouteMap,
experimental.ppr
).then(arr =>
localizeDynamicRoutes(
arr,
dynamicPrefix,
@@ -1556,10 +1517,9 @@ export async function serverBuild({
continue;
}
const pathname = path.posix.join(
'./',
entryDirectory,
route === '/' ? '/index' : route
const pathname = normalizeIndexOutput(
path.posix.join('./', entryDirectory, route === '/' ? '/index' : route),
true
);
if (lambdas[pathname]) {
@@ -1591,46 +1551,6 @@ export async function serverBuild({
throw new Error("Invariant: cannot use PPR without 'rsc.prefetchHeader'");
}
// If we're using the Experimental Partial Prerendering, we should ensure that
// all the routes that support it (and are listed) have configured lambdas.
// This only applies to routes that do not have fallbacks enabled (these are
// routes that have `dynamicParams = false` defined.
if (experimental.ppr) {
for (const { srcRoute, dataRoute, experimentalPPR } of Object.values(
prerenderManifest.staticRoutes
)) {
// Only apply this to the routes that support experimental PPR and
// that also have their `dataRoute` and `srcRoute` defined.
if (!experimentalPPR || !dataRoute || !srcRoute) continue;
// If the srcRoute is not omitted, then we don't need to do anything. This
// is the indicator that a route should only have it's prerender defined
// and not a lambda.
if (!omittedPrerenderRoutes.has(srcRoute)) continue;
// The lambda paths have their leading `/` stripped.
const srcPathname = srcRoute.substring(1);
const dataPathname = dataRoute.substring(1);
// If we already have an associated lambda for the `.rsc` route, then
// we can skip this.
const dataPathnameExists = dataPathname in lambdas;
if (dataPathnameExists) continue;
// We require that the source route has a lambda associated with it. If
// it doesn't this is an error.
const srcPathnameExists = srcPathname in lambdas;
if (!srcPathnameExists) {
throw new Error(
`Invariant: Expected to have a lambda for the source route: ${srcPathname}`
);
}
// Associate the data pathname with the source pathname's lambda.
lambdas[dataPathname] = lambdas[srcPathname];
}
}
return {
wildcard: wildcardConfig,
images: getImagesConfig(imagesManifest),

View File

@@ -1,4 +1,4 @@
import { IncomingMessage, ServerResponse } from 'http';
import type { IncomingMessage, ServerResponse } from 'http';
// The Next.js builder can emit the project in a subdirectory depending on how
// many folder levels of `node_modules` are traced. To ensure `process.cwd()`
// returns the proper path, we change the directory to the folder with the
@@ -23,8 +23,6 @@ if (process.env.NODE_ENV !== 'production' && region !== 'dev1') {
// eslint-disable-next-line
const NextServer = require('__NEXT_SERVER_PATH__').default;
// @preserve next-server-preload-target
// __NEXT_CONFIG__ value is injected
declare const __NEXT_CONFIG__: any;
const conf = __NEXT_CONFIG__;
@@ -55,6 +53,56 @@ const serve =
// The default handler method should be exported as a function on the module.
module.exports = serve(nextServer.getRequestHandler());
declare const __COMMON_CHUNKS__: string[];
const commonChunks = __COMMON_CHUNKS__;
module.exports.preload = async () => {
// const start = performance.now();
// const failed: string[] = [];
// for (const chunk of commonChunks) {
// try {
// require(chunk);
// } catch (e: any) {
// failed.push(e.message);
// }
// }
// performance.measure('vc:common-chunks', {
// detail: 'Next.js common chunks',
// end: performance.now(),
// start,
// });
// if (failed.length > 0) {
// throw new Error(
// `Failed to preload chunks: ${failed.length}: ${failed.join(', ')}`
// );
// }
};
declare const __REST_CHUNKS__: string[];
const restChunks = __REST_CHUNKS__;
module.exports.postload = async () => {
// @ts-ignore
const precompile = globalThis[Symbol.for('@vercel/node-module-precompile')];
const files = [...commonChunks, ...restChunks];
const failed: string[] = [];
const success: string[] = [];
for (const file of files) {
try {
precompile(require.resolve(file));
success.push(file);
} catch (error: any) {
failed.push(error.message);
}
}
console.log('Precompiled files:', success.join(', \n'));
if (failed.length > 0) {
throw new Error(
`Failed to postload chunks: ${failed.length}: ${failed.join(',\n')}`
);
}
};
// If available, add `getRequestHandlerWithMetadata` to the export if it's
// required by the configuration.
if (

View File

@@ -304,31 +304,19 @@ export async function getRoutesManifest(
return routesManifest;
}
export async function getDynamicRoutes({
entryPath,
entryDirectory,
dynamicPages,
isDev,
routesManifest,
omittedRoutes,
canUsePreviewMode,
bypassToken,
isServerMode,
dynamicMiddlewareRouteMap,
experimentalPPRRoutes,
}: {
entryPath: string;
entryDirectory: string;
dynamicPages: string[];
isDev?: boolean;
routesManifest?: RoutesManifest;
omittedRoutes?: ReadonlySet<string>;
canUsePreviewMode?: boolean;
bypassToken?: string;
isServerMode?: boolean;
dynamicMiddlewareRouteMap?: ReadonlyMap<string, RouteWithSrc>;
experimentalPPRRoutes: ReadonlySet<string>;
}): Promise<RouteWithSrc[]> {
export async function getDynamicRoutes(
entryPath: string,
entryDirectory: string,
dynamicPages: string[],
isDev?: boolean,
routesManifest?: RoutesManifest,
omittedRoutes?: Set<string>,
canUsePreviewMode?: boolean,
bypassToken?: string,
isServerMode?: boolean,
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>,
experimentalPPR?: boolean
): Promise<RouteWithSrc[]> {
if (routesManifest) {
switch (routesManifest.version) {
case 1:
@@ -401,7 +389,7 @@ export async function getDynamicRoutes({
];
}
if (experimentalPPRRoutes.has(page)) {
if (experimentalPPR) {
let dest = route.dest?.replace(/($|\?)/, '.prefetch.rsc$1');
if (page === '/' || page === '/index') {
@@ -454,9 +442,7 @@ export async function getDynamicRoutes({
let getRouteRegex: ((pageName: string) => { re: RegExp }) | undefined =
undefined;
let getSortedRoutes:
| ((normalizedPages: ReadonlyArray<string>) => string[])
| undefined;
let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;
try {
const resolved = require_.resolve('next-server/dist/lib/router/utils', {
@@ -659,10 +645,10 @@ export function filterStaticPages(
}
export function getFilesMapFromReasons(
fileList: ReadonlySet<string>,
fileList: Set<string>,
reasons: NodeFileTraceReasons,
ignoreFn?: (file: string, parent?: string) => boolean
): ReadonlyMap<string, Set<string>> {
) {
// this uses the reasons tree to collect files specific to a
// certain parent allowing us to not have to trace each parent
// separately
@@ -818,7 +804,6 @@ export interface CreateLambdaFromPseudoLayersOptions
layers: PseudoLayer[];
isStreaming?: boolean;
nextVersion?: string;
experimentalAllowBundling?: boolean;
}
// measured with 1, 2, 5, 10, and `os.cpus().length || 5`
@@ -830,7 +815,6 @@ export async function createLambdaFromPseudoLayers({
layers,
isStreaming,
nextVersion,
experimentalAllowBundling,
...lambdaOptions
}: CreateLambdaFromPseudoLayersOptions) {
await createLambdaSema.acquire();
@@ -878,7 +862,6 @@ export async function createLambdaFromPseudoLayers({
slug: 'nextjs',
version: nextVersion,
},
experimentalAllowBundling,
});
}
@@ -931,10 +914,6 @@ export type NextPrerenderedRoutes = {
};
};
/**
* Routes that have their fallback behavior is disabled. All routes would've
* been provided in the top-level `routes` key (`staticRoutes`).
*/
omittedRoutes: {
[route: string]: {
routeRegex: string;
@@ -1314,6 +1293,8 @@ export async function getPrerenderManifest(
prefetchDataRouteRegex,
};
} else {
// Fallback behavior is disabled, all routes would've been provided
// in the top-level `routes` key (`staticRoutes`).
ret.omittedRoutes[lazyRoute] = {
experimentalBypassFor,
experimentalPPR,
@@ -1380,7 +1361,7 @@ async function getSourceFilePathFromPage({
}: {
workPath: string;
page: string;
pageExtensions?: ReadonlyArray<string>;
pageExtensions?: string[];
}) {
const usesSrcDir = await usesSrcDirectory(workPath);
const extensionsToTry = pageExtensions || ['js', 'jsx', 'ts', 'tsx'];
@@ -1521,14 +1502,13 @@ export async function getPageLambdaGroups({
internalPages,
pageExtensions,
inversedAppPathManifest,
experimentalAllowBundling,
}: {
entryPath: string;
config: Config;
functionsConfigManifest?: FunctionsConfigManifestV1;
pages: ReadonlyArray<string>;
prerenderRoutes: ReadonlySet<string>;
experimentalPPRRoutes: ReadonlySet<string> | undefined;
pages: string[];
prerenderRoutes: Set<string>;
experimentalPPRRoutes: Set<string> | undefined;
pageTraces: {
[page: string]: {
[key: string]: FileFsRef;
@@ -1541,10 +1521,9 @@ export async function getPageLambdaGroups({
initialPseudoLayer: PseudoLayerResult;
initialPseudoLayerUncompressed: number;
lambdaCompressedByteLimit: number;
internalPages: ReadonlyArray<string>;
pageExtensions?: ReadonlyArray<string>;
internalPages: string[];
pageExtensions?: string[];
inversedAppPathManifest?: Record<string, string>;
experimentalAllowBundling?: boolean;
}) {
const groups: Array<LambdaGroup> = [];
@@ -1584,46 +1563,42 @@ export async function getPageLambdaGroups({
opts = { ...vercelConfigOpts, ...opts };
}
let matchingGroup = experimentalAllowBundling
? undefined
: groups.find(group => {
const matches =
group.maxDuration === opts.maxDuration &&
group.memory === opts.memory &&
group.isPrerenders === isPrerenderRoute &&
group.isExperimentalPPR === isExperimentalPPR;
let matchingGroup = groups.find(group => {
const matches =
group.maxDuration === opts.maxDuration &&
group.memory === opts.memory &&
group.isPrerenders === isPrerenderRoute &&
group.isExperimentalPPR === isExperimentalPPR;
if (matches) {
let newTracedFilesSize = group.pseudoLayerBytes;
let newTracedFilesUncompressedSize =
group.pseudoLayerUncompressedBytes;
if (matches) {
let newTracedFilesSize = group.pseudoLayerBytes;
let newTracedFilesUncompressedSize = group.pseudoLayerUncompressedBytes;
for (const newPage of newPages) {
Object.keys(pageTraces[newPage] || {}).map(file => {
if (!group.pseudoLayer[file]) {
const item = tracedPseudoLayer[file] as PseudoFile;
for (const newPage of newPages) {
Object.keys(pageTraces[newPage] || {}).map(file => {
if (!group.pseudoLayer[file]) {
const item = tracedPseudoLayer[file] as PseudoFile;
newTracedFilesSize += item.compBuffer?.byteLength || 0;
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
}
});
newTracedFilesSize +=
compressedPages[newPage].compBuffer.byteLength;
newTracedFilesUncompressedSize +=
compressedPages[newPage].uncompressedSize;
newTracedFilesSize += item.compBuffer?.byteLength || 0;
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
}
});
newTracedFilesSize += compressedPages[newPage].compBuffer.byteLength;
newTracedFilesUncompressedSize +=
compressedPages[newPage].uncompressedSize;
}
const underUncompressedLimit =
newTracedFilesUncompressedSize <
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
const underCompressedLimit =
newTracedFilesSize <
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;
const underUncompressedLimit =
newTracedFilesUncompressedSize <
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
const underCompressedLimit =
newTracedFilesSize <
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;
return underUncompressedLimit && underCompressedLimit;
}
return false;
});
return underUncompressedLimit && underCompressedLimit;
}
return false;
});
if (matchingGroup) {
matchingGroup.pages.push(page);
@@ -1931,13 +1906,12 @@ type OnPrerenderRouteArgs = {
isServerMode: boolean;
canUsePreviewMode: boolean;
lambdas: { [key: string]: Lambda };
experimentalStreamingLambdaPaths: ReadonlyMap<string, string> | undefined;
experimentalStreamingLambdaPaths: Map<string, string> | undefined;
prerenders: { [key: string]: Prerender | File };
pageLambdaMap: { [key: string]: string };
routesManifest?: RoutesManifest;
isCorrectNotFoundRoutes?: boolean;
isEmptyAllowQueryForPrendered?: boolean;
omittedPrerenderRoutes: ReadonlySet<string>;
};
let prerenderGroup = 1;
@@ -1974,7 +1948,6 @@ export const onPrerenderRoute =
routesManifest,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
omittedPrerenderRoutes,
} = prerenderRouteArgs;
if (isBlocking && isFallback) {
@@ -2080,11 +2053,6 @@ export const onPrerenderRoute =
let isAppPathRoute = false;
// experimentalPPR signals app path route
if (appDir && experimentalPPR) {
isAppPathRoute = true;
}
// TODO: leverage manifest to determine app paths more accurately
if (appDir && srcRoute && (!dataRoute || dataRoute?.endsWith('.rsc'))) {
isAppPathRoute = true;
@@ -2216,6 +2184,7 @@ export const onPrerenderRoute =
if (routeKey !== '/index' && routeKey.endsWith('/index')) {
routeKey = `${routeKey}/index`;
routeFileNoExt = routeKey;
origRouteFileNoExt = routeKey;
}
}
@@ -2286,20 +2255,15 @@ export const onPrerenderRoute =
const lambdaId = pageLambdaMap[outputSrcPathPage];
lambda = lambdas[lambdaId];
} else {
let outputSrcPathPage =
const outputSrcPathPage = normalizeIndexOutput(
srcRoute == null
? outputPathPageOrig
: path.posix.join(
entryDirectory,
srcRoute === '/' ? '/index' : srcRoute
);
if (!isAppPathRoute) {
outputSrcPathPage = normalizeIndexOutput(
outputSrcPathPage,
isServerMode
);
}
),
isServerMode
);
lambda = lambdas[outputSrcPathPage];
}
@@ -2399,31 +2363,25 @@ export const onPrerenderRoute =
sourcePath = srcRoute;
}
let experimentalStreamingLambdaPath: string | undefined;
if (experimentalPPR) {
if (!experimentalStreamingLambdaPaths) {
throw new Error(
"Invariant: experimentalStreamingLambdaPaths doesn't exist"
);
// The `experimentalStreamingLambdaPaths` stores the page without the
// leading `/` and with the `/` rewritten to be `index`. We should
// normalize the key so that it matches that key in the map.
let key = srcRoute || routeKey;
if (key === '/') {
key = 'index';
} else {
if (!key.startsWith('/')) {
throw new Error("Invariant: key doesn't start with /");
}
// If a source route exists, and it's not listed as an omitted route,
// then use the src route as the basis for the experimental streaming
// lambda path. If the route doesn't have a source route or it's not
// omitted, then use the more specific `routeKey` as the basis.
if (srcRoute && !omittedPrerenderRoutes.has(srcRoute)) {
experimentalStreamingLambdaPath =
experimentalStreamingLambdaPaths.get(
pathnameToOutputName(entryDirectory, srcRoute)
);
} else {
experimentalStreamingLambdaPath =
experimentalStreamingLambdaPaths.get(
pathnameToOutputName(entryDirectory, routeKey)
);
}
key = key.substring(1);
}
key = path.posix.join(entryDirectory, key);
const experimentalStreamingLambdaPath =
experimentalStreamingLambdaPaths?.get(key);
prerenders[outputPathPage] = new Prerender({
expiration: initialRevalidate,
lambda,
@@ -2506,18 +2464,11 @@ export const onPrerenderRoute =
routesManifest,
locale
);
let localeOutputPathPage = path.posix.join(
entryDirectory,
localeRouteFileNoExt
const localeOutputPathPage = normalizeIndexOutput(
path.posix.join(entryDirectory, localeRouteFileNoExt),
isServerMode
);
if (!isAppPathRoute) {
localeOutputPathPage = normalizeIndexOutput(
localeOutputPathPage,
isServerMode
);
}
const origPrerenderPage = prerenders[outputPathPage];
prerenders[localeOutputPathPage] = {
...origPrerenderPage,
@@ -2626,10 +2577,6 @@ export async function getStaticFiles(
};
}
/**
* Strips the trailing `/index` from the output name if it's not the root if
* the server mode is enabled.
*/
export function normalizeIndexOutput(
outputName: string,
isServerMode: boolean
@@ -2650,19 +2597,6 @@ export function getNextServerPath(nextVersion: string) {
: 'next/dist/next-server/server';
}
export function pathnameToOutputName(entryDirectory: string, pathname: string) {
if (pathname === '/') pathname = '/index';
return path.posix.join(entryDirectory, pathname);
}
export function getPostponeResumePathname(
entryDirectory: string,
pathname: string
): string {
if (pathname === '/') pathname = '/index';
return path.posix.join(entryDirectory, '_next/postponed/resume', pathname);
}
// update to leverage
export function updateRouteSrc(
route: Route,
@@ -2871,7 +2805,7 @@ export async function getMiddlewareBundle({
appPathRoutesManifest: Record<string, string>;
}): Promise<{
staticRoutes: Route[];
dynamicRouteMap: ReadonlyMap<string, RouteWithSrc>;
dynamicRouteMap: Map<string, RouteWithSrc>;
edgeFunctions: Record<string, EdgeFunction>;
}> {
const middlewareManifest = await getMiddlewareManifest(
@@ -3035,17 +2969,14 @@ export async function getMiddlewareBundle({
}
if (routesManifest?.basePath) {
const isAppPathRoute = !!appPathRoutesManifest[shortPath];
shortPath = path.posix.join(
'./',
routesManifest?.basePath,
shortPath.replace(/^\//, '')
shortPath = normalizeIndexOutput(
path.posix.join(
'./',
routesManifest?.basePath,
shortPath.replace(/^\//, '')
),
true
);
if (!isAppPathRoute) {
shortPath = normalizeIndexOutput(shortPath, true);
}
}
worker.edgeFunction.name = shortPath;
@@ -3307,8 +3238,7 @@ export function isApiPage(page: string | undefined) {
.match(/(serverless|server)\/pages\/api(\/|\.js$)/);
}
/** @deprecated */
export type VariantsManifestLegacy = Record<
export type VariantsManifest = Record<
string,
{
defaultValue?: unknown;
@@ -3319,7 +3249,7 @@ export type VariantsManifestLegacy = Record<
export async function getVariantsManifest(
entryPath: string,
outputDirectory: string
): Promise<null | VariantsManifestLegacy> {
): Promise<null | VariantsManifest> {
const pathVariantsManifest = path.join(
entryPath,
outputDirectory,
@@ -3333,7 +3263,7 @@ export async function getVariantsManifest(
if (!hasVariantsManifest) return null;
const variantsManifest: VariantsManifestLegacy = await fs.readJSON(
const variantsManifest: VariantsManifest = await fs.readJSON(
pathVariantsManifest
);
@@ -3355,7 +3285,7 @@ export async function getServerlessPages(params: {
glob('**/route.js', appDir),
glob('**/_not-found.js', appDir),
]).then(items => Object.assign(...items))
: Promise.resolve({} as Record<string, FileFsRef>),
: Promise.resolve({}),
getMiddlewareManifest(params.entryPath, params.outputDirectory),
]);

View File

@@ -22,7 +22,7 @@
"redirect": "manual"
},
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -46,7 +46,7 @@
"redirect": "manual"
},
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -70,7 +70,7 @@
"redirect": "manual"
},
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"

View File

@@ -1,7 +0,0 @@
export default function Page() {
return (
<>
<p>dynamic-index</p>
</>
)
}

View File

@@ -18,22 +18,7 @@
}
],
"probes": [
{
"path": "/dynamic-index/hello/index",
"status": 200,
"mustContain": "dynamic-index"
},
{
"path": "/dynamic-index/hello/index",
"status": 200,
"mustContain": ":",
"mustNotContain": "<html",
"headers": {
"RSC": 1,
"Next-Router-Prefetch": 1
}
},
{
{
"path": "/rewritten-to-dashboard",
"status": 200,
"mustContain": "html"

View File

@@ -1,18 +0,0 @@
import React, { Suspense } from 'react'
import { Dynamic } from '../../../components/dynamic'
export const dynamicParams = false;
const slugs = ['a', 'b', 'c'];
export function generateStaticParams() {
return slugs.map((slug) => ({ slug }));
}
export default function NoFallbackPage({ params: { slug } }) {
return (
<Suspense fallback={<Dynamic pathname={`/no-fallback/${slug}`} fallback />}>
<Dynamic pathname={`/no-fallback/${slug}`} />
</Suspense>
)
}

View File

@@ -19,23 +19,11 @@ const pages = [
{ pathname: '/no-suspense/nested/a', dynamic: true },
{ pathname: '/no-suspense/nested/b', dynamic: true },
{ pathname: '/no-suspense/nested/c', dynamic: true },
{ pathname: '/no-fallback/a', dynamic: true },
{ pathname: '/no-fallback/b', dynamic: true },
{ pathname: '/no-fallback/c', dynamic: true },
// TODO: uncomment when we've fixed the 404 case for force-dynamic pages
// { pathname: '/dynamic/force-dynamic', dynamic: 'force-dynamic' },
{ pathname: '/dynamic/force-static', dynamic: 'force-static' },
];
const cases = {
404: [
// For routes that do not support fallback (they had `dynamicParams` set to
// `false`), we shouldn't see any fallback behavior for routes not defined
// in `getStaticParams`.
{ pathname: '/no-fallback/non-existent' },
],
};
const ctx = {};
describe(`${__dirname.split(path.sep).pop()}`, () => {
@@ -61,14 +49,6 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
expect(html).toContain('</html>');
}
);
it.each(cases[404])(
'should return 404 for $pathname',
async ({ pathname }) => {
const res = await fetch(`${ctx.deploymentUrl}${pathname}`);
expect(res.status).toEqual(404);
}
);
});
describe('prefetch RSC payloads should return', () => {
@@ -108,16 +88,6 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
}
}
);
it.each(cases[404])(
'should return 404 for $pathname',
async ({ pathname }) => {
const res = await fetch(`${ctx.deploymentUrl}${pathname}`, {
headers: { RSC: 1, 'Next-Router-Prefetch': '1' },
});
expect(res.status).toEqual(404);
}
);
});
describe('dynamic RSC payloads should return', () => {
@@ -152,15 +122,5 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
expect(text).not.toContain(expected);
}
});
it.each(cases[404])(
'should return 404 for $pathname',
async ({ pathname }) => {
const res = await fetch(`${ctx.deploymentUrl}${pathname}`, {
headers: { RSC: 1 },
});
expect(res.status).toEqual(404);
}
);
});
});

View File

@@ -1,8 +1,8 @@
{
"dependencies": {
"next": "canary",
"react": "18.2.0",
"react-dom": "18.2.0"
"react": "experimental",
"react-dom": "experimental"
},
"ignoreNextjsUpdates": true
}

View File

@@ -1,7 +0,0 @@
export default function Page() {
return (
<>
<p>dynamic-index</p>
</>
)
}

View File

@@ -18,21 +18,6 @@
}
],
"probes": [
{
"path": "/dynamic-index/hello/index",
"status": 200,
"mustContain": "dynamic-index"
},
{
"path": "/dynamic-index/hello/index",
"status": 200,
"mustContain": ":",
"mustNotContain": "<html",
"headers": {
"RSC": 1,
"Next-Router-Prefetch": 1
}
},
{
"path": "/rewritten-to-dashboard",
"status": 200,
@@ -147,7 +132,7 @@
"status": 200,
"mustContain": "hello from /ssg",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
@@ -163,7 +148,7 @@
"path": "/ssg",
"status": 200,
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -175,7 +160,7 @@
"path": "/ssg",
"status": 200,
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch",
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url",
"content-type": "text/x-component"
},
"headers": {
@@ -210,14 +195,14 @@
"status": 200,
"mustContain": "hello from app/dashboard/deployments/[id]/settings",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
"path": "/dashboard/deployments/123/settings",
"status": 200,
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -230,14 +215,14 @@
"status": 200,
"mustContain": "catchall",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
"path": "/dashboard/deployments/catchall/something",
"status": 200,
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -250,7 +235,7 @@
"status": 200,
"mustContain": "hello from app/dashboard",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
@@ -270,7 +255,7 @@
},
"responseHeaders": {
"content-type": "text/x-component",
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{

View File

@@ -11,7 +11,7 @@
"status": 200,
"mustContain": "hello from app/dashboard",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
@@ -31,7 +31,7 @@
},
"responseHeaders": {
"content-type": "text/x-component",
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{

View File

@@ -47,14 +47,14 @@
"status": 200,
"mustContain": "hello from /ssg",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
"path": "/ssg/",
"status": 200,
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -87,14 +87,14 @@
"status": 200,
"mustContain": "hello from app/dashboard/deployments/[id]/settings",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
"path": "/dashboard/deployments/123/settings/",
"status": 200,
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -107,14 +107,14 @@
"status": 200,
"mustContain": "catchall",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
"path": "/dashboard/deployments/catchall/something/",
"status": 200,
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
},
"headers": {
"RSC": "1"
@@ -127,7 +127,7 @@
"status": 200,
"mustContain": "hello from app/dashboard",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{
@@ -147,7 +147,7 @@
},
"responseHeaders": {
"content-type": "text/x-component",
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{

View File

@@ -11,7 +11,7 @@
"status": 200,
"mustContain": "about",
"responseHeaders": {
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
}
},
{

View File

@@ -1,38 +1,5 @@
# @vercel/node
## 3.0.21
### Patch Changes
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
- @vercel/build-utils@7.7.1
## 3.0.20
### Patch Changes
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
## 3.0.19
### Patch Changes
- build: upgrade edge-runtime ([#11148](https://github.com/vercel/vercel/pull/11148))
- refactor: simplify content-length check ([#11150](https://github.com/vercel/vercel/pull/11150))
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
- @vercel/build-utils@7.7.0
## 3.0.18
### Patch Changes
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
- @vercel/build-utils@7.6.0
## 3.0.17
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/node",
"version": "3.0.21",
"version": "3.0.17",
"license": "Apache-2.0",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -20,18 +20,16 @@
"dist"
],
"dependencies": {
"@edge-runtime/node-utils": "2.3.0",
"@edge-runtime/primitives": "4.1.0",
"@edge-runtime/vm": "3.2.0",
"@edge-runtime/node-utils": "2.2.1",
"@edge-runtime/primitives": "4.0.5",
"@edge-runtime/vm": "3.1.7",
"@types/node": "14.18.33",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"@vercel/error-utils": "2.0.2",
"@vercel/nft": "0.26.4",
"@vercel/nft": "0.26.2",
"@vercel/static-config": "3.0.0",
"async-listen": "3.0.0",
"cjs-module-lexer": "1.2.3",
"edge-runtime": "2.5.9",
"es-module-lexer": "1.4.1",
"edge-runtime": "2.5.7",
"esbuild": "0.14.47",
"etag": "1.8.1",
"node-fetch": "2.6.9",

View File

@@ -11,17 +11,11 @@ import type { VercelProxyResponse } from './types.js';
import { Config } from '@vercel/build-utils';
import { createEdgeEventHandler } from './edge-functions/edge-handler.mjs';
import { createServer, IncomingMessage, ServerResponse } from 'http';
import {
createServerlessEventHandler,
HTTP_METHODS,
} from './serverless-functions/serverless-handler.mjs';
import { createServerlessEventHandler } from './serverless-functions/serverless-handler.mjs';
import { isEdgeRuntime, logError, validateConfiguredRuntime } from './utils.js';
import { init, parse as parseEsm } from 'es-module-lexer';
import { parse as parseCjs } from 'cjs-module-lexer';
import { getConfig } from '@vercel/static-config';
import { Project } from 'ts-morph';
import { listen } from 'async-listen';
import { readFile } from 'fs/promises';
const parseConfig = (entryPointPath: string) =>
getConfig(new Project(), entryPointPath);
@@ -52,31 +46,12 @@ async function createEventHandler(
);
}
const content = await readFile(entrypointPath, 'utf8');
const isStreaming =
staticConfig?.supportsResponseStreaming ||
(await hasWebHandlers(async () => parseCjs(content).exports)) ||
(await hasWebHandlers(async () =>
init.then(() => parseEsm(content)[1].map(specifier => specifier.n))
));
return createServerlessEventHandler(entrypointPath, {
mode: isStreaming ? 'streaming' : 'buffer',
mode: staticConfig?.supportsResponseStreaming ? 'streaming' : 'buffer',
shouldAddHelpers: options.shouldAddHelpers,
});
}
async function hasWebHandlers(getExports: () => Promise<string[]>) {
const exports = await getExports().catch(() => []);
for (const name of exports) {
if (HTTP_METHODS.includes(name)) {
return true;
}
}
return false;
}
let handleEvent: (request: IncomingMessage) => Promise<VercelProxyResponse>;
let handlerEventError: Error;
let onExit: (() => Promise<void>) | undefined;

View File

@@ -230,10 +230,8 @@ export async function createEdgeEventHandler(
}
const body: Buffer | string | undefined = await serializeBody(request);
if (body !== undefined && body.length) {
if (body !== undefined)
request.headers['content-length'] = String(body.length);
}
const url = new URL(request.url ?? '/', server.url);
const response = await undiciRequest(url, {

View File

@@ -1,7 +1,26 @@
import type { ServerResponse, IncomingMessage } from 'http';
import type { NodeHandler } from '@edge-runtime/node-utils';
import { buildToNodeHandler } from '@edge-runtime/node-utils';
import Edge from '@edge-runtime/primitives';
class FetchEvent {
public request: Request;
public awaiting: Set<Promise<void>>;
public response: Response | null;
constructor(request: Request) {
this.request = request;
this.response = null;
this.awaiting = new Set();
}
respondWith(response: Response) {
this.response = response;
}
waitUntil() {
throw new Error('waitUntil is not implemented yet for Node.js');
}
}
const webHandlerToNodeHandler = buildToNodeHandler(
{
@@ -13,8 +32,8 @@ const webHandlerToNodeHandler = buildToNodeHandler(
super(input, addDuplexToInit(init));
}
},
Uint8Array,
FetchEvent: Edge.FetchEvent,
Uint8Array: Uint8Array,
FetchEvent: FetchEvent,
},
{ defaultOrigin: 'https://vercel.com' }
);

View File

@@ -27,7 +27,7 @@ type ServerlessFunctionSignature = (
const [NODE_MAJOR] = process.versions.node.split('.').map(v => Number(v));
/* https://nextjs.org/docs/app/building-your-application/routing/router-handlers#supported-http-methods */
export const HTTP_METHODS = [
const HTTP_METHODS = [
'GET',
'HEAD',
'OPTIONS',

View File

@@ -0,0 +1,10 @@
/* global Response */
const baseUrl = ({ headers }) =>
`${headers.get('x-forwarded-proto')}://${headers.get('x-forwarded-host')}`;
export function GET(request) {
const { searchParams } = new URL(request.url, baseUrl(request));
const name = searchParams.get('name');
return new Response(`Greetings, ${name}`);
}

View File

@@ -1,50 +0,0 @@
/* global ReadableStream, TextEncoderStream, Response */
export const config = { runtime: 'edge' };
const DEFER_MS = 10;
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
const streaming =
text =>
(_, { waitUntil }) => {
const DATA = text.split(' ');
let index = 0;
const readable = new ReadableStream({
async start(controller) {
while (index < DATA.length) {
const data = DATA[index++];
let chunk = data;
if (index !== DATA.length) chunk += ' ';
controller.enqueue(chunk);
await wait(DEFER_MS);
}
controller.close();
},
}).pipeThrough(new TextEncoderStream());
waitUntil(wait(DATA.length * DEFER_MS));
return new Response(readable, {
headers: {
'Content-Type': 'text/plain',
'x-web-handler': text,
},
});
};
export const GET = streaming('Web handler using GET');
export const HEAD = streaming('Web handler using HEAD');
export const OPTIONS = streaming('Web handler using OPTIONS');
export const POST = streaming('Web handler using POST');
export const PUT = streaming('Web handler using PUT');
export const DELETE = streaming('Web handler using DELETE');
export const PATCH = streaming('Web handler using PATCH');

View File

@@ -1,48 +0,0 @@
/* global ReadableStream, TextEncoderStream, Response */
const DEFER_MS = 10;
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
const streaming =
text =>
(_, { waitUntil }) => {
const DATA = text.split(' ');
let index = 0;
const readable = new ReadableStream({
async start(controller) {
while (index < DATA.length) {
const data = DATA[index++];
let chunk = data;
if (index !== DATA.length) chunk += ' ';
controller.enqueue(chunk);
await wait(DEFER_MS);
}
controller.close();
},
}).pipeThrough(new TextEncoderStream());
waitUntil(wait(DATA.length * DEFER_MS));
return new Response(readable, {
headers: {
'Content-Type': 'text/plain',
'x-web-handler': text,
},
});
};
export const GET = streaming('Web handler using GET');
export const HEAD = streaming('Web handler using HEAD');
export const OPTIONS = streaming('Web handler using OPTIONS');
export const POST = streaming('Web handler using POST');
export const PUT = streaming('Web handler using PUT');
export const DELETE = streaming('Web handler using DELETE');
export const PATCH = streaming('Web handler using PATCH');

View File

@@ -30,9 +30,9 @@ function testForkDevServer(entrypoint: string) {
}
(NODE_MAJOR < 18 ? test.skip : test)(
'web handlers for node runtime',
'runs an serverless function that exports GET',
async () => {
const child = testForkDevServer('./web-handlers-node.js');
const child = testForkDevServer('./serverless-response.js');
try {
const result = await readMessage(child);
if (result.state !== 'message') {
@@ -43,251 +43,20 @@ function testForkDevServer(entrypoint: string) {
{
const response = await fetch(
`http://${address}:${port}/api/web-handlers-node`,
{ method: 'GET' }
`http://${address}:${port}/api/serverless-response?name=Vercel`
);
expect({
status: response.status,
body: await response.text(),
transferEncoding: response.headers.get('transfer-encoding'),
'x-web-handler': response.headers.get('x-web-handler'),
}).toEqual({
status: 200,
body: 'Web handler using GET',
transferEncoding: 'chunked',
'x-web-handler': 'Web handler using GET',
});
}).toEqual({ status: 200, body: 'Greetings, Vercel' });
}
{
const response = await fetch(
`http://${address}:${port}/api/web-handlers-node`,
{ method: 'POST' }
);
expect({
status: response.status,
body: await response.text(),
transferEncoding: response.headers.get('transfer-encoding'),
'x-web-handler': response.headers.get('x-web-handler'),
}).toEqual({
status: 200,
body: 'Web handler using POST',
transferEncoding: 'chunked',
'x-web-handler': 'Web handler using POST',
});
}
{
const response = await fetch(
`http://${address}:${port}/api/web-handlers-node`,
{ method: 'DELETE' }
);
expect({
status: response.status,
body: await response.text(),
transferEncoding: response.headers.get('transfer-encoding'),
'x-web-handler': response.headers.get('x-web-handler'),
}).toEqual({
status: 200,
body: 'Web handler using DELETE',
transferEncoding: 'chunked',
'x-web-handler': 'Web handler using DELETE',
});
}
{
const response = await fetch(
`http://${address}:${port}/api/web-handlers-node`,
{ method: 'PUT' }
);
expect({
status: response.status,
body: await response.text(),
transferEncoding: response.headers.get('transfer-encoding'),
'x-web-handler': response.headers.get('x-web-handler'),
}).toEqual({
status: 200,
body: 'Web handler using PUT',
transferEncoding: 'chunked',
'x-web-handler': 'Web handler using PUT',
});
}
{
const response = await fetch(
`http://${address}:${port}/api/web-handlers-node`,
{ method: 'PATCH' }
);
expect({
status: response.status,
body: await response.text(),
transferEncoding: response.headers.get('transfer-encoding'),
'x-web-handler': response.headers.get('x-web-handler'),
}).toEqual({
status: 200,
body: 'Web handler using PATCH',
transferEncoding: 'chunked',
'x-web-handler': 'Web handler using PATCH',
});
}
{
  // HEAD responses have no body, so only the status and the echo header
  // are asserted. (A stale duplicate URL argument left over from a merge
  // was removed here — `fetch` takes a single URL plus an init object.)
  const response = await fetch(
    `http://${address}:${port}/api/web-handlers-node`,
    { method: 'HEAD' }
  );
  expect({
    status: response.status,
    'x-web-handler': response.headers.get('x-web-handler'),
  }).toEqual({
    status: 200,
    'x-web-handler': 'Web handler using HEAD',
  });
}
{
  // OPTIONS behaves like the body-carrying methods for this handler.
  const res = await fetch(
    `http://${address}:${port}/api/web-handlers-node`,
    { method: 'OPTIONS' }
  );
  const body = await res.text();
  expect({
    status: res.status,
    body,
    transferEncoding: res.headers.get('transfer-encoding'),
    'x-web-handler': res.headers.get('x-web-handler'),
  }).toEqual({
    status: 200,
    body: 'Web handler using OPTIONS',
    transferEncoding: 'chunked',
    'x-web-handler': 'Web handler using OPTIONS',
  });
}
} finally {
child.kill(9);
}
}
);
(NODE_MAJOR < 18 ? test.skip : test)(
'web handlers for edge runtime',
async () => {
const child = testForkDevServer('./web-handlers-edge.js');
try {
const result = await readMessage(child);
if (result.state !== 'message') {
throw new Error('Exited. error: ' + JSON.stringify(result.value));
}
const { address, port } = result.value;
// GET and POST against the Edge runtime web handler: expect the echoed
// body, chunked transfer encoding, and the method-specific header.
for (const method of ['GET', 'POST']) {
  const res = await fetch(
    `http://${address}:${port}/api/web-handlers-edge`,
    { method }
  );
  expect({
    status: res.status,
    body: await res.text(),
    transferEncoding: res.headers.get('transfer-encoding'),
    'x-web-handler': res.headers.get('x-web-handler'),
  }).toEqual({
    status: 200,
    body: `Web handler using ${method}`,
    transferEncoding: 'chunked',
    'x-web-handler': `Web handler using ${method}`,
  });
}
{
  const response = await fetch(
    `http://${address}:${port}/api/web-handlers-edge`,
    { method: 'DELETE' }
  );
  // Removed a leftover `console.log(response)` debug statement that was
  // polluting test output.
  // NOTE(review): unlike the other methods, the response body is not
  // asserted here — confirm whether that omission is intentional.
  expect({
    status: response.status,
    transferEncoding: response.headers.get('transfer-encoding'),
    'x-web-handler': response.headers.get('x-web-handler'),
  }).toEqual({
    status: 200,
    transferEncoding: 'chunked',
    'x-web-handler': 'Web handler using DELETE',
  });
}
// PUT and PATCH behave like the other body-carrying methods.
for (const method of ['PUT', 'PATCH']) {
  const res = await fetch(
    `http://${address}:${port}/api/web-handlers-edge`,
    { method }
  );
  expect({
    status: res.status,
    body: await res.text(),
    transferEncoding: res.headers.get('transfer-encoding'),
    'x-web-handler': res.headers.get('x-web-handler'),
  }).toEqual({
    status: 200,
    body: `Web handler using ${method}`,
    transferEncoding: 'chunked',
    'x-web-handler': `Web handler using ${method}`,
  });
}
{
  // HEAD responses carry no body, so only status and header are checked.
  const res = await fetch(
    `http://${address}:${port}/api/web-handlers-edge`,
    { method: 'HEAD' }
  );
  expect({
    status: res.status,
    'x-web-handler': res.headers.get('x-web-handler'),
  }).toEqual({
    status: 200,
    'x-web-handler': 'Web handler using HEAD',
  });
}
{
  const response = await fetch(
    `http://${address}:${port}/api/web-handlers-edge`,
    { method: 'OPTIONS' }
  );
  expect({
    status: response.status,
    body: await response.text(),
    transferEncoding: response.headers.get('transfer-encoding'),
    'x-web-handler': response.headers.get('x-web-handler'),
  }).toEqual({
    status: 200,
    body: 'Web handler using OPTIONS',
    transferEncoding: 'chunked',
    'x-web-handler': 'Web handler using OPTIONS',
  });
  // Removed a contradictory leftover assertion that expected status 405
  // immediately after asserting status 200 on the same response — it
  // could never pass and was a stale line from a merged diff.
}
} finally {
child.kill(9);

View File

@@ -26,7 +26,7 @@
"@types/jest": "27.4.1",
"@types/node": "14.18.33",
"@types/which": "3.0.0",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"execa": "^1.0.0",
"fs-extra": "11.1.1",
"jest-junit": "16.0.0",

View File

@@ -1,17 +1,5 @@
# @vercel/redwood
## 2.0.8
### Patch Changes
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
## 2.0.7
### Patch Changes
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
## 2.0.6
### Patch Changes

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/redwood",
"version": "2.0.8",
"version": "2.0.6",
"main": "./dist/index.js",
"license": "Apache-2.0",
"homepage": "https://vercel.com/docs",
@@ -20,7 +20,7 @@
"type-check": "tsc --noEmit"
},
"dependencies": {
"@vercel/nft": "0.26.4",
"@vercel/nft": "0.26.2",
"@vercel/routing-utils": "3.1.0",
"semver": "6.3.1"
},
@@ -28,7 +28,7 @@
"@types/aws-lambda": "8.10.19",
"@types/node": "14.18.33",
"@types/semver": "6.0.0",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"execa": "3.2.0",
"fs-extra": "11.1.0",
"jest-junit": "16.0.0"

View File

@@ -1,45 +1,5 @@
# @vercel/remix-builder
## 2.1.1
### Patch Changes
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
- Remove usage of `ensureResolvable()` in Vite builds ([#11213](https://github.com/vercel/vercel/pull/11213))
- Update `@remix-run/dev` fork to v2.8.0 ([#11206](https://github.com/vercel/vercel/pull/11206))
- Ensure the symlink directory exists in `ensureSymlink()` ([#11205](https://github.com/vercel/vercel/pull/11205))
## 2.1.0
### Minor Changes
- Remix Vite plugin support ([#11031](https://github.com/vercel/vercel/pull/11031))
## 2.0.20
### Patch Changes
- Don't install Remix fork when not using split configuration ([#11152](https://github.com/vercel/vercel/pull/11152))
- Add `serverBundles` post-build sanity check and fallback ([#11153](https://github.com/vercel/vercel/pull/11153))
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
- Update `@remix-run/dev` fork to v2.6.0 ([#11162](https://github.com/vercel/vercel/pull/11162))
- Update `@remix-run/dev` fork to v2.7.0 ([#11180](https://github.com/vercel/vercel/pull/11180))
- Update `@remix-run/dev` fork to v2.7.2 ([#11186](https://github.com/vercel/vercel/pull/11186))
## 2.0.19
### Patch Changes
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
## 2.0.18
### Patch Changes

View File

@@ -1,10 +1,9 @@
{
"name": "@vercel/remix-builder",
"version": "2.1.1",
"version": "2.0.18",
"license": "Apache-2.0",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",
"sideEffects": false,
"repository": {
"type": "git",
"url": "https://github.com/vercel/vercel.git",
@@ -14,7 +13,7 @@
"build": "node ../../utils/build-builder.mjs",
"test": "jest --reporters=default --reporters=jest-junit --env node --verbose --bail --runInBand",
"test-unit": "pnpm test test/unit.*test.*",
"test-e2e": "pnpm test test/integration-*.test.ts",
"test-e2e": "pnpm test test/integration.test.ts",
"type-check": "tsc --noEmit"
},
"files": [
@@ -22,17 +21,16 @@
"defaults"
],
"dependencies": {
"@vercel/error-utils": "2.0.2",
"@vercel/nft": "0.26.4",
"@vercel/nft": "0.26.2",
"@vercel/static-config": "3.0.0",
"ts-morph": "12.0.0"
},
"devDependencies": {
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.8.0",
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.5.1",
"@types/jest": "27.5.1",
"@types/node": "14.18.33",
"@types/semver": "7.3.13",
"@vercel/build-utils": "7.7.1",
"@vercel/build-utils": "7.5.1",
"jest-junit": "16.0.0",
"path-to-regexp": "6.2.1",
"semver": "7.5.2"

View File

@@ -1,814 +0,0 @@
import { Project } from 'ts-morph';
import { readFileSync, promises as fs, existsSync } from 'fs';
import { basename, dirname, extname, join, posix, relative, sep } from 'path';
import {
debug,
download,
execCommand,
FileBlob,
FileFsRef,
getEnvForPackageManager,
getNodeVersion,
getSpawnOptions,
glob,
EdgeFunction,
NodejsLambda,
rename,
runNpmInstall,
runPackageJsonScript,
scanParentDirs,
} from '@vercel/build-utils';
import { getConfig } from '@vercel/static-config';
import { nodeFileTrace } from '@vercel/nft';
import type {
BuildV2,
Files,
NodeVersion,
PackageJson,
BuildResultV2Typical,
} from '@vercel/build-utils';
import type { ConfigRoute } from '@remix-run/dev/dist/config/routes';
import type { BaseFunctionConfig } from '@vercel/static-config';
import {
calculateRouteConfigHash,
findConfig,
getPathFromRoute,
getRegExpFromPath,
getResolvedRouteConfig,
isLayoutRoute,
ResolvedRouteConfig,
ResolvedNodeRouteConfig,
ResolvedEdgeRouteConfig,
findEntry,
chdirAndReadConfig,
resolveSemverMinMax,
ensureResolvable,
isESM,
} from './utils';
import { patchHydrogenServer } from './hydrogen';
// Describes one compiled server output: where the bundle is written and
// which Remix route IDs it serves.
interface ServerBundle {
  // Path of the built server bundle, relative to the entrypoint directory.
  serverBuildPath: string;
  // Remix route IDs rendered by this bundle.
  routes: string[];
}
// This builder's own `package.json`, used to discover the pinned version
// of the `@vercel/remix-run-dev` fork from its devDependencies.
const remixBuilderPkg = JSON.parse(
  readFileSync(join(__dirname, '../package.json'), 'utf8')
);
const remixRunDevForkVersion =
  remixBuilderPkg.devDependencies['@remix-run/dev'];
// Directory containing the default entry/server template files.
const DEFAULTS_PATH = join(__dirname, '../defaults');
// Default server wrapper sources, read eagerly at module load and
// awaited only where needed.
const edgeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-edge.mjs'),
  'utf-8'
);
const nodeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-node.mjs'),
  'utf-8'
);
// Minimum supported version of the `@vercel/remix` package
const VERCEL_REMIX_MIN_VERSION = '1.10.0';
// Minimum supported version of the `@vercel/remix-run-dev` forked compiler
const REMIX_RUN_DEV_MIN_VERSION = '1.15.0';
// Maximum version of `@vercel/remix-run-dev` fork
// (and also `@vercel/remix` since they get published at the same time)
// Extracted from a spec like "npm:@vercel/remix-run-dev@2.8.0" by taking
// everything after the last "@".
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice(
  remixRunDevForkVersion.lastIndexOf('@') + 1
);
/**
 * Vercel Build for Remix projects using the classic (non-Vite) compiler.
 *
 * High-level flow:
 *   1. Download sources and run the install command.
 *   2. Read the Remix config and each route's `export const config` to
 *      group routes into per-runtime server bundles.
 *   3. Temporarily patch `remix.config` (and, for Hydrogen v2, the
 *      custom server file) so the build emits those bundles, run the
 *      build, then restore the original files in `finally`.
 *   4. Emit static assets plus one Node.js or Edge function per bundle,
 *      along with the routing table.
 *
 * @returns Build output: `routes`, `output` files, and framework version.
 */
export const build: BuildV2 = async ({
  entrypoint,
  files,
  workPath,
  repoRootPath,
  config,
  meta = {},
}) => {
  const { installCommand, buildCommand } = config;
  await download(files, workPath, meta);
  const mountpoint = dirname(entrypoint);
  const entrypointFsDirname = join(workPath, mountpoint);
  // Run "Install Command"
  const nodeVersion = await getNodeVersion(
    entrypointFsDirname,
    undefined,
    config,
    meta
  );
  const { cliType, packageJsonPath, lockfileVersion, lockfilePath } =
    await scanParentDirs(entrypointFsDirname);
  if (!packageJsonPath) {
    throw new Error('Failed to locate `package.json` file in your project');
  }
  // Snapshot the raw lockfile and `package.json` contents so they can be
  // restored in the `finally` block if dependencies get modified below.
  const [lockfileRaw, pkgRaw] = await Promise.all([
    lockfilePath ? fs.readFile(lockfilePath) : null,
    fs.readFile(packageJsonPath, 'utf8'),
  ]);
  const pkg = JSON.parse(pkgRaw);
  const spawnOpts = getSpawnOptions(meta, nodeVersion);
  if (!spawnOpts.env) {
    spawnOpts.env = {};
  }
  spawnOpts.env = getEnvForPackageManager({
    cliType,
    lockfileVersion,
    nodeVersion,
    env: spawnOpts.env,
  });
  if (typeof installCommand === 'string') {
    if (installCommand.trim()) {
      console.log(`Running "install" command: \`${installCommand}\`...`);
      await execCommand(installCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    } else {
      console.log(`Skipping "install" command...`);
    }
  } else {
    await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
  }
  // Hydrogen v2 apps are treated specially below: no config patching and
  // no split server bundles.
  const isHydrogen2 = Boolean(
    pkg.dependencies?.['@shopify/remix-oxygen'] ||
      pkg.devDependencies?.['@shopify/remix-oxygen']
  );
  // Determine the version of Remix based on the `@remix-run/dev`
  // package version.
  const remixRunDevPath = await ensureResolvable(
    entrypointFsDirname,
    repoRootPath,
    '@remix-run/dev'
  );
  const remixRunDevPkg = JSON.parse(
    readFileSync(join(remixRunDevPath, 'package.json'), 'utf8')
  );
  const remixVersion = remixRunDevPkg.version;
  const remixConfig = await chdirAndReadConfig(
    remixRunDevPath,
    entrypointFsDirname,
    packageJsonPath
  );
  const { serverEntryPoint, appDirectory } = remixConfig;
  const remixRoutes = Object.values(remixConfig.routes);
  let depsModified = false;
  const remixRunDevPkgVersion: string | undefined =
    pkg.dependencies?.['@remix-run/dev'] ||
    pkg.devDependencies?.['@remix-run/dev'];
  // Maps a route-config hash to the routes that share that config, and
  // each route to its fully-resolved config.
  const serverBundlesMap = new Map<string, ConfigRoute[]>();
  const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();
  // Read the `export const config` (if any) for each route
  const project = new Project();
  const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
  for (const route of remixRoutes) {
    const routePath = join(remixConfig.appDirectory, route.file);
    let staticConfig = getConfig(project, routePath);
    if (staticConfig && isHydrogen2) {
      console.log(
        'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
      );
      staticConfig = null;
    }
    staticConfigsMap.set(route, staticConfig);
  }
  for (const route of remixRoutes) {
    const config = getResolvedRouteConfig(
      route,
      remixConfig.routes,
      staticConfigsMap,
      isHydrogen2
    );
    resolvedConfigsMap.set(route, config);
  }
  // Figure out which routes belong to which server bundles
  // based on having common static config properties
  for (const route of remixRoutes) {
    if (isLayoutRoute(route.id, remixRoutes)) continue;
    const config = resolvedConfigsMap.get(route);
    if (!config) {
      throw new Error(`Expected resolved config for "${route.id}"`);
    }
    const hash = calculateRouteConfigHash(config);
    let routesForHash = serverBundlesMap.get(hash);
    if (!Array.isArray(routesForHash)) {
      routesForHash = [];
      serverBundlesMap.set(hash, routesForHash);
    }
    routesForHash.push(route);
  }
  let serverBundles: ServerBundle[] = Array.from(
    serverBundlesMap.entries()
  ).map(([hash, routes]) => {
    const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
    return {
      serverBuildPath: isHydrogen2
        ? relative(entrypointFsDirname, remixConfig.serverBuildPath)
        : `${relative(
            entrypointFsDirname,
            dirname(remixConfig.serverBuildPath)
          )}/build-${runtime}-${hash}.js`,
      routes: routes.map(r => r.id),
    };
  });
  // If the project is *not* relying on split configurations, then set
  // the `serverBuildPath` to the default Remix path, since the forked
  // Remix compiler will not be used
  if (!isHydrogen2 && serverBundles.length === 1) {
    // `serverBuildTarget` and `serverBuildPath` are undefined with
    // our remix config modifications, so use the default build path
    serverBundles[0].serverBuildPath = 'build/index.js';
  }
  // If the project is relying on split configurations, then override
  // the official `@remix-run/dev` package with the Vercel fork,
  // which supports the `serverBundles` config
  if (
    serverBundles.length > 1 &&
    !isHydrogen2 &&
    remixRunDevPkg.name !== '@vercel/remix-run-dev' &&
    !remixRunDevPkgVersion?.startsWith('https:')
  ) {
    const remixDevForkVersion = resolveSemverMinMax(
      REMIX_RUN_DEV_MIN_VERSION,
      REMIX_RUN_DEV_MAX_VERSION,
      remixVersion
    );
    // Remove `@remix-run/dev`, add `@vercel/remix-run-dev`
    // NOTE(review): this assumes `pkg.devDependencies` exists (earlier
    // accesses use optional chaining) — confirm it cannot be undefined
    // when this branch is reached.
    if (pkg.devDependencies['@remix-run/dev']) {
      delete pkg.devDependencies['@remix-run/dev'];
      pkg.devDependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
    } else {
      delete pkg.dependencies['@remix-run/dev'];
      pkg.dependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
    }
    depsModified = true;
  }
  // `app/entry.server.tsx` and `app/entry.client.tsx` are optional in Remix,
  // so if either of those files are missing then add our own versions.
  const userEntryServerFile = findEntry(appDirectory, 'entry.server');
  if (!userEntryServerFile) {
    await fs.copyFile(
      join(DEFAULTS_PATH, 'entry.server.jsx'),
      join(appDirectory, 'entry.server.jsx')
    );
    // NOTE(review): `pkg.dependencies` is accessed without optional
    // chaining here — confirm a `package.json` without a `dependencies`
    // field cannot reach this point.
    if (!pkg.dependencies['@vercel/remix']) {
      // Dependency version resolution logic
      // 1. Users app is on 1.9.0 -> we install the 1.10.0 (minimum) version of `@vercel/remix`.
      // 2. Users app is on 1.11.0 (a version greater than 1.10.0 and less than the known max
      //    published version) -> we install the (matching) 1.11.0 version of `@vercel/remix`.
      // 3. Users app is on something greater than our latest version of the fork -> we install
      //    the latest known published version of `@vercel/remix`.
      const vercelRemixVersion = resolveSemverMinMax(
        VERCEL_REMIX_MIN_VERSION,
        REMIX_RUN_DEV_MAX_VERSION,
        remixVersion
      );
      pkg.dependencies['@vercel/remix'] = vercelRemixVersion;
      depsModified = true;
    }
  }
  if (depsModified) {
    await fs.writeFile(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');
    // Bypass `--frozen-lockfile` enforcement by removing
    // env vars that are considered to be CI
    const nonCiEnv = { ...spawnOpts.env };
    delete nonCiEnv.CI;
    delete nonCiEnv.VERCEL;
    delete nonCiEnv.NOW_BUILDER;
    // Purposefully not passing `meta` here to avoid
    // the optimization that prevents `npm install`
    // from running a second time
    await runNpmInstall(
      entrypointFsDirname,
      [],
      {
        ...spawnOpts,
        env: nonCiEnv,
      },
      undefined,
      nodeVersion
    );
  }
  const userEntryClientFile = findEntry(
    remixConfig.appDirectory,
    'entry.client'
  );
  if (!userEntryClientFile) {
    await fs.copyFile(
      join(DEFAULTS_PATH, 'entry.client.react.jsx'),
      join(appDirectory, 'entry.client.jsx')
    );
  }
  let remixConfigWrapped = false;
  let serverEntryPointAbs: string | undefined;
  let originalServerEntryPoint: string | undefined;
  const remixConfigPath = findConfig(entrypointFsDirname, 'remix.config');
  const renamedRemixConfigPath = remixConfigPath
    ? `${remixConfigPath}.original${extname(remixConfigPath)}`
    : undefined;
  try {
    // We need to patch the `remix.config.js` file to force some values necessary
    // for a build that works on either Node.js or the Edge runtime
    if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
      await fs.rename(remixConfigPath, renamedRemixConfigPath);
      let patchedConfig: string;
      // Figure out if the `remix.config` file is using ESM syntax
      if (isESM(renamedRemixConfigPath)) {
        patchedConfig = `import config from './${basename(
          renamedRemixConfigPath
        )}';
config.serverBuildTarget = undefined;
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
config.serverPlatform = 'node';
config.serverBuildPath = undefined;
config.serverBundles = ${JSON.stringify(serverBundles)};
export default config;`;
      } else {
        patchedConfig = `const config = require('./${basename(
          renamedRemixConfigPath
        )}');
config.serverBuildTarget = undefined;
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
config.serverPlatform = 'node';
config.serverBuildPath = undefined;
config.serverBundles = ${JSON.stringify(serverBundles)};
module.exports = config;`;
      }
      await fs.writeFile(remixConfigPath, patchedConfig);
      remixConfigWrapped = true;
    }
    // For Hydrogen v2, patch the `server.ts` file to be Vercel-compatible
    if (isHydrogen2) {
      if (remixConfig.serverEntryPoint) {
        serverEntryPointAbs = join(
          entrypointFsDirname,
          remixConfig.serverEntryPoint
        );
        originalServerEntryPoint = await fs.readFile(
          serverEntryPointAbs,
          'utf8'
        );
        const patchedServerEntryPoint = patchHydrogenServer(
          project,
          serverEntryPointAbs
        );
        if (patchedServerEntryPoint) {
          debug(
            `Patched Hydrogen server file: ${remixConfig.serverEntryPoint}`
          );
          await fs.writeFile(serverEntryPointAbs, patchedServerEntryPoint);
        }
      } else {
        console.log('WARN: No "server" field found in Remix config');
      }
    }
    // Make `remix build` output production mode
    spawnOpts.env.NODE_ENV = 'production';
    // Run "Build Command"
    if (buildCommand) {
      debug(`Executing build command "${buildCommand}"`);
      await execCommand(buildCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    } else {
      if (hasScript('vercel-build', pkg)) {
        debug(`Executing "yarn vercel-build"`);
        await runPackageJsonScript(
          entrypointFsDirname,
          'vercel-build',
          spawnOpts
        );
      } else if (hasScript('build', pkg)) {
        debug(`Executing "yarn build"`);
        await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
      } else {
        await execCommand('remix build', {
          ...spawnOpts,
          cwd: entrypointFsDirname,
        });
      }
    }
  } finally {
    // Restoration steps run even when the build throws, so the user's
    // working tree is left untouched.
    const cleanupOps: Promise<void>[] = [];
    // Clean up our patched `remix.config.js` to be polite
    if (remixConfigWrapped && remixConfigPath && renamedRemixConfigPath) {
      cleanupOps.push(
        fs
          .rename(renamedRemixConfigPath, remixConfigPath)
          .then(() => debug(`Restored original "${remixConfigPath}" file`))
      );
    }
    // Restore original server entrypoint if it was modified (for Hydrogen v2)
    if (serverEntryPointAbs && originalServerEntryPoint) {
      cleanupOps.push(
        fs
          .writeFile(serverEntryPointAbs, originalServerEntryPoint)
          .then(() => debug(`Restored original "${serverEntryPointAbs}" file`))
      );
    }
    // Restore original `package.json` file and lockfile
    if (depsModified) {
      cleanupOps.push(
        fs
          .writeFile(packageJsonPath, pkgRaw)
          .then(() => debug(`Restored original "${packageJsonPath}" file`))
      );
      if (lockfilePath && lockfileRaw) {
        cleanupOps.push(
          fs
            .writeFile(lockfilePath, lockfileRaw)
            .then(() => debug(`Restored original "${lockfilePath}" file`))
        );
      }
    }
    await Promise.all(cleanupOps);
  }
  // This needs to happen before we run NFT to create the Node/Edge functions
  await Promise.all([
    ensureResolvable(
      entrypointFsDirname,
      repoRootPath,
      '@remix-run/server-runtime'
    ),
    !isHydrogen2
      ? ensureResolvable(entrypointFsDirname, repoRootPath, '@remix-run/node')
      : null,
  ]);
  const staticDir = join(entrypointFsDirname, 'public');
  // Do a sanity check to ensure that the server bundles `serverBuildPath` was actually created.
  // If it was not, then that usually means the Vercel forked Remix compiler was not used and
  // thus only a singular server bundle was produced.
  const serverBundlesRespected = existsSync(
    join(entrypointFsDirname, serverBundles[0].serverBuildPath)
  );
  if (!serverBundlesRespected) {
    console.warn(
      'WARN: `serverBundles` configuration failed. Falling back to a singular server bundle.'
    );
    serverBundles = [
      {
        serverBuildPath: 'build/index.js',
        routes: serverBundles.flatMap(b => b.routes),
      },
    ];
  }
  // Collect static files and create one render function per server
  // bundle, all in parallel.
  const [staticFiles, buildAssets, ...functions] = await Promise.all([
    glob('**', staticDir),
    glob('**', remixConfig.assetsBuildDirectory),
    ...serverBundles.map(bundle => {
      const firstRoute = remixConfig.routes[bundle.routes[0]];
      const config = resolvedConfigsMap.get(firstRoute) ?? {
        runtime: 'nodejs',
      };
      const serverBuildPath = join(entrypointFsDirname, bundle.serverBuildPath);
      if (config.runtime === 'edge') {
        return createRenderEdgeFunction(
          entrypointFsDirname,
          repoRootPath,
          serverBuildPath,
          serverEntryPoint,
          remixVersion,
          config
        );
      }
      return createRenderNodeFunction(
        nodeVersion,
        entrypointFsDirname,
        repoRootPath,
        serverBuildPath,
        serverEntryPoint,
        remixVersion,
        config
      );
    }),
  ]);
  // Serve compiled client assets from under the configured public path.
  const transformedBuildAssets = rename(buildAssets, name => {
    return posix.join('./', remixConfig.publicPath, name);
  });
  const output: BuildResultV2Typical['output'] = {
    ...staticFiles,
    ...transformedBuildAssets,
  };
  const routes: any[] = [
    {
      src: `^/${remixConfig.publicPath.replace(/^\/|\/$/g, '')}/(.*)$`,
      headers: { 'cache-control': 'public, max-age=31536000, immutable' },
      continue: true,
    },
    {
      handle: 'filesystem',
    },
  ];
  for (const route of remixRoutes) {
    // Layout routes don't get a function / route added
    if (isLayoutRoute(route.id, remixRoutes)) continue;
    const { path, rePath } = getPathFromRoute(route, remixConfig.routes);
    // If the route is a pathless layout route (at the root level)
    // and doesn't have any sub-routes, then a function should not be created.
    if (!path) {
      continue;
    }
    const funcIndex = serverBundles.findIndex(bundle => {
      return bundle.routes.includes(route.id);
    });
    const func = functions[funcIndex];
    if (!func) {
      throw new Error(`Could not determine server bundle for "${route.id}"`);
    }
    output[path] = func;
    // If this is a dynamic route then add a Vercel route
    const re = getRegExpFromPath(rePath);
    if (re) {
      routes.push({
        src: re.source,
        dest: path,
      });
    }
  }
  // Add a 404 path for not found pages to be server-side rendered by Remix.
  // Use an edge function bundle if one was generated, otherwise use Node.js.
  if (!output['404']) {
    const edgeFunctionIndex = Array.from(serverBundlesMap.values()).findIndex(
      routes => {
        const runtime = resolvedConfigsMap.get(routes[0])?.runtime;
        return runtime === 'edge';
      }
    );
    const func =
      edgeFunctionIndex !== -1 ? functions[edgeFunctionIndex] : functions[0];
    output['404'] = func;
  }
  routes.push({
    src: '/(.*)',
    dest: '/404',
  });
  return { routes, output, framework: { version: remixVersion } };
};
/**
 * Determines whether `pkg` defines an npm script named `scriptName`.
 * Returns `false` for a `null` package or a package without `scripts`.
 */
function hasScript(scriptName: string, pkg: PackageJson | null) {
  return typeof pkg?.scripts?.[scriptName] === 'string';
}
/**
 * Creates a Node.js serverless function that renders the Remix app for
 * one server bundle.
 *
 * When the project has no custom server entrypoint, a default Node.js
 * wrapper (`server-<bundle>.mjs`) is generated next to the compiled
 * bundle. The handler is then traced with `@vercel/nft` so all runtime
 * files are included in the Lambda.
 */
async function createRenderNodeFunction(
  nodeVersion: NodeVersion,
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: ResolvedNodeRouteConfig
): Promise<NodejsLambda> {
  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    // No custom server entrypoint: generate one that wraps the compiled
    // server bundle with the default Node.js server template.
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);
    // Copy the `server-node.mjs` file into the "build" directory
    const nodeServerSrc = await nodeServerSrcPromise;
    const entrypointSource = nodeServerSrc.replace(
      '@remix-run/dev/server-build',
      `./${baseServerBuildPath}.js`
    );
    await writeEntrypointFile(handlerPath, entrypointSource, rootDir);
  }
  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
  });
  trace.warnings.forEach(warning =>
    debug(`Warning from trace: ${warning.message}`)
  );
  const files: Files = {};
  for (const tracedFile of trace.fileList) {
    files[tracedFile] = await FileFsRef.fromFsPath({
      fsPath: join(rootDir, tracedFile),
    });
  }
  return new NodejsLambda({
    files,
    handler,
    runtime: nodeVersion.runtime,
    shouldAddHelpers: false,
    shouldAddSourcemapSupport: false,
    operationType: 'SSR',
    supportsResponseStreaming: true,
    regions: config.regions,
    memory: config.memory,
    maxDuration: config.maxDuration,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });
}
/**
 * Creates an Edge Function that renders the Remix app for one server
 * bundle.
 *
 * When the project has no custom server entrypoint, a default Edge
 * wrapper (`server-<bundle>.mjs`) is generated next to the compiled
 * bundle. The handler is then traced with `@vercel/nft` using
 * edge-oriented resolution conditions, patching `package.json` "main"
 * fields on the fly so "browser"/"module" entrypoints are preferred.
 */
async function createRenderEdgeFunction(
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: ResolvedEdgeRouteConfig
): Promise<EdgeFunction> {
  const files: Files = {};
  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);
    // Copy the `server-edge.mjs` file into the "build" directory
    const edgeServerSrc = await edgeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      edgeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }
  // Holds the patched `@remix-run/vercel` package.json contents (set if
  // that package is encountered during the trace) so the modified copy
  // is emitted instead of the on-disk file.
  let remixRunVercelPkgJson: string | undefined;
  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
    conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
    async readFile(fsPath) {
      let source: Buffer | string;
      try {
        source = await fs.readFile(fsPath);
      } catch (err: any) {
        // Report missing files/directories to nft as "not found" rather
        // than failing the whole trace.
        if (err.code === 'ENOENT' || err.code === 'EISDIR') {
          return null;
        }
        throw err;
      }
      if (basename(fsPath) === 'package.json') {
        // For Edge Functions, patch "main" field to prefer "browser" or "module"
        const pkgJson = JSON.parse(source.toString());
        // When `@remix-run/vercel` is detected, we need to modify the `package.json`
        // to include the "browser" field so that the proper Edge entrypoint file
        // is used. This is a temporary stop gap until this PR is merged:
        // https://github.com/remix-run/remix/pull/5537
        if (pkgJson.name === '@remix-run/vercel') {
          pkgJson.browser = 'dist/edge.js';
          pkgJson.dependencies['@remix-run/server-runtime'] =
            pkgJson.dependencies['@remix-run/node'];
          if (!remixRunVercelPkgJson) {
            remixRunVercelPkgJson = JSON.stringify(pkgJson, null, 2) + '\n';
            // Copy in the edge entrypoint so that NFT can properly resolve it
            const vercelEdgeEntrypointPath = join(
              DEFAULTS_PATH,
              'vercel-edge-entrypoint.js'
            );
            const vercelEdgeEntrypointDest = join(
              dirname(fsPath),
              'dist/edge.js'
            );
            await fs.copyFile(
              vercelEdgeEntrypointPath,
              vercelEdgeEntrypointDest
            );
          }
        }
        for (const prop of ['browser', 'module']) {
          const val = pkgJson[prop];
          if (typeof val === 'string') {
            pkgJson.main = val;
            // Return the modified `package.json` to nft
            source = JSON.stringify(pkgJson);
            break;
          }
        }
      }
      return source;
    },
  });
  for (const warning of trace.warnings) {
    debug(`Warning from trace: ${warning.message}`);
  }
  for (const file of trace.fileList) {
    if (
      remixRunVercelPkgJson &&
      file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
    ) {
      // Use the modified `@remix-run/vercel` package.json which contains "browser" field
      files[file] = new FileBlob({ data: remixRunVercelPkgJson });
    } else {
      files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
    }
  }
  const fn = new EdgeFunction({
    files,
    deploymentTarget: 'v8-worker',
    entrypoint: handler,
    regions: config.regions,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });
  return fn;
}
/**
 * Writes a generated server entrypoint file to disk.
 *
 * If the destination directory does not exist (ENOENT), a friendlier
 * error pointing to Vercel support is thrown instead of the raw fs
 * error; any other failure is re-thrown untouched.
 */
async function writeEntrypointFile(
  path: string,
  data: string,
  rootDir: string
) {
  try {
    await fs.writeFile(path, data);
  } catch (err: any) {
    if (err.code !== 'ENOENT') {
      throw err;
    }
    const missingDir = relative(rootDir, dirname(path));
    throw new Error(
      `The "${missingDir}" directory does not exist. Please contact support at https://vercel.com/help.`
    );
  }
}

View File

@@ -1,452 +0,0 @@
import { readFileSync, promises as fs, statSync, existsSync } from 'fs';
import { basename, dirname, join, relative, sep } from 'path';
import { isErrnoException } from '@vercel/error-utils';
import { nodeFileTrace } from '@vercel/nft';
import {
BuildResultV2Typical,
debug,
execCommand,
getEnvForPackageManager,
getNodeVersion,
getSpawnOptions,
glob,
runNpmInstall,
runPackageJsonScript,
scanParentDirs,
FileBlob,
FileFsRef,
EdgeFunction,
NodejsLambda,
} from '@vercel/build-utils';
import {
getPathFromRoute,
getRegExpFromPath,
getRemixVersion,
hasScript,
logNftWarnings,
} from './utils';
import type { BuildV2, Files, NodeVersion } from '@vercel/build-utils';
// Directory containing the default server wrapper template files.
const DEFAULTS_PATH = join(__dirname, '../defaults');
// Default Edge/Node server wrapper sources, read eagerly at module load
// and awaited only where needed.
const edgeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-edge.mjs'),
  'utf-8'
);
const nodeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-node.mjs'),
  'utf-8'
);
// Shape of the `.vercel/remix-build-result.json` file that the
// `vercelPreset()` Vite preset emits after a Remix Vite build.
interface RemixBuildResult {
  buildManifest: {
    // Server bundle outputs keyed by bundle ID; absent for SPA builds
    // with no server output.
    serverBundles?: Record<
      string,
      { id: string; file: string; config: Record<string, unknown> }
    >;
    // Maps each route ID to the ID of the server bundle that renders it.
    routeIdToServerBundleId?: Record<string, string>;
    // All routes in the app, keyed by route ID.
    routes: Record<
      string,
      {
        id: string;
        file: string;
        path?: string;
        index?: boolean;
        parentId?: string;
        config: Record<string, unknown>;
      }
    >;
  };
  remixConfig: {
    // Path of the Vite build output directory.
    buildDirectory: string;
  };
  viteConfig?: {
    build?: {
      // Subdirectory for compiled client assets (Vite `build.assetsDir`).
      assetsDir: string;
    };
  };
}
/**
 * Builds a Vite-based Remix project into Vercel Build Output:
 * static client assets plus one render function (Node.js or Edge)
 * per server bundle described by the build manifest.
 *
 * Reads `.vercel/remix-build-result.json` (written by the `vercelPreset()`
 * Vite preset); when absent, falls back to synthesizing a minimal build
 * result from the default `build/` output directory.
 */
export const build: BuildV2 = async ({
  entrypoint,
  workPath,
  repoRootPath,
  config,
  meta = {},
}) => {
  const { installCommand, buildCommand } = config;
  const mountpoint = dirname(entrypoint);
  const entrypointFsDirname = join(workPath, mountpoint);

  // Run "Install Command"
  const nodeVersion = await getNodeVersion(
    entrypointFsDirname,
    undefined,
    config,
    meta
  );

  const { cliType, lockfileVersion, packageJson } = await scanParentDirs(
    entrypointFsDirname,
    true
  );

  const spawnOpts = getSpawnOptions(meta, nodeVersion);
  if (!spawnOpts.env) {
    spawnOpts.env = {};
  }

  // Ensure the detected package manager (npm/yarn/pnpm) is set up in the env
  spawnOpts.env = getEnvForPackageManager({
    cliType,
    lockfileVersion,
    nodeVersion,
    env: spawnOpts.env,
  });

  if (typeof installCommand === 'string') {
    if (installCommand.trim()) {
      console.log(`Running "install" command: \`${installCommand}\`...`);
      await execCommand(installCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    } else {
      // An explicitly empty install command means "skip install"
      console.log(`Skipping "install" command...`);
    }
  } else {
    await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
  }

  // Determine the version of Remix based on the `@remix-run/dev`
  // package version.
  const remixVersion = await getRemixVersion(entrypointFsDirname, repoRootPath);

  // Run "Build Command" — explicit buildCommand wins, then the
  // "vercel-build"/"build" package.json scripts, then `remix build`.
  if (buildCommand) {
    debug(`Executing build command "${buildCommand}"`);
    await execCommand(buildCommand, {
      ...spawnOpts,
      cwd: entrypointFsDirname,
    });
  } else {
    if (hasScript('vercel-build', packageJson)) {
      debug(`Executing "vercel-build" script`);
      await runPackageJsonScript(
        entrypointFsDirname,
        'vercel-build',
        spawnOpts
      );
    } else if (hasScript('build', packageJson)) {
      debug(`Executing "build" script`);
      await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
    } else {
      await execCommand('remix build', {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    }
  }

  const remixBuildResultPath = join(
    entrypointFsDirname,
    '.vercel/remix-build-result.json'
  );

  let remixBuildResult: RemixBuildResult | undefined;
  try {
    const remixBuildResultContents = readFileSync(remixBuildResultPath, 'utf8');
    remixBuildResult = JSON.parse(remixBuildResultContents);
  } catch (err: unknown) {
    if (!isErrnoException(err) || err.code !== 'ENOENT') {
      throw err;
    }

    // The project has not configured the `vercelPreset()`
    // Preset in the "vite.config" file. Attempt to check
    // for the default build output directory.
    const buildDirectory = join(entrypointFsDirname, 'build');

    // Guard with `existsSync` so a missing directory falls through to the
    // helpful "configure `vercelPreset()`" error below, instead of this
    // `statSync` call throwing a raw ENOENT.
    if (existsSync(buildDirectory) && statSync(buildDirectory).isDirectory()) {
      console.warn('WARN: The `vercelPreset()` Preset was not detected.');
      remixBuildResult = {
        buildManifest: {
          routes: {
            root: {
              path: '',
              id: 'root',
              file: 'app/root.tsx',
              config: {},
            },
            'routes/_index': {
              file: 'app/routes/_index.tsx',
              id: 'routes/_index',
              index: true,
              parentId: 'root',
              config: {},
            },
          },
        },
        remixConfig: {
          buildDirectory,
        },
      };

      // Detect if a server build exists (won't be the case when `ssr: false`)
      const serverPath = 'build/server/index.js';
      if (existsSync(join(entrypointFsDirname, serverPath))) {
        remixBuildResult.buildManifest.routeIdToServerBundleId = {
          'routes/_index': '',
        };
        remixBuildResult.buildManifest.serverBundles = {
          '': {
            id: '',
            file: serverPath,
            config: {},
          },
        };
      }
    }
  }

  if (!remixBuildResult) {
    throw new Error(
      'Could not determine build output directory. Please configure the `vercelPreset()` Preset from the `@vercel/remix` npm package'
    );
  }

  const { buildManifest, remixConfig, viteConfig } = remixBuildResult;

  const staticDir = join(remixConfig.buildDirectory, 'client');
  const serverBundles = Object.values(buildManifest.serverBundles ?? {});

  // Glob the static client assets and create every render function in parallel
  const [staticFiles, ...functions] = await Promise.all([
    glob('**', staticDir),
    ...serverBundles.map(bundle => {
      if (bundle.config.runtime === 'edge') {
        return createRenderEdgeFunction(
          entrypointFsDirname,
          repoRootPath,
          join(entrypointFsDirname, bundle.file),
          undefined,
          remixVersion,
          bundle.config
        );
      }

      return createRenderNodeFunction(
        nodeVersion,
        entrypointFsDirname,
        repoRootPath,
        join(entrypointFsDirname, bundle.file),
        undefined,
        remixVersion,
        bundle.config
      );
    }),
  ]);

  // `functions` is positionally aligned with `serverBundles` above
  const functionsMap = new Map<string, EdgeFunction | NodejsLambda>();
  serverBundles.forEach((bundle, i) => {
    functionsMap.set(bundle.id, functions[i]);
  });

  const output: BuildResultV2Typical['output'] = staticFiles;
  const assetsDir = viteConfig?.build?.assetsDir || 'assets';
  const routes: any[] = [
    {
      // Fingerprinted assets are immutable, so cache them aggressively
      src: `^/${assetsDir}/(.*)$`,
      headers: { 'cache-control': 'public, max-age=31536000, immutable' },
      continue: true,
    },
    {
      handle: 'filesystem',
    },
  ];

  for (const [id, functionId] of Object.entries(
    buildManifest.routeIdToServerBundleId ?? {}
  )) {
    const route = buildManifest.routes[id];
    const { path, rePath } = getPathFromRoute(route, buildManifest.routes);

    // If the route is a pathless layout route (at the root level)
    // and doesn't have any sub-routes, then a function should not be created.
    if (!path) {
      continue;
    }

    const func = functionsMap.get(functionId);
    if (!func) {
      throw new Error(`Could not determine server bundle for "${id}"`);
    }
    output[path] = func;

    // If this is a dynamic route then add a Vercel route
    const re = getRegExpFromPath(rePath);
    if (re) {
      routes.push({
        src: re.source,
        dest: path,
      });
    }
  }

  // For the 404 case, invoke the Function (or serve the static file
  // for `ssr: false` mode) at the `/` path. Remix will serve its 404 route.
  routes.push({
    src: '/(.*)',
    dest: '/',
  });

  return { routes, output, framework: { version: remixVersion } };
};
/**
 * Creates a Node.js serverless render function for one server bundle.
 *
 * When no custom server entrypoint is provided, a `server-<bundle>.mjs`
 * wrapper is generated next to the bundle from the `server-node.mjs`
 * template, then the handler is traced with `@vercel/nft` so every
 * runtime dependency is included in the Lambda's file set.
 */
async function createRenderNodeFunction(
  nodeVersion: NodeVersion,
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: /*TODO: ResolvedNodeRouteConfig*/ any
): Promise<NodejsLambda> {
  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);

  if (!serverEntryPoint) {
    const buildBaseName = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${buildBaseName}.mjs`);
    handlerPath = join(rootDir, handler);

    // Write the `server-node.mjs` template into the "build" directory,
    // pointed at this bundle's compiled server build
    const template = await nodeServerSrcPromise;
    const entrypointSource = template.replace(
      '@remix-run/dev/server-build',
      `./${buildBaseName}.js`
    );
    await fs.writeFile(handlerPath, entrypointSource);
  }

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
  });

  logNftWarnings(trace.warnings, '@remix-run/node');

  const files: Files = {};
  for (const tracedFile of trace.fileList) {
    files[tracedFile] = await FileFsRef.fromFsPath({
      fsPath: join(rootDir, tracedFile),
    });
  }

  return new NodejsLambda({
    files,
    handler,
    runtime: nodeVersion.runtime,
    shouldAddHelpers: false,
    shouldAddSourcemapSupport: false,
    operationType: 'SSR',
    supportsResponseStreaming: true,
    regions: config.regions,
    memory: config.memory,
    maxDuration: config.maxDuration,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });
}
/**
 * Creates an Edge render function for one server bundle.
 *
 * When no custom server entrypoint is provided, generates a
 * `server-<bundle>.mjs` wrapper from the `server-edge.mjs` template, then
 * traces the handler with `@vercel/nft` using edge-first module resolution
 * conditions so browser/edge variants of packages are preferred.
 */
async function createRenderEdgeFunction(
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: /* TODO: ResolvedEdgeRouteConfig*/ any
): Promise<EdgeFunction> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-edge.mjs` file into the "build" directory
    const edgeServerSrc = await edgeServerSrcPromise;
    await fs.writeFile(
      handlerPath,
      edgeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      )
    );
  }

  // NOTE(review): `remixRunVercelPkgJson` is declared but never assigned in
  // this version, so the `FileBlob` branch in the loop below is effectively
  // dead code — confirm whether the `@remix-run/vercel` patching was
  // intentionally dropped here (it still exists in the legacy builder).
  let remixRunVercelPkgJson: string | undefined;

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
    conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
    // Custom reader so that `package.json` contents can be rewritten
    // on the fly before nft resolves each package's entry point
    async readFile(fsPath) {
      let source: Buffer | string;
      try {
        source = await fs.readFile(fsPath);
      } catch (err: any) {
        if (err.code === 'ENOENT' || err.code === 'EISDIR') {
          // Tell nft the path does not resolve to a readable file
          return null;
        }
        throw err;
      }
      if (basename(fsPath) === 'package.json') {
        // For Edge Functions, patch "main" field to prefer "browser" or "module"
        const pkgJson = JSON.parse(source.toString());
        for (const prop of ['browser', 'module']) {
          const val = pkgJson[prop];
          if (typeof val === 'string') {
            pkgJson.main = val;
            // Return the modified `package.json` to nft
            source = JSON.stringify(pkgJson);
            break;
          }
        }
      }
      return source;
    },
  });

  // Surface unresolved-dependency warnings; throws if
  // `@remix-run/server-runtime` itself could not be resolved
  logNftWarnings(trace.warnings, '@remix-run/server-runtime');

  for (const file of trace.fileList) {
    if (
      remixRunVercelPkgJson &&
      file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
    ) {
      // Use the modified `@remix-run/vercel` package.json which contains "browser" field
      files[file] = new FileBlob({ data: remixRunVercelPkgJson });
    } else {
      files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
    }
  }

  const fn = new EdgeFunction({
    files,
    deploymentTarget: 'v8-worker',
    entrypoint: handler,
    regions: config.regions,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });
  return fn;
}

View File

@@ -1,9 +1,786 @@
import { build as buildVite } from './build-vite';
import { build as buildLegacy } from './build-legacy';
import { findConfig } from './utils';
import type { BuildV2 } from '@vercel/build-utils';
import { Project } from 'ts-morph';
import { readFileSync, promises as fs } from 'fs';
import { basename, dirname, extname, join, posix, relative, sep } from 'path';
import {
debug,
download,
execCommand,
FileBlob,
FileFsRef,
getEnvForPackageManager,
getNodeVersion,
getSpawnOptions,
glob,
EdgeFunction,
NodejsLambda,
rename,
runNpmInstall,
runPackageJsonScript,
scanParentDirs,
} from '@vercel/build-utils';
import { getConfig } from '@vercel/static-config';
import { nodeFileTrace } from '@vercel/nft';
import type {
BuildV2,
Files,
NodeVersion,
PackageJson,
BuildResultV2Typical,
} from '@vercel/build-utils';
import type { ConfigRoute } from '@remix-run/dev/dist/config/routes';
import type { BaseFunctionConfig } from '@vercel/static-config';
import {
calculateRouteConfigHash,
findConfig,
getPathFromRoute,
getRegExpFromPath,
getResolvedRouteConfig,
isLayoutRoute,
ResolvedRouteConfig,
ResolvedNodeRouteConfig,
ResolvedEdgeRouteConfig,
findEntry,
chdirAndReadConfig,
resolveSemverMinMax,
ensureResolvable,
isESM,
} from './utils';
import { patchHydrogenServer } from './hydrogen';
export const build: BuildV2 = opts => {
const isLegacy = findConfig(opts.workPath, 'remix.config');
return isLegacy ? buildLegacy(opts) : buildVite(opts);
/**
 * One compiled server bundle: where its build output will be written
 * (relative to the entrypoint directory) and which Remix route ids
 * it serves.
 */
interface ServerBundle {
  // Output path of the compiled server build for this bundle
  serverBuildPath: string;
  // Remix route ids rendered by this bundle
  routes: string[];
}
// This builder's own package.json, used to discover the pinned
// version of the Vercel `@remix-run/dev` fork.
const remixBuilderPkg = JSON.parse(
  readFileSync(join(__dirname, '../package.json'), 'utf8')
);
const remixRunDevForkVersion =
  remixBuilderPkg.devDependencies['@remix-run/dev'];

// Directory of bundled default files (server/client entrypoints, etc.)
const DEFAULTS_PATH = join(__dirname, '../defaults');

// Templates for the generated Edge / Node.js server entrypoints.
// Read eagerly at module load; awaited where needed.
const edgeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-edge.mjs'),
  'utf-8'
);
const nodeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-node.mjs'),
  'utf-8'
);

// Minimum supported version of the `@vercel/remix` package
const VERCEL_REMIX_MIN_VERSION = '1.10.0';

// Minimum supported version of the `@vercel/remix-run-dev` forked compiler
const REMIX_RUN_DEV_MIN_VERSION = '1.15.0';

// Maximum version of `@vercel/remix-run-dev` fork
// (and also `@vercel/remix` since they get published at the same time).
// Extracted from the tail of the devDependency specifier (after the last '@').
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice(
  remixRunDevForkVersion.lastIndexOf('@') + 1
);
/**
 * Builds a classic-compiler (non-Vite) Remix project.
 *
 * High-level flow:
 *   1. install dependencies (custom install command or package-manager install)
 *   2. swap `@remix-run/dev` for the Vercel fork that supports `serverBundles`
 *      (and add `@vercel/remix` / default entry files when missing)
 *   3. group routes into server bundles by their resolved static config
 *   4. patch `remix.config` (or the Hydrogen v2 server file) for the build,
 *      run the build, then restore every modified file in `finally`
 *   5. trace each bundle into a Node.js or Edge render function and emit
 *      the static assets, functions, and routing table.
 */
export const build: BuildV2 = async ({
  entrypoint,
  files,
  workPath,
  repoRootPath,
  config,
  meta = {},
}) => {
  const { installCommand, buildCommand } = config;

  await download(files, workPath, meta);

  const mountpoint = dirname(entrypoint);
  const entrypointFsDirname = join(workPath, mountpoint);

  // Run "Install Command"
  const nodeVersion = await getNodeVersion(
    entrypointFsDirname,
    undefined,
    config,
    meta
  );

  const { cliType, packageJsonPath, lockfileVersion, lockfilePath } =
    await scanParentDirs(entrypointFsDirname);
  if (!packageJsonPath) {
    throw new Error('Failed to locate `package.json` file in your project');
  }
  // Keep the raw contents so the originals can be restored after the build
  const [lockfileRaw, pkgRaw] = await Promise.all([
    lockfilePath ? fs.readFile(lockfilePath) : null,
    fs.readFile(packageJsonPath, 'utf8'),
  ]);
  const pkg = JSON.parse(pkgRaw);

  const spawnOpts = getSpawnOptions(meta, nodeVersion);
  if (!spawnOpts.env) {
    spawnOpts.env = {};
  }
  spawnOpts.env = getEnvForPackageManager({
    cliType,
    lockfileVersion,
    nodeVersion,
    env: spawnOpts.env,
  });

  if (typeof installCommand === 'string') {
    if (installCommand.trim()) {
      console.log(`Running "install" command: \`${installCommand}\`...`);
      await execCommand(installCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    } else {
      // An explicitly empty install command means "skip install"
      console.log(`Skipping "install" command...`);
    }
  } else {
    await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
  }

  // Hydrogen v2 apps get special handling (no fork swap, no static config)
  const isHydrogen2 = Boolean(
    pkg.dependencies?.['@shopify/remix-oxygen'] ||
      pkg.devDependencies?.['@shopify/remix-oxygen']
  );

  // Determine the version of Remix based on the `@remix-run/dev`
  // package version.
  const remixRunDevPath = await ensureResolvable(
    entrypointFsDirname,
    repoRootPath,
    '@remix-run/dev'
  );
  const remixRunDevPkg = JSON.parse(
    readFileSync(join(remixRunDevPath, 'package.json'), 'utf8')
  );
  const remixVersion = remixRunDevPkg.version;

  const remixConfig = await chdirAndReadConfig(
    remixRunDevPath,
    entrypointFsDirname,
    packageJsonPath
  );
  const { serverEntryPoint, appDirectory } = remixConfig;
  const remixRoutes = Object.values(remixConfig.routes);

  let depsModified = false;

  const remixRunDevPkgVersion: string | undefined =
    pkg.dependencies?.['@remix-run/dev'] ||
    pkg.devDependencies?.['@remix-run/dev'];

  // Override the official `@remix-run/dev` package with the
  // Vercel fork, which supports the `serverBundles` config
  if (
    !isHydrogen2 &&
    remixRunDevPkg.name !== '@vercel/remix-run-dev' &&
    !remixRunDevPkgVersion?.startsWith('https:')
  ) {
    const remixDevForkVersion = resolveSemverMinMax(
      REMIX_RUN_DEV_MIN_VERSION,
      REMIX_RUN_DEV_MAX_VERSION,
      remixVersion
    );
    // Remove `@remix-run/dev`, add `@vercel/remix-run-dev`
    if (pkg.devDependencies['@remix-run/dev']) {
      delete pkg.devDependencies['@remix-run/dev'];
      pkg.devDependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
    } else {
      delete pkg.dependencies['@remix-run/dev'];
      pkg.dependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
    }
    depsModified = true;
  }

  // `app/entry.server.tsx` and `app/entry.client.tsx` are optional in Remix,
  // so if either of those files are missing then add our own versions.
  const userEntryServerFile = findEntry(appDirectory, 'entry.server');
  if (!userEntryServerFile) {
    await fs.copyFile(
      join(DEFAULTS_PATH, 'entry.server.jsx'),
      join(appDirectory, 'entry.server.jsx')
    );
    if (!pkg.dependencies['@vercel/remix']) {
      // Dependency version resolution logic
      // 1. Users app is on 1.9.0 -> we install the 1.10.0 (minimum) version of `@vercel/remix`.
      // 2. Users app is on 1.11.0 (a version greater than 1.10.0 and less than the known max
      //    published version) -> we install the (matching) 1.11.0 version of `@vercel/remix`.
      // 3. Users app is on something greater than our latest version of the fork -> we install
      //    the latest known published version of `@vercel/remix`.
      const vercelRemixVersion = resolveSemverMinMax(
        VERCEL_REMIX_MIN_VERSION,
        REMIX_RUN_DEV_MAX_VERSION,
        remixVersion
      );
      pkg.dependencies['@vercel/remix'] = vercelRemixVersion;
      depsModified = true;
    }
  }

  if (depsModified) {
    await fs.writeFile(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');

    // Bypass `--frozen-lockfile` enforcement by removing
    // env vars that are considered to be CI
    const nonCiEnv = { ...spawnOpts.env };
    delete nonCiEnv.CI;
    delete nonCiEnv.VERCEL;
    delete nonCiEnv.NOW_BUILDER;

    // Purposefully not passing `meta` here to avoid
    // the optimization that prevents `npm install`
    // from running a second time
    await runNpmInstall(
      entrypointFsDirname,
      [],
      {
        ...spawnOpts,
        env: nonCiEnv,
      },
      undefined,
      nodeVersion
    );
  }

  const userEntryClientFile = findEntry(
    remixConfig.appDirectory,
    'entry.client'
  );
  if (!userEntryClientFile) {
    await fs.copyFile(
      join(DEFAULTS_PATH, 'entry.client.react.jsx'),
      join(appDirectory, 'entry.client.jsx')
    );
  }

  // Track which files get mutated so `finally` can restore them
  let remixConfigWrapped = false;
  let serverEntryPointAbs: string | undefined;
  let originalServerEntryPoint: string | undefined;
  const remixConfigPath = findConfig(entrypointFsDirname, 'remix.config');
  const renamedRemixConfigPath = remixConfigPath
    ? `${remixConfigPath}.original${extname(remixConfigPath)}`
    : undefined;

  // These get populated inside the try/catch below
  let serverBundles: ServerBundle[];
  const serverBundlesMap = new Map<string, ConfigRoute[]>();
  const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();

  try {
    // Read the `export const config` (if any) for each route
    const project = new Project();
    const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
    for (const route of remixRoutes) {
      const routePath = join(remixConfig.appDirectory, route.file);
      let staticConfig = getConfig(project, routePath);
      if (staticConfig && isHydrogen2) {
        console.log(
          'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
        );
        staticConfig = null;
      }
      staticConfigsMap.set(route, staticConfig);
    }

    for (const route of remixRoutes) {
      const config = getResolvedRouteConfig(
        route,
        remixConfig.routes,
        staticConfigsMap,
        isHydrogen2
      );
      resolvedConfigsMap.set(route, config);
    }

    // Figure out which routes belong to which server bundles
    // based on having common static config properties
    for (const route of remixRoutes) {
      if (isLayoutRoute(route.id, remixRoutes)) continue;

      const config = resolvedConfigsMap.get(route);
      if (!config) {
        throw new Error(`Expected resolved config for "${route.id}"`);
      }
      const hash = calculateRouteConfigHash(config);

      let routesForHash = serverBundlesMap.get(hash);
      if (!Array.isArray(routesForHash)) {
        routesForHash = [];
        serverBundlesMap.set(hash, routesForHash);
      }

      routesForHash.push(route);
    }

    serverBundles = Array.from(serverBundlesMap.entries()).map(
      ([hash, routes]) => {
        const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
        return {
          // Hydrogen v2 keeps its configured build path; otherwise each
          // bundle gets its own `build-<runtime>-<hash>.js` output file
          serverBuildPath: isHydrogen2
            ? relative(entrypointFsDirname, remixConfig.serverBuildPath)
            : `${relative(
                entrypointFsDirname,
                dirname(remixConfig.serverBuildPath)
              )}/build-${runtime}-${hash}.js`,
          routes: routes.map(r => r.id),
        };
      }
    );

    // We need to patch the `remix.config.js` file to force some values necessary
    // for a build that works on either Node.js or the Edge runtime
    if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
      await fs.rename(remixConfigPath, renamedRemixConfigPath);

      let patchedConfig: string;
      // Figure out if the `remix.config` file is using ESM syntax
      if (isESM(renamedRemixConfigPath)) {
        patchedConfig = `import config from './${basename(
          renamedRemixConfigPath
        )}';
config.serverBuildTarget = undefined;
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
config.serverPlatform = 'node';
config.serverBuildPath = undefined;
config.serverBundles = ${JSON.stringify(serverBundles)};
export default config;`;
      } else {
        patchedConfig = `const config = require('./${basename(
          renamedRemixConfigPath
        )}');
config.serverBuildTarget = undefined;
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
config.serverPlatform = 'node';
config.serverBuildPath = undefined;
config.serverBundles = ${JSON.stringify(serverBundles)};
module.exports = config;`;
      }
      await fs.writeFile(remixConfigPath, patchedConfig);
      remixConfigWrapped = true;
    }

    // For Hydrogen v2, patch the `server.ts` file to be Vercel-compatible
    if (isHydrogen2) {
      if (remixConfig.serverEntryPoint) {
        serverEntryPointAbs = join(
          entrypointFsDirname,
          remixConfig.serverEntryPoint
        );
        originalServerEntryPoint = await fs.readFile(
          serverEntryPointAbs,
          'utf8'
        );
        const patchedServerEntryPoint = patchHydrogenServer(
          project,
          serverEntryPointAbs
        );
        if (patchedServerEntryPoint) {
          debug(
            `Patched Hydrogen server file: ${remixConfig.serverEntryPoint}`
          );
          await fs.writeFile(serverEntryPointAbs, patchedServerEntryPoint);
        }
      } else {
        console.log('WARN: No "server" field found in Remix config');
      }
    }

    // Make `remix build` output production mode
    spawnOpts.env.NODE_ENV = 'production';

    // Run "Build Command"
    if (buildCommand) {
      debug(`Executing build command "${buildCommand}"`);
      await execCommand(buildCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    } else {
      if (hasScript('vercel-build', pkg)) {
        debug(`Executing "yarn vercel-build"`);
        await runPackageJsonScript(
          entrypointFsDirname,
          'vercel-build',
          spawnOpts
        );
      } else if (hasScript('build', pkg)) {
        debug(`Executing "yarn build"`);
        await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
      } else {
        await execCommand('remix build', {
          ...spawnOpts,
          cwd: entrypointFsDirname,
        });
      }
    }
  } finally {
    // Restore every file this builder mutated, even if the build failed
    const cleanupOps: Promise<void>[] = [];
    // Clean up our patched `remix.config.js` to be polite
    if (remixConfigWrapped && remixConfigPath && renamedRemixConfigPath) {
      cleanupOps.push(
        fs
          .rename(renamedRemixConfigPath, remixConfigPath)
          .then(() => debug(`Restored original "${remixConfigPath}" file`))
      );
    }
    // Restore original server entrypoint if it was modified (for Hydrogen v2)
    if (serverEntryPointAbs && originalServerEntryPoint) {
      cleanupOps.push(
        fs
          .writeFile(serverEntryPointAbs, originalServerEntryPoint)
          .then(() => debug(`Restored original "${serverEntryPointAbs}" file`))
      );
    }
    // Restore original `package.json` file and lockfile
    if (depsModified) {
      cleanupOps.push(
        fs
          .writeFile(packageJsonPath, pkgRaw)
          .then(() => debug(`Restored original "${packageJsonPath}" file`))
      );
      if (lockfilePath && lockfileRaw) {
        cleanupOps.push(
          fs
            .writeFile(lockfilePath, lockfileRaw)
            .then(() => debug(`Restored original "${lockfilePath}" file`))
        );
      }
    }
    await Promise.all(cleanupOps);
  }

  // This needs to happen before we run NFT to create the Node/Edge functions
  await Promise.all([
    ensureResolvable(
      entrypointFsDirname,
      repoRootPath,
      '@remix-run/server-runtime'
    ),
    !isHydrogen2
      ? ensureResolvable(entrypointFsDirname, repoRootPath, '@remix-run/node')
      : null,
  ]);

  const staticDir = join(entrypointFsDirname, 'public');

  // Glob static assets and create all render functions in parallel;
  // `functions` is positionally aligned with `serverBundles`
  const [staticFiles, buildAssets, ...functions] = await Promise.all([
    glob('**', staticDir),
    glob('**', remixConfig.assetsBuildDirectory),
    ...serverBundles.map(bundle => {
      const firstRoute = remixConfig.routes[bundle.routes[0]];
      const config = resolvedConfigsMap.get(firstRoute) ?? {
        runtime: 'nodejs',
      };

      if (config.runtime === 'edge') {
        return createRenderEdgeFunction(
          entrypointFsDirname,
          repoRootPath,
          join(entrypointFsDirname, bundle.serverBuildPath),
          serverEntryPoint,
          remixVersion,
          config
        );
      }

      return createRenderNodeFunction(
        nodeVersion,
        entrypointFsDirname,
        repoRootPath,
        join(entrypointFsDirname, bundle.serverBuildPath),
        serverEntryPoint,
        remixVersion,
        config
      );
    }),
  ]);

  // Serve compiled client assets under the configured public path
  const transformedBuildAssets = rename(buildAssets, name => {
    return posix.join('./', remixConfig.publicPath, name);
  });

  const output: BuildResultV2Typical['output'] = {
    ...staticFiles,
    ...transformedBuildAssets,
  };
  const routes: any[] = [
    {
      // Fingerprinted build assets are immutable — cache aggressively
      src: `^/${remixConfig.publicPath.replace(/^\/|\/$/g, '')}/(.*)$`,
      headers: { 'cache-control': 'public, max-age=31536000, immutable' },
      continue: true,
    },
    {
      handle: 'filesystem',
    },
  ];

  for (const route of remixRoutes) {
    // Layout routes don't get a function / route added
    if (isLayoutRoute(route.id, remixRoutes)) continue;

    const { path, rePath } = getPathFromRoute(route, remixConfig.routes);

    // If the route is a pathless layout route (at the root level)
    // and doesn't have any sub-routes, then a function should not be created.
    if (!path) {
      continue;
    }

    const funcIndex = serverBundles.findIndex(bundle => {
      return bundle.routes.includes(route.id);
    });
    const func = functions[funcIndex];
    if (!func) {
      throw new Error(`Could not determine server bundle for "${route.id}"`);
    }
    output[path] = func;

    // If this is a dynamic route then add a Vercel route
    const re = getRegExpFromPath(rePath);
    if (re) {
      routes.push({
        src: re.source,
        dest: path,
      });
    }
  }

  // Add a 404 path for not found pages to be server-side rendered by Remix.
  // Use an edge function bundle if one was generated, otherwise use Node.js.
  if (!output['404']) {
    const edgeFunctionIndex = Array.from(serverBundlesMap.values()).findIndex(
      routes => {
        const runtime = resolvedConfigsMap.get(routes[0])?.runtime;
        return runtime === 'edge';
      }
    );
    const func =
      edgeFunctionIndex !== -1 ? functions[edgeFunctionIndex] : functions[0];
    output['404'] = func;
  }
  routes.push({
    src: '/(.*)',
    dest: '/404',
  });

  return { routes, output, framework: { version: remixVersion } };
};
/** Returns `true` when `pkg` declares a script named `scriptName`. */
function hasScript(scriptName: string, pkg: PackageJson | null) {
  return typeof pkg?.scripts?.[scriptName] === 'string';
}
/**
 * Creates a Node.js serverless render function for one server bundle
 * (legacy compiler path). Generates a `server-<bundle>.mjs` wrapper from
 * the `server-node.mjs` template when no custom server entrypoint exists,
 * then traces the handler with `@vercel/nft` to collect its runtime files.
 */
async function createRenderNodeFunction(
  nodeVersion: NodeVersion,
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: ResolvedNodeRouteConfig
): Promise<NodejsLambda> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-node.mjs` file into the "build" directory
    const nodeServerSrc = await nodeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      nodeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
  });

  // Trace warnings are informational only on this path
  for (const warning of trace.warnings) {
    debug(`Warning from trace: ${warning.message}`);
  }

  for (const file of trace.fileList) {
    files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
  }

  const fn = new NodejsLambda({
    files,
    handler,
    runtime: nodeVersion.runtime,
    shouldAddHelpers: false,
    shouldAddSourcemapSupport: false,
    operationType: 'SSR',
    supportsResponseStreaming: true,
    regions: config.regions,
    memory: config.memory,
    maxDuration: config.maxDuration,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });
  return fn;
}
/**
 * Creates an Edge render function for one server bundle (legacy compiler
 * path). Generates a `server-<bundle>.mjs` wrapper from the
 * `server-edge.mjs` template when needed, then traces the handler with
 * `@vercel/nft` using edge-first resolution conditions, patching
 * `package.json` files on the fly so edge/browser variants are preferred.
 */
async function createRenderEdgeFunction(
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: ResolvedEdgeRouteConfig
): Promise<EdgeFunction> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-edge.mjs` file into the "build" directory
    const edgeServerSrc = await edgeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      edgeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }

  // Set inside the `readFile` hook below when `@remix-run/vercel` is seen;
  // later substituted into the output file set as a FileBlob
  let remixRunVercelPkgJson: string | undefined;

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
    conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
    // Custom reader so `package.json` contents can be rewritten before
    // nft resolves each package's entry point
    async readFile(fsPath) {
      let source: Buffer | string;
      try {
        source = await fs.readFile(fsPath);
      } catch (err: any) {
        if (err.code === 'ENOENT' || err.code === 'EISDIR') {
          // Tell nft the path does not resolve to a readable file
          return null;
        }
        throw err;
      }
      if (basename(fsPath) === 'package.json') {
        // For Edge Functions, patch "main" field to prefer "browser" or "module"
        const pkgJson = JSON.parse(source.toString());

        // When `@remix-run/vercel` is detected, we need to modify the `package.json`
        // to include the "browser" field so that the proper Edge entrypoint file
        // is used. This is a temporary stop gap until this PR is merged:
        // https://github.com/remix-run/remix/pull/5537
        if (pkgJson.name === '@remix-run/vercel') {
          pkgJson.browser = 'dist/edge.js';
          pkgJson.dependencies['@remix-run/server-runtime'] =
            pkgJson.dependencies['@remix-run/node'];

          if (!remixRunVercelPkgJson) {
            remixRunVercelPkgJson = JSON.stringify(pkgJson, null, 2) + '\n';

            // Copy in the edge entrypoint so that NFT can properly resolve it
            const vercelEdgeEntrypointPath = join(
              DEFAULTS_PATH,
              'vercel-edge-entrypoint.js'
            );
            const vercelEdgeEntrypointDest = join(
              dirname(fsPath),
              'dist/edge.js'
            );
            await fs.copyFile(
              vercelEdgeEntrypointPath,
              vercelEdgeEntrypointDest
            );
          }
        }

        for (const prop of ['browser', 'module']) {
          const val = pkgJson[prop];
          if (typeof val === 'string') {
            pkgJson.main = val;
            // Return the modified `package.json` to nft
            source = JSON.stringify(pkgJson);
            break;
          }
        }
      }
      return source;
    },
  });

  // Trace warnings are informational only on this path
  for (const warning of trace.warnings) {
    debug(`Warning from trace: ${warning.message}`);
  }

  for (const file of trace.fileList) {
    if (
      remixRunVercelPkgJson &&
      file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
    ) {
      // Use the modified `@remix-run/vercel` package.json which contains "browser" field
      files[file] = new FileBlob({ data: remixRunVercelPkgJson });
    } else {
      files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
    }
  }

  const fn = new EdgeFunction({
    files,
    deploymentTarget: 'v8-worker',
    entrypoint: handler,
    regions: config.regions,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });
  return fn;
}
/**
 * Writes a generated server entrypoint to `path`, translating a missing
 * parent directory (ENOENT) into a user-friendly error that names the
 * directory relative to `rootDir`. Any other write failure is rethrown.
 */
async function writeEntrypointFile(
  path: string,
  data: string,
  rootDir: string
) {
  try {
    await fs.writeFile(path, data);
  } catch (err: any) {
    if (err.code !== 'ENOENT') throw err;
    const missingDir = relative(rootDir, dirname(path));
    throw new Error(
      `The "${missingDir}" directory does not exist. Please contact support at https://vercel.com/help.`
    );
  }
}

View File

@@ -2,7 +2,7 @@ import semver from 'semver';
import { existsSync, promises as fs } from 'fs';
import { basename, dirname, join, relative, resolve, sep } from 'path';
import { pathToRegexp, Key } from 'path-to-regexp';
import { debug, type PackageJson } from '@vercel/build-utils';
import { debug } from '@vercel/build-utils';
import { walkParentDirs } from '@vercel/build-utils';
import { createRequire } from 'module';
import type {
@@ -58,12 +58,8 @@ export function findEntry(dir: string, basename: string): string | undefined {
const configExts = ['.js', '.cjs', '.mjs'];
export function findConfig(
dir: string,
basename: string,
exts = configExts
): string | undefined {
for (const ext of exts) {
export function findConfig(dir: string, basename: string): string | undefined {
for (const ext of configExts) {
const name = basename + ext;
const file = join(dir, name);
if (existsSync(file)) return file;
@@ -359,7 +355,6 @@ async function ensureSymlink(
}
}
await fs.mkdir(symlinkDir, { recursive: true });
await fs.symlink(relativeTarget, symlinkPath);
debug(`Created symlink for "${pkgName}"`);
}
@@ -374,49 +369,3 @@ export function isESM(path: string): boolean {
}
return isESM;
}
/** Returns `true` when `pkg` declares a script named `scriptName`. */
export function hasScript(scriptName: string, pkg?: PackageJson) {
  return typeof pkg?.scripts?.[scriptName] === 'string';
}
/**
 * Resolves the installed version of `@remix-run/dev` as seen from `dir`.
 *
 * Resolves the package entry via `require.resolve`, then walks parent
 * directories (bounded by `base`) to locate its `package.json` and read
 * the "version" field.
 *
 * @param dir - Directory from which to resolve `@remix-run/dev`
 * @param base - Upper bound directory for the parent-dir walk
 * @throws When the `package.json` cannot be found or lacks a string "version"
 */
export async function getRemixVersion(
  dir: string,
  base: string
): Promise<string> {
  const resolvedPath = require_.resolve('@remix-run/dev', { paths: [dir] });
  const pkgPath = await walkParentDirs({
    base,
    start: dirname(resolvedPath),
    filename: 'package.json',
  });
  if (!pkgPath) {
    throw new Error(
      `Failed to find \`package.json\` file for "@remix-run/dev"`
    );
  }
  const { version } = JSON.parse(
    await fs.readFile(pkgPath, 'utf8')
  ) as PackageJson;
  if (typeof version !== 'string') {
    throw new Error(`Missing "version" field`);
  }
  return version;
}
/**
 * Reports `@vercel/nft` trace warnings. Unresolved-dependency warnings are
 * printed as `WARN:` to the console — except when the unresolved package is
 * `required`, which is a hard failure. All other warnings go to debug output.
 */
export function logNftWarnings(warnings: Set<Error>, required?: string) {
  for (const warning of warnings) {
    const match = warning.message.match(/^Failed to resolve dependency "(.+)"/);
    if (!match) {
      debug(`Warning from trace: ${warning.message}`);
      continue;
    }
    if (match[1] === required) {
      throw new Error(
        `Missing required "${required}" package. Please add it to your \`package.json\` file.`
      );
    }
    console.warn(`WARN: ${match[0]}`);
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,30 +0,0 @@
node_modules
/.cache
/build
/public/build
/api/_build
.eslintcache
tsconfig.tsbuildinfo
.vercel
.env
fly.wireguard.conf
pc-api-8703296620103690660-58-0d0aff733697.json
/.vscode/**/*
tailwind.css
.cache/*
.cache
.data/*
.data
.turbo
.turbo/*
turbo-build.log
turbo-*.log
.expo
pc-api-8703296620103690660-58-a47abfbe9daa.json
dist

View File

@@ -1,12 +0,0 @@
# Expo doesn't play nice with pnpm by default.
# The symbolic links of pnpm break the rules of Expo monorepos.
# @link https://docs.expo.dev/guides/monorepos/#common-issues
node-linker=hoisted
# In order to cache Prisma correctly
public-hoist-pattern[]=*prisma*
public-hoist-pattern[]=*uuid*
# FIXME: @prisma/client is required by the @acme/auth,
# but we don't want it installed there since it's already
# installed in the @acme/db package
strict-peer-dependencies=false

View File

@@ -1,9 +0,0 @@
node_modules
/build
/dist
/public/build
.env
/postgres-data

View File

@@ -1,9 +0,0 @@
{
"trailingComma": "all",
"tabWidth": 2,
"semi": false,
"singleQuote": false,
"printWidth": 130,
"plugins": ["prettier-plugin-tailwindcss"],
"tailwindFunctions": ["merge", "join", "cva"]
}

View File

@@ -1,6 +0,0 @@
/**
 * ESLint config that layers the shared `commit` ruleset on top of this
 * package's base `.eslintrc.js` (presumably used for commit-time linting —
 * TODO confirm against the lint scripts that reference it).
 * @type {import('@types/eslint').Linter.BaseConfig}
 */
module.exports = {
  extends: ["@ramble/eslint-config/commit", "./.eslintrc.js"],
}

View File

@@ -1,32 +0,0 @@
// Readable aliases for ESLint severity levels
const OFF = "off"
const ERROR = "error"

/**
 * Base ESLint config: TypeScript parsing plus React / React Hooks rules,
 * with Prettier last to disable conflicting formatting rules.
 * @type {import('@types/eslint').Linter.BaseConfig}
 */
module.exports = {
  root: true,
  parser: "@typescript-eslint/parser",
  plugins: ["@typescript-eslint"],
  extends: [
    "plugin:@typescript-eslint/recommended",
    "plugin:react/recommended",
    "plugin:react-hooks/recommended",
    "plugin:react/jsx-runtime",
    "prettier",
  ],
  rules: {
    "@typescript-eslint/no-non-null-assertion": OFF,
    "@typescript-eslint/no-var-requires": ERROR,
    "@typescript-eslint/no-non-null-asserted-optional-chain": OFF,
    "react/function-component-definition": ERROR,
    // Unused vars are errors, but `_`-prefixed names and all args are exempt
    "@typescript-eslint/no-unused-vars": [ERROR, { args: "none", argsIgnorePattern: "^_", varsIgnorePattern: "^_" }],
    "react/prop-types": OFF,
    "react/no-unescaped-entities": OFF,
  },
  settings: {
    react: {
      // Auto-detect the installed React version
      version: "detect",
    },
  },
}

Some files were not shown because too many files have changed in this diff Show More