mirror of
https://github.com/LukeHagar/vercel.git
synced 2025-12-11 12:57:46 +00:00
Compare commits
59 Commits
@vercel/py
...
@vercel/bu
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
96117d3f17 | ||
|
|
20237d4f7b | ||
|
|
293770a2f6 | ||
|
|
c9c0a203cc | ||
|
|
5064dd404d | ||
|
|
1a45731c92 | ||
|
|
168f9578cf | ||
|
|
c2728ef9c0 | ||
|
|
c5fe7c2bea | ||
|
|
b1d8b83abb | ||
|
|
24ec5c5aca | ||
|
|
37b193c845 | ||
|
|
f8fab639bf | ||
|
|
6ed0fe6fb1 | ||
|
|
de2738ba06 | ||
|
|
1333071a3a | ||
|
|
c2d99855ea | ||
|
|
0d112c848a | ||
|
|
d17abf463a | ||
|
|
440ef3ba98 | ||
|
|
38c5e93625 | ||
|
|
4d51d777fe | ||
|
|
1fee87e76f | ||
|
|
ea0e9aeaec | ||
|
|
7910f2f307 | ||
|
|
670441620f | ||
|
|
bfc01fd98f | ||
|
|
6d74b9b61a | ||
|
|
9483d49f72 | ||
|
|
6740f9b155 | ||
|
|
b663f813e1 | ||
|
|
e318a0eea5 | ||
|
|
644721a90d | ||
|
|
e109e3325a | ||
|
|
92b2fbe372 | ||
|
|
e50fe2b37c | ||
|
|
678ebbe525 | ||
|
|
10e200e0bf | ||
|
|
24c3dd282d | ||
|
|
142a397d8e | ||
|
|
0dd9a27859 | ||
|
|
b6ed28b9b1 | ||
|
|
0d034b6820 | ||
|
|
05c8be1a6d | ||
|
|
d21bb9f87e | ||
|
|
ab24444660 | ||
|
|
20080d4ae7 | ||
|
|
c32a909afc | ||
|
|
abaa700cea | ||
|
|
8ba0ce9324 | ||
|
|
4027a18337 | ||
|
|
3bad73401b | ||
|
|
50e135ea47 | ||
|
|
d05e41eeaf | ||
|
|
de63e35622 | ||
|
|
4d1ab422d3 | ||
|
|
a03cfa1040 | ||
|
|
eaae86d776 | ||
|
|
77bc00f92e |
@@ -38,7 +38,7 @@ packages/static-build/test/cache-fixtures
|
||||
packages/redwood/test/fixtures
|
||||
|
||||
# remix
|
||||
packages/remix/test/fixtures
|
||||
packages/remix/test/fixtures-*
|
||||
|
||||
# gatsby-plugin-vercel-analytics
|
||||
packages/gatsby-plugin-vercel-analytics
|
||||
|
||||
79
.github/dependabot.yml
vendored
Normal file
79
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
version: 2
|
||||
|
||||
updates:
|
||||
- schedule:
|
||||
interval: 'daily'
|
||||
open-pull-requests-limit: 1
|
||||
reviewers:
|
||||
- 'trek'
|
||||
- 'TooTallNate'
|
||||
- 'EndangeredMassa'
|
||||
commit-message:
|
||||
prefix: '[framework-fixtures]'
|
||||
package-ecosystem: 'npm'
|
||||
directory: /packages/static-build/test/fixtures/angular-v17
|
||||
allow:
|
||||
- dependency-name: '@angular*'
|
||||
ignore:
|
||||
- dependency-name: '@angular*'
|
||||
update-types:
|
||||
['version-update:semver-major', 'version-update:semver-patch']
|
||||
groups:
|
||||
core:
|
||||
patterns:
|
||||
- '@angular*'
|
||||
update-types:
|
||||
- 'minor'
|
||||
|
||||
- schedule:
|
||||
interval: 'daily'
|
||||
open-pull-requests-limit: 1
|
||||
reviewers:
|
||||
- 'trek'
|
||||
- 'TooTallNate'
|
||||
- 'EndangeredMassa'
|
||||
commit-message:
|
||||
prefix: '[framework-fixtures]'
|
||||
package-ecosystem: 'npm'
|
||||
directory: /packages/static-build/test/fixtures/astro-v4
|
||||
allow:
|
||||
- dependency-name: 'astro*'
|
||||
ignore:
|
||||
- dependency-name: 'astro*'
|
||||
update-types:
|
||||
['version-update:semver-major', 'version-update:semver-patch']
|
||||
groups:
|
||||
core:
|
||||
patterns:
|
||||
- 'astro*'
|
||||
update-types:
|
||||
- 'minor'
|
||||
|
||||
- schedule:
|
||||
interval: 'daily'
|
||||
open-pull-requests-limit: 1
|
||||
reviewers:
|
||||
- 'trek'
|
||||
- 'TooTallNate'
|
||||
- 'EndangeredMassa'
|
||||
commit-message:
|
||||
prefix: '[framework-fixtures]'
|
||||
package-ecosystem: 'npm'
|
||||
directory: /packages/static-build/test/fixtures/hydrogen-v2023
|
||||
allow:
|
||||
- dependency-name: '@remix-run*'
|
||||
- dependency-name: '@shopify*'
|
||||
ignore:
|
||||
- dependency-name: '@remix-run*'
|
||||
update-types:
|
||||
['version-update:semver-major', 'version-update:semver-patch']
|
||||
- dependency-name: '@shopify*'
|
||||
update-types:
|
||||
['version-update:semver-major', 'version-update:semver-patch']
|
||||
groups:
|
||||
core:
|
||||
patterns:
|
||||
- '@remix-run*'
|
||||
- '@shopify*'
|
||||
update-types:
|
||||
- 'minor'
|
||||
@@ -29,6 +29,7 @@ turbo-cache-key.json
|
||||
packages/*/dist
|
||||
packages/*/node_modules
|
||||
packages/**/test/fixtures
|
||||
packages/**/test/fixtures-*
|
||||
packages/**/test/dev/fixtures
|
||||
packages/**/test/build-fixtures
|
||||
packages/**/test/cache-fixtures
|
||||
|
||||
@@ -27,3 +27,6 @@ end
|
||||
|
||||
# Performance-booster for watching directories on Windows
|
||||
gem "wdm", "~> 0.1.1", :platforms => [:mingw, :x64_mingw, :mswin]
|
||||
|
||||
# Webrick not installed by default in Ruby 3.0+
|
||||
gem "webrick"
|
||||
|
||||
@@ -1,20 +1,20 @@
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
addressable (2.7.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
addressable (2.8.6)
|
||||
public_suffix (>= 2.0.2, < 6.0)
|
||||
colorator (1.1.0)
|
||||
concurrent-ruby (1.1.8)
|
||||
em-websocket (0.5.2)
|
||||
concurrent-ruby (1.2.3)
|
||||
em-websocket (0.5.3)
|
||||
eventmachine (>= 0.12.9)
|
||||
http_parser.rb (~> 0.6.0)
|
||||
http_parser.rb (~> 0)
|
||||
eventmachine (1.2.7)
|
||||
ffi (1.14.2)
|
||||
ffi (1.16.3)
|
||||
forwardable-extended (2.6.0)
|
||||
http_parser.rb (0.6.0)
|
||||
i18n (1.8.9)
|
||||
http_parser.rb (0.8.0)
|
||||
i18n (1.14.1)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jekyll (4.2.0)
|
||||
jekyll (4.2.2)
|
||||
addressable (~> 2.4)
|
||||
colorator (~> 1.0)
|
||||
em-websocket (~> 0.5)
|
||||
@@ -29,20 +29,20 @@ GEM
|
||||
rouge (~> 3.0)
|
||||
safe_yaml (~> 1.0)
|
||||
terminal-table (~> 2.0)
|
||||
jekyll-feed (0.15.1)
|
||||
jekyll-feed (0.17.0)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-sass-converter (2.1.0)
|
||||
jekyll-sass-converter (2.2.0)
|
||||
sassc (> 2.0.1, < 3.0)
|
||||
jekyll-seo-tag (2.7.1)
|
||||
jekyll-seo-tag (2.8.0)
|
||||
jekyll (>= 3.8, < 5.0)
|
||||
jekyll-watch (2.2.1)
|
||||
listen (~> 3.0)
|
||||
kramdown (2.3.0)
|
||||
kramdown (2.4.0)
|
||||
rexml
|
||||
kramdown-parser-gfm (1.1.0)
|
||||
kramdown (~> 2.0)
|
||||
liquid (4.0.3)
|
||||
listen (3.4.1)
|
||||
liquid (4.0.4)
|
||||
listen (3.9.0)
|
||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
||||
rb-inotify (~> 0.9, >= 0.9.10)
|
||||
mercenary (0.4.0)
|
||||
@@ -52,21 +52,22 @@ GEM
|
||||
jekyll-seo-tag (~> 2.1)
|
||||
pathutil (0.16.2)
|
||||
forwardable-extended (~> 2.6)
|
||||
public_suffix (4.0.6)
|
||||
rb-fsevent (0.10.4)
|
||||
public_suffix (5.0.4)
|
||||
rb-fsevent (0.11.2)
|
||||
rb-inotify (0.10.1)
|
||||
ffi (~> 1.0)
|
||||
rexml (3.2.4)
|
||||
rouge (3.26.0)
|
||||
rexml (3.2.6)
|
||||
rouge (3.30.0)
|
||||
safe_yaml (1.0.5)
|
||||
sassc (2.4.0)
|
||||
ffi (~> 1.9)
|
||||
terminal-table (2.0.0)
|
||||
unicode-display_width (~> 1.1, >= 1.1.1)
|
||||
unicode-display_width (1.7.0)
|
||||
unicode-display_width (1.8.0)
|
||||
webrick (1.8.1)
|
||||
|
||||
PLATFORMS
|
||||
x86_64-linux
|
||||
ruby
|
||||
|
||||
DEPENDENCIES
|
||||
jekyll (~> 4.2.0)
|
||||
@@ -75,6 +76,7 @@ DEPENDENCIES
|
||||
tzinfo (~> 1.2)
|
||||
tzinfo-data
|
||||
wdm (~> 0.1.1)
|
||||
webrick
|
||||
|
||||
BUNDLED WITH
|
||||
2.2.4
|
||||
2.5.6
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
source 'https://rubygems.org'
|
||||
|
||||
gem 'middleman', '~> 4.2'
|
||||
gem 'middleman-autoprefixer', '~> 2.7'
|
||||
gem 'middleman', '~> 4.5'
|
||||
gem 'middleman-autoprefixer', '~> 3.0'
|
||||
gem 'tzinfo-data', platforms: [:mswin, :mingw, :jruby, :x64_mingw]
|
||||
gem 'wdm', '~> 0.1', platforms: [:mswin, :mingw, :x64_mingw]
|
||||
|
||||
@@ -1,59 +1,60 @@
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
activesupport (5.2.4.5)
|
||||
activesupport (7.0.8.1)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 0.7, < 2)
|
||||
minitest (~> 5.1)
|
||||
tzinfo (~> 1.1)
|
||||
addressable (2.7.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
autoprefixer-rails (9.8.6.5)
|
||||
execjs
|
||||
backports (3.20.2)
|
||||
i18n (>= 1.6, < 2)
|
||||
minitest (>= 5.1)
|
||||
tzinfo (~> 2.0)
|
||||
addressable (2.8.6)
|
||||
public_suffix (>= 2.0.2, < 6.0)
|
||||
autoprefixer-rails (10.4.16.0)
|
||||
execjs (~> 2)
|
||||
backports (3.24.1)
|
||||
coffee-script (2.4.1)
|
||||
coffee-script-source
|
||||
execjs
|
||||
coffee-script-source (1.12.2)
|
||||
concurrent-ruby (1.1.8)
|
||||
contracts (0.13.0)
|
||||
dotenv (2.7.6)
|
||||
concurrent-ruby (1.2.3)
|
||||
contracts (0.16.1)
|
||||
dotenv (3.1.0)
|
||||
erubis (2.7.0)
|
||||
execjs (2.7.0)
|
||||
fast_blank (1.0.0)
|
||||
fastimage (2.2.2)
|
||||
ffi (1.14.2)
|
||||
haml (5.2.1)
|
||||
temple (>= 0.8.0)
|
||||
execjs (2.9.1)
|
||||
fast_blank (1.0.1)
|
||||
fastimage (2.3.0)
|
||||
ffi (1.16.3)
|
||||
haml (6.3.0)
|
||||
temple (>= 0.8.2)
|
||||
thor
|
||||
tilt
|
||||
hamster (3.0.0)
|
||||
concurrent-ruby (~> 1.0)
|
||||
hashie (3.6.0)
|
||||
i18n (0.9.5)
|
||||
i18n (1.6.0)
|
||||
concurrent-ruby (~> 1.0)
|
||||
kramdown (2.3.0)
|
||||
kramdown (2.4.0)
|
||||
rexml
|
||||
listen (3.0.8)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
listen (3.9.0)
|
||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
||||
rb-inotify (~> 0.9, >= 0.9.10)
|
||||
memoist (0.16.2)
|
||||
middleman (4.3.11)
|
||||
middleman (4.5.1)
|
||||
coffee-script (~> 2.2)
|
||||
haml (>= 4.0.5)
|
||||
kramdown (>= 2.3.0)
|
||||
middleman-cli (= 4.3.11)
|
||||
middleman-core (= 4.3.11)
|
||||
middleman-autoprefixer (2.10.1)
|
||||
autoprefixer-rails (~> 9.1)
|
||||
middleman-core (>= 3.3.3)
|
||||
middleman-cli (4.3.11)
|
||||
thor (>= 0.17.0, < 2.0)
|
||||
middleman-core (4.3.11)
|
||||
activesupport (>= 4.2, < 6.0)
|
||||
addressable (~> 2.3)
|
||||
middleman-cli (= 4.5.1)
|
||||
middleman-core (= 4.5.1)
|
||||
middleman-autoprefixer (3.0.0)
|
||||
autoprefixer-rails (~> 10.0)
|
||||
middleman-core (>= 4.0.0)
|
||||
middleman-cli (4.5.1)
|
||||
thor (>= 0.17.0, < 1.3.0)
|
||||
middleman-core (4.5.1)
|
||||
activesupport (>= 6.1, < 7.1)
|
||||
addressable (~> 2.4)
|
||||
backports (~> 3.6)
|
||||
bundler
|
||||
contracts (~> 0.13.0)
|
||||
bundler (~> 2.0)
|
||||
contracts (~> 0.13, < 0.17)
|
||||
dotenv
|
||||
erubis
|
||||
execjs (~> 2.0)
|
||||
@@ -61,48 +62,52 @@ GEM
|
||||
fastimage (~> 2.0)
|
||||
hamster (~> 3.0)
|
||||
hashie (~> 3.4)
|
||||
i18n (~> 0.9.0)
|
||||
listen (~> 3.0.0)
|
||||
i18n (~> 1.6.0)
|
||||
listen (~> 3.0)
|
||||
memoist (~> 0.14)
|
||||
padrino-helpers (~> 0.13.0)
|
||||
padrino-helpers (~> 0.15.0)
|
||||
parallel
|
||||
rack (>= 1.4.5, < 3)
|
||||
sassc (~> 2.0)
|
||||
servolux
|
||||
tilt (~> 2.0.9)
|
||||
toml
|
||||
uglifier (~> 3.0)
|
||||
minitest (5.14.3)
|
||||
padrino-helpers (0.13.3.4)
|
||||
i18n (~> 0.6, >= 0.6.7)
|
||||
padrino-support (= 0.13.3.4)
|
||||
webrick
|
||||
minitest (5.22.2)
|
||||
padrino-helpers (0.15.3)
|
||||
i18n (>= 0.6.7, < 2)
|
||||
padrino-support (= 0.15.3)
|
||||
tilt (>= 1.4.1, < 3)
|
||||
padrino-support (0.13.3.4)
|
||||
activesupport (>= 3.1)
|
||||
parallel (1.20.1)
|
||||
public_suffix (4.0.6)
|
||||
rack (2.2.3)
|
||||
rb-fsevent (0.10.4)
|
||||
padrino-support (0.15.3)
|
||||
parallel (1.24.0)
|
||||
parslet (2.0.0)
|
||||
public_suffix (5.0.4)
|
||||
rack (2.2.8.1)
|
||||
rb-fsevent (0.11.2)
|
||||
rb-inotify (0.10.1)
|
||||
ffi (~> 1.0)
|
||||
rexml (3.2.4)
|
||||
rexml (3.2.6)
|
||||
sassc (2.4.0)
|
||||
ffi (~> 1.9)
|
||||
servolux (0.13.0)
|
||||
temple (0.8.2)
|
||||
thor (1.1.0)
|
||||
thread_safe (0.3.6)
|
||||
tilt (2.0.10)
|
||||
tzinfo (1.2.9)
|
||||
thread_safe (~> 0.1)
|
||||
temple (0.10.3)
|
||||
thor (1.2.2)
|
||||
tilt (2.0.11)
|
||||
toml (0.3.0)
|
||||
parslet (>= 1.8.0, < 3.0.0)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
uglifier (3.2.0)
|
||||
execjs (>= 0.3.0, < 3)
|
||||
webrick (1.8.1)
|
||||
|
||||
PLATFORMS
|
||||
x86_64-linux
|
||||
|
||||
DEPENDENCIES
|
||||
middleman (~> 4.2)
|
||||
middleman-autoprefixer (~> 2.7)
|
||||
middleman (~> 4.5)
|
||||
middleman-autoprefixer (~> 3.0)
|
||||
tzinfo-data
|
||||
wdm (~> 0.1)
|
||||
|
||||
|
||||
907
examples/nextjs/package-lock.json
generated
907
examples/nextjs/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -11,7 +11,7 @@
|
||||
"dependencies": {
|
||||
"react": "^18",
|
||||
"react-dom": "^18",
|
||||
"next": "14.1.0"
|
||||
"next": "14.1.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "^5",
|
||||
@@ -22,6 +22,6 @@
|
||||
"postcss": "^8",
|
||||
"tailwindcss": "^3.3.0",
|
||||
"eslint": "^8",
|
||||
"eslint-config-next": "14.1.0"
|
||||
"eslint-config-next": "14.1.2"
|
||||
}
|
||||
}
|
||||
|
||||
2
examples/package.json
vendored
2
examples/package.json
vendored
@@ -9,7 +9,7 @@
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.4.1",
|
||||
"@vercel/frameworks": "2.0.6"
|
||||
"@vercel/frameworks": "3.0.0"
|
||||
},
|
||||
"version": null
|
||||
}
|
||||
|
||||
2
examples/remix/.gitignore
vendored
2
examples/remix/.gitignore
vendored
@@ -1,7 +1,5 @@
|
||||
node_modules
|
||||
|
||||
/.cache
|
||||
/build
|
||||
/public/build
|
||||
.env
|
||||
.vercel
|
||||
|
||||
@@ -1,20 +1,13 @@
|
||||
import { cssBundleHref } from "@remix-run/css-bundle";
|
||||
import {
|
||||
Links,
|
||||
LiveReload,
|
||||
Meta,
|
||||
Outlet,
|
||||
Scripts,
|
||||
ScrollRestoration,
|
||||
} from "@remix-run/react";
|
||||
import { Analytics } from "@vercel/analytics/react";
|
||||
import type { LinksFunction } from "@vercel/remix";
|
||||
|
||||
export const links: LinksFunction = () => [
|
||||
...(cssBundleHref ? [{ rel: "stylesheet", href: cssBundleHref }] : []),
|
||||
];
|
||||
|
||||
export default function App() {
|
||||
export function Layout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
@@ -24,12 +17,15 @@ export default function App() {
|
||||
<Links />
|
||||
</head>
|
||||
<body>
|
||||
<Outlet />
|
||||
{children}
|
||||
<ScrollRestoration />
|
||||
<Scripts />
|
||||
<LiveReload />
|
||||
<Analytics />
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
|
||||
export default function App() {
|
||||
return <Outlet />;
|
||||
}
|
||||
|
||||
@@ -4,29 +4,29 @@
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "remix build",
|
||||
"dev": "remix dev --manual",
|
||||
"start": "remix-serve ./build/index.js",
|
||||
"build": "remix vite:build",
|
||||
"dev": "remix vite:dev",
|
||||
"typecheck": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"@remix-run/css-bundle": "^2.0.0",
|
||||
"@remix-run/node": "^2.0.0",
|
||||
"@remix-run/react": "^2.0.0",
|
||||
"@remix-run/serve": "^2.0.0",
|
||||
"@vercel/analytics": "^1.0.2",
|
||||
"@vercel/remix": "^2.0.0",
|
||||
"isbot": "^3.6.8",
|
||||
"@remix-run/node": "^2.8.0",
|
||||
"@remix-run/react": "^2.8.0",
|
||||
"@remix-run/server-runtime": "^2.8.0",
|
||||
"@vercel/analytics": "^1.2.2",
|
||||
"@vercel/remix": "^2.8.0",
|
||||
"isbot": "^4",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@remix-run/dev": "^2.0.0",
|
||||
"@remix-run/eslint-config": "^2.0.0",
|
||||
"@remix-run/dev": "^2.8.0",
|
||||
"@remix-run/eslint-config": "^2.8.0",
|
||||
"@types/react": "^18.2.20",
|
||||
"@types/react-dom": "^18.2.7",
|
||||
"eslint": "^8.38.0",
|
||||
"typescript": "^5.1.6"
|
||||
"typescript": "^5.1.6",
|
||||
"vite": "^5.1.0",
|
||||
"vite-tsconfig-paths": "^4.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
|
||||
@@ -1,22 +1,25 @@
|
||||
{
|
||||
"include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
|
||||
"include": ["**/*.ts", "**/*.tsx"],
|
||||
"compilerOptions": {
|
||||
"lib": ["DOM", "DOM.Iterable", "ES2022"],
|
||||
"types": ["@vercel/remix", "node", "vite/client"],
|
||||
"isolatedModules": true,
|
||||
"esModuleInterop": true,
|
||||
"jsx": "react-jsx",
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"resolveJsonModule": true,
|
||||
"target": "ES2022",
|
||||
"strict": true,
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"~/*": ["./app/*"]
|
||||
},
|
||||
|
||||
// Remix takes care of building everything in `remix build`.
|
||||
// Vite takes care of building everything, not tsc.
|
||||
"noEmit": true
|
||||
}
|
||||
}
|
||||
|
||||
11
examples/remix/vite.config.ts
Normal file
11
examples/remix/vite.config.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
import { vitePlugin as remix } from "@remix-run/dev";
|
||||
import { installGlobals } from "@remix-run/node";
|
||||
import { defineConfig } from "vite";
|
||||
import { vercelPreset } from '@vercel/remix/vite';
|
||||
import tsconfigPaths from "vite-tsconfig-paths";
|
||||
|
||||
installGlobals();
|
||||
|
||||
export default defineConfig({
|
||||
plugins: [remix({ presets: [vercelPreset()] }), tsconfigPaths()],
|
||||
});
|
||||
@@ -1,5 +1,26 @@
|
||||
# @vercel-internals/types
|
||||
|
||||
## 1.0.24
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
|
||||
- @vercel/build-utils@7.7.1
|
||||
|
||||
## 1.0.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
|
||||
## 1.0.22
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
|
||||
- @vercel/build-utils@7.6.0
|
||||
|
||||
## 1.0.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
7
internals/types/index.d.ts
vendored
7
internals/types/index.d.ts
vendored
@@ -5,7 +5,12 @@ import type { Route } from '@vercel/routing-utils';
|
||||
import { PROJECT_ENV_TARGET } from '@vercel-internals/constants';
|
||||
|
||||
export type ProjectEnvTarget = typeof PROJECT_ENV_TARGET[number];
|
||||
export type ProjectEnvType = 'plain' | 'secret' | 'encrypted' | 'system';
|
||||
export type ProjectEnvType =
|
||||
| 'plain'
|
||||
| 'secret'
|
||||
| 'encrypted'
|
||||
| 'system'
|
||||
| 'sensitive';
|
||||
|
||||
export type ProjectSettings = import('@vercel/build-utils').ProjectSettings;
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "@vercel-internals/types",
|
||||
"version": "1.0.21",
|
||||
"version": "1.0.24",
|
||||
"types": "index.d.ts",
|
||||
"main": "index.d.ts",
|
||||
"files": [
|
||||
@@ -10,7 +10,7 @@
|
||||
"dependencies": {
|
||||
"@types/node": "14.14.31",
|
||||
"@vercel-internals/constants": "1.0.4",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"@vercel/routing-utils": "3.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"source-map-support": "0.5.12",
|
||||
"ts-eager": "2.0.2",
|
||||
"ts-jest": "29.1.0",
|
||||
"turbo": "1.11.3",
|
||||
"turbo": "1.12.4",
|
||||
"typescript": "4.9.5"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,31 @@
|
||||
# @vercel/build-utils
|
||||
|
||||
## 7.7.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
|
||||
|
||||
## 7.7.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Revert "Revert "Default ruby to only currently supported version (3.2.0)"" ([#11137](https://github.com/vercel/vercel/pull/11137))
|
||||
|
||||
## 7.6.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Revert "Default ruby to only currently supported version (3.2.0)" ([#11135](https://github.com/vercel/vercel/pull/11135))
|
||||
|
||||
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
|
||||
|
||||
- [build-utils] change default package manager when no lockfile detected from `yarn` to `npm` (gated behind feature flag) ([#11131](https://github.com/vercel/vercel/pull/11131))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Update internal type for variants ([#11111](https://github.com/vercel/vercel/pull/11111))
|
||||
|
||||
## 7.5.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/build-utils",
|
||||
"version": "7.5.1",
|
||||
"version": "7.7.1",
|
||||
"license": "Apache-2.0",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.js",
|
||||
|
||||
@@ -302,7 +302,7 @@ export async function scanParentDirs(
|
||||
});
|
||||
let lockfilePath: string | undefined;
|
||||
let lockfileVersion: number | undefined;
|
||||
let cliType: CliType = 'yarn';
|
||||
let cliType: CliType;
|
||||
|
||||
const [hasYarnLock, packageLockJson, pnpmLockYaml, bunLockBin] =
|
||||
await Promise.all([
|
||||
@@ -338,6 +338,12 @@ export async function scanParentDirs(
|
||||
lockfilePath = bunLockPath;
|
||||
// TODO: read "bun-lockfile-format-v0"
|
||||
lockfileVersion = 0;
|
||||
} else {
|
||||
if (process.env.VERCEL_ENABLE_NPM_DEFAULT === '1') {
|
||||
cliType = 'npm';
|
||||
} else {
|
||||
cliType = 'yarn';
|
||||
}
|
||||
}
|
||||
|
||||
const packageJsonPath = pkgJsonPath || undefined;
|
||||
@@ -414,6 +420,14 @@ export async function runNpmInstall(
|
||||
destPath
|
||||
);
|
||||
|
||||
if (!packageJsonPath) {
|
||||
debug(
|
||||
`Skipping dependency installation because no package.json was found for ${destPath}`
|
||||
);
|
||||
runNpmInstallSema.release();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Only allow `runNpmInstall()` to run once per `package.json`
|
||||
// when doing a default install (no additional args)
|
||||
if (meta && packageJsonPath && args.length === 0) {
|
||||
|
||||
@@ -14,7 +14,7 @@ export const functionsSchema = {
|
||||
},
|
||||
memory: {
|
||||
minimum: 128,
|
||||
maximum: 3008,
|
||||
maximum: 3009,
|
||||
},
|
||||
maxDuration: {
|
||||
type: 'number',
|
||||
|
||||
@@ -440,7 +440,9 @@ export interface Cron {
|
||||
schedule: string;
|
||||
}
|
||||
|
||||
// TODO: Proper description once complete
|
||||
/**
|
||||
* @deprecated Replaced by Variants. Remove once fully replaced.
|
||||
*/
|
||||
export interface Flag {
|
||||
key: string;
|
||||
defaultValue?: unknown;
|
||||
@@ -471,7 +473,9 @@ export interface BuildResultV2Typical {
|
||||
framework?: {
|
||||
version: string;
|
||||
};
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags?: Flag[];
|
||||
variants?: Record<string, VariantDefinition>;
|
||||
}
|
||||
|
||||
export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;
|
||||
@@ -491,3 +495,28 @@ export type ShouldServe = (
|
||||
export type StartDevServer = (
|
||||
options: StartDevServerOptions
|
||||
) => Promise<StartDevServerResult>;
|
||||
|
||||
/**
|
||||
* TODO: The following types will eventually be exported by a more
|
||||
* relevant package.
|
||||
*/
|
||||
type VariantJSONArray = ReadonlyArray<VariantJSONValue>;
|
||||
|
||||
type VariantJSONValue =
|
||||
| string
|
||||
| boolean
|
||||
| number
|
||||
| null
|
||||
| VariantJSONArray
|
||||
| { [key: string]: VariantJSONValue };
|
||||
|
||||
type VariantOption = {
|
||||
value: VariantJSONValue;
|
||||
label?: string;
|
||||
};
|
||||
|
||||
export interface VariantDefinition {
|
||||
options?: VariantOption[];
|
||||
origin?: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
4
packages/build-utils/test/fixtures/07-cross-install/api/yarn.lock
vendored
Normal file
4
packages/build-utils/test/fixtures/07-cross-install/api/yarn.lock
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
4
packages/build-utils/test/fixtures/07-cross-install/lib/yarn.lock
vendored
Normal file
4
packages/build-utils/test/fixtures/07-cross-install/lib/yarn.lock
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
4
packages/build-utils/test/fixtures/08-yarn-npm/with-yarn/yarn.lock
vendored
Normal file
4
packages/build-utils/test/fixtures/08-yarn-npm/with-yarn/yarn.lock
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
9
packages/build-utils/test/fixtures/40-no-lockfile/package.json
vendored
Normal file
9
packages/build-utils/test/fixtures/40-no-lockfile/package.json
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"build": "mkdir -p public && (printf \"npm version: \" && npm -v) > public/index.txt"
|
||||
},
|
||||
"dependencies": {
|
||||
"sharp": "0.33.2"
|
||||
}
|
||||
}
|
||||
3
packages/build-utils/test/fixtures/40-no-lockfile/probes.json
vendored
Normal file
3
packages/build-utils/test/fixtures/40-no-lockfile/probes.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"probes": [{ "path": "/", "mustContain": "npm version: 8" }]
|
||||
}
|
||||
7
packages/build-utils/test/fixtures/40-no-lockfile/vercel.json
vendored
Normal file
7
packages/build-utils/test/fixtures/40-no-lockfile/vercel.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"build": {
|
||||
"env": {
|
||||
"VERCEL_ENABLE_NPM_DEFAULT": "1"
|
||||
}
|
||||
}
|
||||
}
|
||||
46
packages/build-utils/test/unit.test.ts
vendored
46
packages/build-utils/test/unit.test.ts
vendored
@@ -576,6 +576,52 @@ it(
|
||||
ms('1m')
|
||||
);
|
||||
|
||||
it('should return cliType npm when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is set', async () => {
|
||||
const originalRepoLockfilePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'pnpm-lock.yaml'
|
||||
);
|
||||
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
|
||||
await fs.remove(originalRepoLockfilePath);
|
||||
try {
|
||||
process.env.VERCEL_ENABLE_NPM_DEFAULT = '1';
|
||||
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
|
||||
const result = await scanParentDirs(fixture);
|
||||
expect(result.cliType).toEqual('npm');
|
||||
expect(result.lockfileVersion).toEqual(undefined);
|
||||
expect(result.lockfilePath).toEqual(undefined);
|
||||
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
|
||||
} finally {
|
||||
delete process.env.VERCEL_ENABLE_NPM_DEFAULT;
|
||||
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
|
||||
}
|
||||
});
|
||||
|
||||
it('should return cliType yarn when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is not set', async () => {
|
||||
const originalRepoLockfilePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'pnpm-lock.yaml'
|
||||
);
|
||||
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
|
||||
await fs.remove(originalRepoLockfilePath);
|
||||
try {
|
||||
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
|
||||
const result = await scanParentDirs(fixture);
|
||||
expect(result.cliType).toEqual('yarn');
|
||||
expect(result.lockfileVersion).toEqual(undefined);
|
||||
expect(result.lockfilePath).toEqual(undefined);
|
||||
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
|
||||
} finally {
|
||||
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
|
||||
}
|
||||
});
|
||||
|
||||
it('should return cliType bun and correct lock file for bun v1', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '31-bun-v1-with-yarn-lock');
|
||||
const result = await scanParentDirs(fixture);
|
||||
|
||||
@@ -1,5 +1,76 @@
|
||||
# vercel
|
||||
|
||||
## 33.5.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
|
||||
|
||||
- Updated dependencies [[`b1d8b83ab`](https://github.com/vercel/vercel/commit/b1d8b83abbf23a3485aedb490992d0a3bf44573f), [`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f), [`20237d4f7`](https://github.com/vercel/vercel/commit/20237d4f7b55b0697b57db15636c11204cb0dc39), [`f8fab639b`](https://github.com/vercel/vercel/commit/f8fab639bf49a60389b8d0b7b265a737c17b4ae1), [`6ed0fe6fb`](https://github.com/vercel/vercel/commit/6ed0fe6fb1e487545a790ff5b9fc691cf625f005)]:
|
||||
- @vercel/next@4.1.4
|
||||
- @vercel/build-utils@7.7.1
|
||||
- @vercel/remix-builder@2.1.1
|
||||
- @vercel/static-build@2.4.1
|
||||
- @vercel/node@3.0.21
|
||||
|
||||
## 33.5.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`c2d99855e`](https://github.com/vercel/vercel/commit/c2d99855ea6132380434ed29643120680f95fad7), [`1333071a3`](https://github.com/vercel/vercel/commit/1333071a3a2d324679327bfdd4e872f8fd3521c6)]:
|
||||
- @vercel/next@4.1.3
|
||||
- @vercel/remix-builder@2.1.0
|
||||
|
||||
## 33.5.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`e109e3325`](https://github.com/vercel/vercel/commit/e109e3325ab5299da0903034175fabe72d486a4e), [`d17abf463`](https://github.com/vercel/vercel/commit/d17abf463acabf9e1e43b91200f18efd34e91f62), [`644721a90`](https://github.com/vercel/vercel/commit/644721a90da8cf98414d272be9da0a821a2ce217), [`ea0e9aeae`](https://github.com/vercel/vercel/commit/ea0e9aeaec8ddddb5a726be0d252df9cdbd84808), [`e318a0eea`](https://github.com/vercel/vercel/commit/e318a0eea55c9b8536b0874f66cfd03aca6f0adf), [`1fee87e76`](https://github.com/vercel/vercel/commit/1fee87e76f18d2f5e5524247cfce615fa1832e49), [`bfc01fd98`](https://github.com/vercel/vercel/commit/bfc01fd98f760a008d0d2e6c52b5216503b44b75), [`7910f2f30`](https://github.com/vercel/vercel/commit/7910f2f3070ff69742e845e795d4db77d598c181), [`440ef3ba9`](https://github.com/vercel/vercel/commit/440ef3ba98af8f05e7714c86c67c36dbda11e85c)]:
|
||||
- @vercel/remix-builder@2.0.20
|
||||
- @vercel/next@4.1.2
|
||||
- @vercel/node@3.0.20
|
||||
- @vercel/redwood@2.0.8
|
||||
|
||||
## 33.5.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- build: upgrade edge-runtime ([#11148](https://github.com/vercel/vercel/pull/11148))
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab), [`10e200e0b`](https://github.com/vercel/vercel/commit/10e200e0bf8f692b6740e098e0572b4e7de83850), [`678ebbe52`](https://github.com/vercel/vercel/commit/678ebbe5255766656bf2dddc574e86b2999f11c8)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
- @vercel/static-build@2.4.0
|
||||
- @vercel/node@3.0.19
|
||||
|
||||
## 33.5.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`c32a909af`](https://github.com/vercel/vercel/commit/c32a909afcedf0ee55777d5dcaecc0c8383dd8c8), [`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`d21bb9f87`](https://github.com/vercel/vercel/commit/d21bb9f87e1d837666fe8104d4e199b2590725d6), [`4027a1833`](https://github.com/vercel/vercel/commit/4027a1833718a92be74b2b3c5a4df23745d19a36), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c), [`3bad73401`](https://github.com/vercel/vercel/commit/3bad73401b4ec1f61e515965732cde8dcc052b17)]:
|
||||
- @vercel/next@4.1.1
|
||||
- @vercel/node@3.0.18
|
||||
- @vercel/redwood@2.0.7
|
||||
- @vercel/remix-builder@2.0.19
|
||||
- @vercel/build-utils@7.6.0
|
||||
- @vercel/static-build@2.3.0
|
||||
|
||||
## 33.4.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`d05e41eea`](https://github.com/vercel/vercel/commit/d05e41eeaf97a024157d2bd843782c95c39389be), [`de63e3562`](https://github.com/vercel/vercel/commit/de63e356223467447cda539ddc435a892303afc7)]:
|
||||
- @vercel/static-build@2.2.0
|
||||
|
||||
## 33.4.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Added a new option to add a sensitive environment variable ([#11033](https://github.com/vercel/vercel/pull/11033))
|
||||
|
||||
## 33.3.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "33.3.0",
|
||||
"version": "33.5.4",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -31,22 +31,22 @@
|
||||
"node": ">= 16"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"@vercel/fun": "1.1.0",
|
||||
"@vercel/go": "3.0.5",
|
||||
"@vercel/hydrogen": "1.0.2",
|
||||
"@vercel/next": "4.1.0",
|
||||
"@vercel/node": "3.0.17",
|
||||
"@vercel/next": "4.1.4",
|
||||
"@vercel/node": "3.0.21",
|
||||
"@vercel/python": "4.1.1",
|
||||
"@vercel/redwood": "2.0.6",
|
||||
"@vercel/remix-builder": "2.0.18",
|
||||
"@vercel/redwood": "2.0.8",
|
||||
"@vercel/remix-builder": "2.1.1",
|
||||
"@vercel/ruby": "2.0.5",
|
||||
"@vercel/static-build": "2.1.0",
|
||||
"@vercel/static-build": "2.4.1",
|
||||
"chokidar": "3.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@alex_neo/jest-expect-message": "1.0.5",
|
||||
"@edge-runtime/node-utils": "2.2.2",
|
||||
"@edge-runtime/node-utils": "2.3.0",
|
||||
"@next/env": "11.1.2",
|
||||
"@sentry/node": "5.5.0",
|
||||
"@sindresorhus/slugify": "0.11.0",
|
||||
@@ -88,11 +88,11 @@
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel-internals/constants": "1.0.4",
|
||||
"@vercel-internals/get-package-json": "1.0.0",
|
||||
"@vercel-internals/types": "1.0.21",
|
||||
"@vercel/client": "13.1.0",
|
||||
"@vercel-internals/types": "1.0.24",
|
||||
"@vercel/client": "13.1.4",
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/frameworks": "2.0.6",
|
||||
"@vercel/fs-detectors": "5.1.6",
|
||||
"@vercel/frameworks": "3.0.0",
|
||||
"@vercel/fs-detectors": "5.2.1",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"ajv": "6.12.2",
|
||||
"alpha-sort": "2.0.1",
|
||||
|
||||
@@ -22,6 +22,7 @@ import {
|
||||
Cron,
|
||||
validateNpmrc,
|
||||
Flag,
|
||||
VariantDefinition,
|
||||
} from '@vercel/build-utils';
|
||||
import {
|
||||
detectBuilders,
|
||||
@@ -95,7 +96,9 @@ interface BuildOutputConfig {
|
||||
version: string;
|
||||
};
|
||||
crons?: Cron[];
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags?: Flag[];
|
||||
variants?: Record<string, VariantDefinition>;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -678,10 +681,13 @@ async function doBuild(
|
||||
overrides: mergedOverrides,
|
||||
framework,
|
||||
crons: mergedCrons,
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags: mergedFlags,
|
||||
};
|
||||
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });
|
||||
|
||||
await writeVariantsJson(client, buildResults.values(), outputDir);
|
||||
|
||||
const relOutputDir = relative(cwd, outputDir);
|
||||
output.print(
|
||||
`${prependEmoji(
|
||||
@@ -826,6 +832,51 @@ function mergeFlags(
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes the build output and writes all the variants into the `variants.json`
|
||||
* file. It'll skip variants that already exist.
|
||||
*/
|
||||
async function writeVariantsJson(
|
||||
{ output }: Client,
|
||||
buildResults: Iterable<BuildResult | BuildOutputConfig>,
|
||||
outputDir: string
|
||||
): Promise<void> {
|
||||
const variantsFilePath = join(outputDir, 'variants.json');
|
||||
|
||||
let hasVariants = true;
|
||||
|
||||
const variants = (await fs.readJSON(variantsFilePath).catch(error => {
|
||||
if (error.code === 'ENOENT') {
|
||||
hasVariants = false;
|
||||
return { definitions: {} };
|
||||
}
|
||||
|
||||
throw error;
|
||||
})) as { definitions: Record<string, VariantDefinition> };
|
||||
|
||||
for (const result of buildResults) {
|
||||
if (!('variants' in result) || !result.variants) continue;
|
||||
|
||||
for (const [key, defintion] of Object.entries(result.variants)) {
|
||||
if (result.variants[key]) {
|
||||
output.warn(
|
||||
`The variant "${key}" was found multiple times. Only its first occurrence will be considered.`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
hasVariants = true;
|
||||
variants.definitions[key] = defintion;
|
||||
}
|
||||
}
|
||||
|
||||
// Only create the file when there are variants to write,
|
||||
// or when the file already exists.
|
||||
if (hasVariants) {
|
||||
await fs.writeJSON(variantsFilePath, variants, { spaces: 2 });
|
||||
}
|
||||
}
|
||||
|
||||
async function writeBuildJson(buildsJson: BuildsManifest, outputDir: string) {
|
||||
await fs.writeJSON(join(outputDir, 'builds.json'), buildsJson, { spaces: 2 });
|
||||
}
|
||||
|
||||
5
packages/cli/src/commands/env/add.ts
vendored
5
packages/cli/src/commands/env/add.ts
vendored
@@ -19,6 +19,7 @@ import { isAPIError } from '../../util/errors-ts';
|
||||
|
||||
type Options = {
|
||||
'--debug': boolean;
|
||||
'--sensitive': boolean;
|
||||
};
|
||||
|
||||
export default async function add(
|
||||
@@ -144,6 +145,8 @@ export default async function add(
|
||||
envGitBranch = inputValue || '';
|
||||
}
|
||||
|
||||
const type = opts['--sensitive'] ? 'sensitive' : 'encrypted';
|
||||
|
||||
const addStamp = stamp();
|
||||
try {
|
||||
output.spinner('Saving');
|
||||
@@ -151,7 +154,7 @@ export default async function add(
|
||||
output,
|
||||
client,
|
||||
project.id,
|
||||
'encrypted',
|
||||
type,
|
||||
envName,
|
||||
envValue,
|
||||
envTargets,
|
||||
|
||||
15
packages/cli/src/commands/env/command.ts
vendored
15
packages/cli/src/commands/env/command.ts
vendored
@@ -34,7 +34,16 @@ export const envCommand: Command = {
|
||||
required: false,
|
||||
},
|
||||
],
|
||||
options: [],
|
||||
options: [
|
||||
{
|
||||
name: 'sensitive',
|
||||
description: 'Add a sensitive Environment Variable',
|
||||
shorthand: null,
|
||||
type: 'string',
|
||||
deprecated: false,
|
||||
multi: false,
|
||||
},
|
||||
],
|
||||
examples: [],
|
||||
},
|
||||
{
|
||||
@@ -117,6 +126,10 @@ export const envCommand: Command = {
|
||||
`${packageName} env add DB_PASS production`,
|
||||
],
|
||||
},
|
||||
{
|
||||
name: 'Add a sensitive Environment Variable',
|
||||
value: `${packageName} env add API_TOKEN --sensitive`,
|
||||
},
|
||||
{
|
||||
name: 'Add a new variable for a specific Environment and Git Branch',
|
||||
value: [
|
||||
|
||||
1
packages/cli/src/commands/env/index.ts
vendored
1
packages/cli/src/commands/env/index.ts
vendored
@@ -34,6 +34,7 @@ export default async function main(client: Client) {
|
||||
'-y': '--yes',
|
||||
'--environment': String,
|
||||
'--git-branch': String,
|
||||
'--sensitive': Boolean,
|
||||
});
|
||||
} catch (error) {
|
||||
handleError(error);
|
||||
|
||||
@@ -367,7 +367,7 @@ export async function executeBuild(
|
||||
Code: { ZipFile },
|
||||
Handler: asset.handler,
|
||||
Runtime: asset.runtime,
|
||||
MemorySize: asset.memory || 3008,
|
||||
MemorySize: asset.memory || 3009,
|
||||
Environment: {
|
||||
Variables: {
|
||||
...vercelConfig.env,
|
||||
|
||||
@@ -382,6 +382,9 @@ module.exports = async function prepare(session, binaryPath, tmpFixturesDir) {
|
||||
'index.html': 'Hello',
|
||||
'vercel.json': '{"builds":[{"src":"*.html","use":"@vercel/static"}]}',
|
||||
},
|
||||
'project-sensitive-env-vars': {
|
||||
'package.json': '{}',
|
||||
},
|
||||
'dev-proxy-headers-and-env': {
|
||||
'package.json': JSON.stringify({}),
|
||||
'server.js': `require('http').createServer((req, res) => {
|
||||
|
||||
47
packages/cli/test/integration-2.test.ts
vendored
47
packages/cli/test/integration-2.test.ts
vendored
@@ -681,6 +681,53 @@ test('vercel env with unknown `VERCEL_ORG_ID` or `VERCEL_PROJECT_ID` should erro
|
||||
expect(output.stderr).toContain('Project not found');
|
||||
});
|
||||
|
||||
test('add a sensitive env var', async () => {
|
||||
const dir = await setupE2EFixture('project-sensitive-env-vars');
|
||||
const projectName = `project-sensitive-env-vars-${
|
||||
Math.random().toString(36).split('.')[1]
|
||||
}`;
|
||||
|
||||
// remove previously linked project if it exists
|
||||
await remove(path.join(dir, '.vercel'));
|
||||
|
||||
const vc = execCli(binaryPath, ['link'], {
|
||||
cwd: dir,
|
||||
env: {
|
||||
FORCE_TTY: '1',
|
||||
},
|
||||
});
|
||||
|
||||
await setupProject(vc, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
await vc;
|
||||
|
||||
const link = require(path.join(dir, '.vercel/project.json'));
|
||||
|
||||
const addEnvCommand = execCli(
|
||||
binaryPath,
|
||||
['env', 'add', 'envVarName', 'production', '--sensitive'],
|
||||
{
|
||||
env: {
|
||||
VERCEL_ORG_ID: link.orgId,
|
||||
VERCEL_PROJECT_ID: link.projectId,
|
||||
},
|
||||
}
|
||||
);
|
||||
|
||||
await waitForPrompt(addEnvCommand, /What’s the value of [^?]+\?/);
|
||||
addEnvCommand.stdin?.write('test\n');
|
||||
|
||||
const output = await addEnvCommand;
|
||||
|
||||
expect(output.exitCode, formatOutput(output)).toBe(0);
|
||||
expect(output.stderr).toContain(
|
||||
'Added Environment Variable envVarName to Project'
|
||||
);
|
||||
});
|
||||
|
||||
test('whoami with `VERCEL_ORG_ID` should favor `--scope` and should error', async () => {
|
||||
if (!token) {
|
||||
throw new Error('Shared state "token" not set.');
|
||||
|
||||
2
packages/cli/test/integration-3.test.ts
vendored
2
packages/cli/test/integration-3.test.ts
vendored
@@ -1175,7 +1175,7 @@ test('render build errors', async () => {
|
||||
const output = await execCli(binaryPath, [deploymentPath, '--yes']);
|
||||
|
||||
expect(output.exitCode, formatOutput(output)).toBe(1);
|
||||
expect(output.stderr).toMatch(/Command "yarn run build" exited with 1/gm);
|
||||
expect(output.stderr).toMatch(/Command "npm run build" exited with 1/gm);
|
||||
});
|
||||
|
||||
test('invalid deployment, projects and alias names', async () => {
|
||||
|
||||
@@ -253,12 +253,12 @@ describe('validateConfig', () => {
|
||||
const error = validateConfig({
|
||||
functions: {
|
||||
'api/test.js': {
|
||||
memory: 3009,
|
||||
memory: 3010,
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(error!.message).toEqual(
|
||||
"Invalid vercel.json - `functions['api/test.js'].memory` should be <= 3008."
|
||||
"Invalid vercel.json - `functions['api/test.js'].memory` should be <= 3009."
|
||||
);
|
||||
expect(error!.link).toEqual(
|
||||
'https://vercel.com/docs/concepts/projects/project-configuration#functions'
|
||||
|
||||
@@ -1,5 +1,32 @@
|
||||
# @vercel/client
|
||||
|
||||
## 13.1.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
|
||||
- @vercel/build-utils@7.7.1
|
||||
|
||||
## 13.1.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
|
||||
## 13.1.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
|
||||
- @vercel/build-utils@7.6.0
|
||||
|
||||
## 13.1.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- More helpful error message when `vc deploy --prebuilt` has missing files ([#11105](https://github.com/vercel/vercel/pull/11105))
|
||||
|
||||
## 13.1.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "13.1.0",
|
||||
"version": "13.1.4",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -37,7 +37,8 @@
|
||||
"typescript": "4.9.5"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import { lstatSync } from 'fs-extra';
|
||||
import { isAbsolute, join, relative } from 'path';
|
||||
import { isAbsolute, join, relative, sep } from 'path';
|
||||
import { hash, hashes, mapToObject } from './utils/hashes';
|
||||
import { upload } from './upload';
|
||||
import { buildFileTree, createDebug } from './utils';
|
||||
import { DeploymentError } from './errors';
|
||||
import { isErrnoException } from '@vercel/error-utils';
|
||||
import {
|
||||
VercelClientOptions,
|
||||
DeploymentOptions,
|
||||
@@ -90,27 +91,43 @@ export default function buildCreateDeployment() {
|
||||
|
||||
let files;
|
||||
|
||||
if (clientOptions.archive === 'tgz') {
|
||||
debug('Packing tarball');
|
||||
const tarStream = tar
|
||||
.pack(workPath, {
|
||||
entries: fileList.map(file => relative(workPath, file)),
|
||||
})
|
||||
.pipe(createGzip());
|
||||
const tarBuffer = await streamToBuffer(tarStream);
|
||||
debug('Packed tarball');
|
||||
files = new Map([
|
||||
[
|
||||
hash(tarBuffer),
|
||||
{
|
||||
names: [join(workPath, '.vercel/source.tgz')],
|
||||
data: tarBuffer,
|
||||
mode: 0o666,
|
||||
},
|
||||
],
|
||||
]);
|
||||
} else {
|
||||
files = await hashes(fileList);
|
||||
try {
|
||||
if (clientOptions.archive === 'tgz') {
|
||||
debug('Packing tarball');
|
||||
const tarStream = tar
|
||||
.pack(workPath, {
|
||||
entries: fileList.map(file => relative(workPath, file)),
|
||||
})
|
||||
.pipe(createGzip());
|
||||
const tarBuffer = await streamToBuffer(tarStream);
|
||||
debug('Packed tarball');
|
||||
files = new Map([
|
||||
[
|
||||
hash(tarBuffer),
|
||||
{
|
||||
names: [join(workPath, '.vercel/source.tgz')],
|
||||
data: tarBuffer,
|
||||
mode: 0o666,
|
||||
},
|
||||
],
|
||||
]);
|
||||
} else {
|
||||
files = await hashes(fileList);
|
||||
}
|
||||
} catch (err: unknown) {
|
||||
if (
|
||||
clientOptions.prebuilt &&
|
||||
isErrnoException(err) &&
|
||||
err.code === 'ENOENT' &&
|
||||
err.path
|
||||
) {
|
||||
const errPath = relative(workPath, err.path);
|
||||
err.message = `File does not exist: "${relative(workPath, errPath)}"`;
|
||||
if (errPath.split(sep).includes('node_modules')) {
|
||||
err.message = `Please ensure project dependencies have been installed:\n${err.message}`;
|
||||
}
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
|
||||
debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`);
|
||||
|
||||
@@ -1,5 +1,11 @@
|
||||
# @vercel/frameworks
|
||||
|
||||
## 3.0.0
|
||||
|
||||
### Major Changes
|
||||
|
||||
- Make "remix" framework preset supersede "vite" ([#11031](https://github.com/vercel/vercel/pull/11031))
|
||||
|
||||
## 2.0.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "2.0.6",
|
||||
"version": "3.0.0",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
|
||||
@@ -202,11 +202,14 @@ export const frameworks = [
|
||||
description: 'A new Remix app — the result of running `npx create-remix`.',
|
||||
website: 'https://remix.run',
|
||||
sort: 6,
|
||||
supersedes: 'hydrogen',
|
||||
supersedes: ['hydrogen', 'vite'],
|
||||
useRuntime: { src: 'package.json', use: '@vercel/remix-builder' },
|
||||
ignoreRuntimes: ['@vercel/node'],
|
||||
detectors: {
|
||||
some: [
|
||||
{
|
||||
matchPackage: '@remix-run/dev',
|
||||
},
|
||||
{
|
||||
path: 'remix.config.js',
|
||||
},
|
||||
@@ -1734,7 +1737,7 @@ export const frameworks = [
|
||||
tagline: 'React framework for headless commerce',
|
||||
description: 'React framework for headless commerce',
|
||||
website: 'https://hydrogen.shopify.dev',
|
||||
supersedes: 'vite',
|
||||
supersedes: ['vite'],
|
||||
useRuntime: { src: 'package.json', use: '@vercel/hydrogen' },
|
||||
envPrefix: 'PUBLIC_',
|
||||
detectors: {
|
||||
|
||||
@@ -220,7 +220,7 @@ export interface Framework {
|
||||
*/
|
||||
defaultVersion?: string;
|
||||
/**
|
||||
* Slug of another framework preset in which this framework supersedes.
|
||||
* Array of slugs for other framework presets which this framework supersedes.
|
||||
*/
|
||||
supersedes?: string;
|
||||
supersedes?: string[];
|
||||
}
|
||||
|
||||
@@ -199,7 +199,7 @@ const Schema = {
|
||||
dependency: { type: 'string' },
|
||||
cachePattern: { type: 'string' },
|
||||
defaultVersion: { type: 'string' },
|
||||
supersedes: { type: 'string' },
|
||||
supersedes: { type: 'array', items: { type: 'string' } },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# @vercel/fs-detectors
|
||||
|
||||
## 5.2.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
|
||||
|
||||
## 5.2.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Make "remix" framework preset supersede "vite" ([#11031](https://github.com/vercel/vercel/pull/11031))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`1333071a3`](https://github.com/vercel/vercel/commit/1333071a3a2d324679327bfdd4e872f8fd3521c6)]:
|
||||
- @vercel/frameworks@3.0.0
|
||||
|
||||
## 5.1.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/fs-detectors",
|
||||
"version": "5.1.6",
|
||||
"version": "5.2.1",
|
||||
"description": "Vercel filesystem detectors",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/frameworks": "2.0.6",
|
||||
"@vercel/frameworks": "3.0.0",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"glob": "8.0.3",
|
||||
"js-yaml": "4.1.0",
|
||||
@@ -37,7 +37,7 @@
|
||||
"@types/minimatch": "3.0.5",
|
||||
"@types/node": "14.18.33",
|
||||
"@types/semver": "7.3.10",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"jest-junit": "16.0.0",
|
||||
"typescript": "4.9.5"
|
||||
}
|
||||
|
||||
@@ -612,11 +612,11 @@ function validateFunctions({ functions = {} }: Options) {
|
||||
|
||||
if (
|
||||
func.memory !== undefined &&
|
||||
(func.memory < 128 || func.memory > 3008)
|
||||
(func.memory < 128 || func.memory > 3009)
|
||||
) {
|
||||
return {
|
||||
code: 'invalid_function_memory',
|
||||
message: 'Functions must have a memory value between 128 and 3008',
|
||||
message: 'Functions must have a memory value between 128 and 3009',
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -143,7 +143,9 @@ function removeSupersededFramework(
|
||||
const framework = matches[index];
|
||||
if (framework) {
|
||||
if (framework.supersedes) {
|
||||
removeSupersededFramework(matches, framework.supersedes);
|
||||
for (const slug of framework.supersedes) {
|
||||
removeSupersededFramework(matches, slug);
|
||||
}
|
||||
}
|
||||
matches.splice(index, 1);
|
||||
}
|
||||
@@ -154,7 +156,9 @@ export function removeSupersededFrameworks(
|
||||
) {
|
||||
for (const match of matches.slice()) {
|
||||
if (match?.supersedes) {
|
||||
removeSupersededFramework(matches, match.supersedes);
|
||||
for (const slug of match.supersedes) {
|
||||
removeSupersededFramework(matches, slug);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -166,12 +166,12 @@ describe('removeSupersededFrameworks()', () => {
|
||||
const matches = [
|
||||
{ slug: 'storybook' },
|
||||
{ slug: 'vite' },
|
||||
{ slug: 'hydrogen', supersedes: 'vite' },
|
||||
{ slug: 'hydrogen', supersedes: ['vite'] },
|
||||
];
|
||||
removeSupersededFrameworks(matches);
|
||||
expect(matches).toEqual([
|
||||
{ slug: 'storybook' },
|
||||
{ slug: 'hydrogen', supersedes: 'vite' },
|
||||
{ slug: 'hydrogen', supersedes: ['vite'] },
|
||||
]);
|
||||
});
|
||||
|
||||
@@ -179,13 +179,13 @@ describe('removeSupersededFrameworks()', () => {
|
||||
const matches = [
|
||||
{ slug: 'storybook' },
|
||||
{ slug: 'vite' },
|
||||
{ slug: 'hydrogen', supersedes: 'vite' },
|
||||
{ slug: 'remix', supersedes: 'hydrogen' },
|
||||
{ slug: 'hydrogen', supersedes: ['vite'] },
|
||||
{ slug: 'remix', supersedes: ['hydrogen'] },
|
||||
];
|
||||
removeSupersededFrameworks(matches);
|
||||
expect(matches).toEqual([
|
||||
{ slug: 'storybook' },
|
||||
{ slug: 'remix', supersedes: 'hydrogen' },
|
||||
{ slug: 'remix', supersedes: ['hydrogen'] },
|
||||
]);
|
||||
});
|
||||
});
|
||||
@@ -442,6 +442,20 @@ describe('detectFramework()', () => {
|
||||
|
||||
expect(await detectFramework({ fs, frameworkList })).toBe('storybook');
|
||||
});
|
||||
|
||||
it('Should detect Remix + Vite as `remix`', async () => {
|
||||
const fs = new VirtualFilesystem({
|
||||
'vite.config.ts': '',
|
||||
'package.json': JSON.stringify({
|
||||
dependencies: {
|
||||
'@remix-run/dev': 'latest',
|
||||
vite: 'latest',
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
expect(await detectFramework({ fs, frameworkList })).toBe('remix');
|
||||
});
|
||||
});
|
||||
|
||||
describe('detectFrameworks()', () => {
|
||||
@@ -497,6 +511,23 @@ describe('detectFrameworks()', () => {
|
||||
expect(slugs).toEqual(['nextjs', 'storybook']);
|
||||
});
|
||||
|
||||
it('Should detect Remix + Vite as `remix`', async () => {
|
||||
const fs = new VirtualFilesystem({
|
||||
'vite.config.ts': '',
|
||||
'package.json': JSON.stringify({
|
||||
dependencies: {
|
||||
'@remix-run/dev': 'latest',
|
||||
vite: 'latest',
|
||||
},
|
||||
}),
|
||||
});
|
||||
|
||||
const slugs = (await detectFrameworks({ fs, frameworkList })).map(
|
||||
f => f.slug
|
||||
);
|
||||
expect(slugs).toEqual(['remix']);
|
||||
});
|
||||
|
||||
it('Should detect "hydrogen" template as `hydrogen`', async () => {
|
||||
const fs = new LocalFileSystemDetector(join(EXAMPLES_DIR, 'hydrogen'));
|
||||
|
||||
|
||||
@@ -1,5 +1,28 @@
|
||||
# @vercel/gatsby-plugin-vercel-builder
|
||||
|
||||
## 2.0.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
|
||||
- @vercel/build-utils@7.7.1
|
||||
|
||||
## 2.0.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
|
||||
## 2.0.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [gatsby-plugin-vercel-builder] use --keep-names esbuild flag ([#11117](https://github.com/vercel/vercel/pull/11117))
|
||||
|
||||
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
|
||||
- @vercel/build-utils@7.6.0
|
||||
|
||||
## 2.0.16
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/gatsby-plugin-vercel-builder",
|
||||
"version": "2.0.16",
|
||||
"version": "2.0.19",
|
||||
"main": "dist/index.js",
|
||||
"files": [
|
||||
"dist",
|
||||
@@ -20,7 +20,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@sinclair/typebox": "0.25.24",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"esbuild": "0.14.47",
|
||||
"etag": "1.8.1",
|
||||
|
||||
@@ -35,6 +35,9 @@ export const writeHandler = async ({
|
||||
platform: 'node',
|
||||
bundle: true,
|
||||
minify: true,
|
||||
// prevents renaming edge cases from causing failures like:
|
||||
// https://github.com/node-fetch/node-fetch/issues/784
|
||||
keepNames: true,
|
||||
define: {
|
||||
'process.env.NODE_ENV': "'production'",
|
||||
vercel_pathPrefix: JSON.stringify(prefix),
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "6.1.5",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"async-retry": "1.3.3",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "14.18.33",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"execa": "3.2.0",
|
||||
"fs-extra": "11.1.0",
|
||||
"jest-junit": "16.0.0"
|
||||
|
||||
@@ -1,5 +1,43 @@
|
||||
# @vercel/next
|
||||
|
||||
## 4.1.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Enable partial prerendering support for pre-generated pages ([#11183](https://github.com/vercel/vercel/pull/11183))
|
||||
|
||||
## 4.1.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Fix manifest with experimental flag ([#11192](https://github.com/vercel/vercel/pull/11192))
|
||||
|
||||
## 4.1.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Update experimental bundle flag ([#11187](https://github.com/vercel/vercel/pull/11187))
|
||||
|
||||
- [next] Add flag for experimental grouping ([#11177](https://github.com/vercel/vercel/pull/11177))
|
||||
|
||||
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
|
||||
|
||||
- fix: missing experimental field ([#11184](https://github.com/vercel/vercel/pull/11184))
|
||||
|
||||
## 4.1.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
|
||||
|
||||
- Load common chunks on module initialization ([#11126](https://github.com/vercel/vercel/pull/11126))
|
||||
|
||||
- Fix index normalizing for app outputs ([#11099](https://github.com/vercel/vercel/pull/11099))
|
||||
|
||||
- Mark `flags` as deprecated and replace them with `variants` ([#11098](https://github.com/vercel/vercel/pull/11098))
|
||||
|
||||
- Fix rewrite RSC handling with trailingSlash ([#11107](https://github.com/vercel/vercel/pull/11107))
|
||||
|
||||
## 4.1.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "4.1.0",
|
||||
"version": "4.1.4",
|
||||
"license": "Apache-2.0",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -23,7 +23,7 @@
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@vercel/nft": "0.26.2"
|
||||
"@vercel/nft": "0.26.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/aws-lambda": "8.10.19",
|
||||
@@ -40,7 +40,7 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
|
||||
@@ -511,7 +511,7 @@ export const build: BuildV2 = async ({
|
||||
entryPath,
|
||||
outputDirectory
|
||||
);
|
||||
const omittedPrerenderRoutes = new Set(
|
||||
const omittedPrerenderRoutes: ReadonlySet<string> = new Set(
|
||||
Object.keys(prerenderManifest.omittedRoutes)
|
||||
);
|
||||
|
||||
@@ -1142,6 +1142,10 @@ export const build: BuildV2 = async ({
|
||||
appPathRoutesManifest,
|
||||
});
|
||||
|
||||
/**
|
||||
* This is a detection for preview mode that's required for the pages
|
||||
* router.
|
||||
*/
|
||||
const canUsePreviewMode = Object.keys(pages).some(page =>
|
||||
isApiPage(pages[page].fsPath)
|
||||
);
|
||||
@@ -1316,6 +1320,22 @@ export const build: BuildV2 = async ({
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* All of the routes that have `experimentalPPR` enabled.
|
||||
*/
|
||||
const experimentalPPRRoutes = new Set<string>();
|
||||
|
||||
for (const [route, { experimentalPPR }] of [
|
||||
...Object.entries(prerenderManifest.staticRoutes),
|
||||
...Object.entries(prerenderManifest.blockingFallbackRoutes),
|
||||
...Object.entries(prerenderManifest.fallbackRoutes),
|
||||
...Object.entries(prerenderManifest.omittedRoutes),
|
||||
]) {
|
||||
if (!experimentalPPR) continue;
|
||||
|
||||
experimentalPPRRoutes.add(route);
|
||||
}
|
||||
|
||||
if (requiredServerFilesManifest) {
|
||||
if (!routesManifest) {
|
||||
throw new Error(
|
||||
@@ -1371,6 +1391,7 @@ export const build: BuildV2 = async ({
|
||||
hasIsr404Page,
|
||||
hasIsr500Page,
|
||||
variantsManifest,
|
||||
experimentalPPRRoutes,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1883,17 +1904,18 @@ export const build: BuildV2 = async ({
|
||||
);
|
||||
}
|
||||
|
||||
dynamicRoutes = await getDynamicRoutes(
|
||||
dynamicRoutes = await getDynamicRoutes({
|
||||
entryPath,
|
||||
entryDirectory,
|
||||
dynamicPages,
|
||||
false,
|
||||
isDev: false,
|
||||
routesManifest,
|
||||
omittedPrerenderRoutes,
|
||||
omittedRoutes: omittedPrerenderRoutes,
|
||||
canUsePreviewMode,
|
||||
prerenderManifest.bypassToken || '',
|
||||
isServerMode
|
||||
).then(arr =>
|
||||
bypassToken: prerenderManifest.bypassToken || '',
|
||||
isServerMode,
|
||||
experimentalPPRRoutes,
|
||||
}).then(arr =>
|
||||
localizeDynamicRoutes(
|
||||
arr,
|
||||
dynamicPrefix,
|
||||
@@ -1912,17 +1934,18 @@ export const build: BuildV2 = async ({
|
||||
|
||||
// we need to include the prerenderManifest.omittedRoutes here
|
||||
// for the page to be able to be matched in the lambda for preview mode
|
||||
const completeDynamicRoutes = await getDynamicRoutes(
|
||||
const completeDynamicRoutes = await getDynamicRoutes({
|
||||
entryPath,
|
||||
entryDirectory,
|
||||
dynamicPages,
|
||||
false,
|
||||
isDev: false,
|
||||
routesManifest,
|
||||
undefined,
|
||||
omittedRoutes: undefined,
|
||||
canUsePreviewMode,
|
||||
prerenderManifest.bypassToken || '',
|
||||
isServerMode
|
||||
).then(arr =>
|
||||
bypassToken: prerenderManifest.bypassToken || '',
|
||||
isServerMode,
|
||||
experimentalPPRRoutes,
|
||||
}).then(arr =>
|
||||
arr.map(route => {
|
||||
route.src = route.src.replace('^', `^${dynamicPrefix}`);
|
||||
return route;
|
||||
@@ -2119,22 +2142,33 @@ export const build: BuildV2 = async ({
|
||||
appPathRoutesManifest,
|
||||
isSharedLambdas,
|
||||
canUsePreviewMode,
|
||||
omittedPrerenderRoutes,
|
||||
});
|
||||
|
||||
Object.keys(prerenderManifest.staticRoutes).forEach(route =>
|
||||
prerenderRoute(route, { isBlocking: false, isFallback: false })
|
||||
await Promise.all(
|
||||
Object.keys(prerenderManifest.staticRoutes).map(route =>
|
||||
prerenderRoute(route, {})
|
||||
)
|
||||
);
|
||||
Object.keys(prerenderManifest.fallbackRoutes).forEach(route =>
|
||||
prerenderRoute(route, { isBlocking: false, isFallback: true })
|
||||
|
||||
await Promise.all(
|
||||
Object.keys(prerenderManifest.fallbackRoutes).map(route =>
|
||||
prerenderRoute(route, { isFallback: true })
|
||||
)
|
||||
);
|
||||
Object.keys(prerenderManifest.blockingFallbackRoutes).forEach(route =>
|
||||
prerenderRoute(route, { isBlocking: true, isFallback: false })
|
||||
|
||||
await Promise.all(
|
||||
Object.keys(prerenderManifest.blockingFallbackRoutes).map(route =>
|
||||
prerenderRoute(route, { isBlocking: true })
|
||||
)
|
||||
);
|
||||
|
||||
if (static404Page && canUsePreviewMode) {
|
||||
omittedPrerenderRoutes.forEach(route => {
|
||||
prerenderRoute(route, { isOmitted: true });
|
||||
});
|
||||
await Promise.all(
|
||||
Array.from(omittedPrerenderRoutes).map(route =>
|
||||
prerenderRoute(route, { isOmitted: true })
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// We still need to use lazyRoutes if the dataRoutes field
|
||||
|
||||
@@ -47,11 +47,12 @@ import {
|
||||
UnwrapPromise,
|
||||
getOperationType,
|
||||
FunctionsConfigManifestV1,
|
||||
VariantsManifest,
|
||||
VariantsManifestLegacy,
|
||||
RSC_CONTENT_TYPE,
|
||||
RSC_PREFETCH_SUFFIX,
|
||||
normalizePrefetches,
|
||||
CreateLambdaFromPseudoLayersOptions,
|
||||
getPostponeResumePathname,
|
||||
} from './utils';
|
||||
import {
|
||||
nodeFileTrace,
|
||||
@@ -70,6 +71,30 @@ const NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION = 'v12.1.7-canary.33';
|
||||
const EMPTY_ALLOW_QUERY_FOR_PRERENDERED_VERSION = 'v12.2.0';
|
||||
const CORRECTED_MANIFESTS_VERSION = 'v12.2.0';
|
||||
|
||||
// Ideally this should be in a Next.js manifest so we can change it in
|
||||
// the future but this also allows us to improve existing versions
|
||||
const PRELOAD_CHUNKS = {
|
||||
APP_ROUTER_PAGES: [
|
||||
'.next/server/webpack-runtime.js',
|
||||
'next/dist/client/components/action-async-storage.external.js',
|
||||
'next/dist/client/components/request-async-storage.external.js',
|
||||
'next/dist/client/components/static-generation-async-storage.external.js',
|
||||
'next/dist/compiled/next-server/app-page.runtime.prod.js',
|
||||
],
|
||||
APP_ROUTER_HANDLER: [
|
||||
'.next/server/webpack-runtime.js',
|
||||
'next/dist/compiled/next-server/app-route.runtime.prod.js',
|
||||
],
|
||||
PAGES_ROUTER_PAGES: [
|
||||
'.next/server/webpack-runtime.js',
|
||||
'next/dist/compiled/next-server/pages.runtime.prod.js',
|
||||
],
|
||||
PAGES_ROUTER_API: [
|
||||
'.next/server/webpack-api-runtime.js',
|
||||
'next/dist/compiled/next-server/pages-api.runtime.prod.js',
|
||||
],
|
||||
};
|
||||
|
||||
// related PR: https://github.com/vercel/next.js/pull/52997
|
||||
// and https://github.com/vercel/next.js/pull/56318
|
||||
const BUNDLED_SERVER_NEXT_VERSION = 'v13.5.4';
|
||||
@@ -118,6 +143,7 @@ export async function serverBuild({
|
||||
lambdaCompressedByteLimit,
|
||||
requiredServerFilesManifest,
|
||||
variantsManifest,
|
||||
experimentalPPRRoutes,
|
||||
}: {
|
||||
appPathRoutesManifest?: Record<string, string>;
|
||||
dynamicPages: string[];
|
||||
@@ -127,7 +153,7 @@ export async function serverBuild({
|
||||
pagesDir: string;
|
||||
baseDir: string;
|
||||
canUsePreviewMode: boolean;
|
||||
omittedPrerenderRoutes: Set<string>;
|
||||
omittedPrerenderRoutes: ReadonlySet<string>;
|
||||
localePrefixed404: boolean;
|
||||
staticPages: { [key: string]: FileFsRef };
|
||||
lambdaAppPaths: { [key: string]: FileFsRef };
|
||||
@@ -158,10 +184,15 @@ export async function serverBuild({
|
||||
imagesManifest?: NextImagesManifest;
|
||||
prerenderManifest: NextPrerenderedRoutes;
|
||||
requiredServerFilesManifest: NextRequiredServerFilesManifest;
|
||||
variantsManifest: VariantsManifest | null;
|
||||
variantsManifest: VariantsManifestLegacy | null;
|
||||
experimentalPPRRoutes: ReadonlySet<string>;
|
||||
}): Promise<BuildResult> {
|
||||
lambdaPages = Object.assign({}, lambdaPages, lambdaAppPaths);
|
||||
|
||||
const experimentalAllowBundling = Boolean(
|
||||
process.env.NEXT_EXPERIMENTAL_FUNCTION_BUNDLING
|
||||
);
|
||||
|
||||
const lambdas: { [key: string]: Lambda } = {};
|
||||
const prerenders: { [key: string]: Prerender } = {};
|
||||
const lambdaPageKeys = Object.keys(lambdaPages);
|
||||
@@ -217,7 +248,7 @@ export async function serverBuild({
|
||||
for (const rewrite of afterFilesRewrites) {
|
||||
if (rewrite.src && rewrite.dest) {
|
||||
rewrite.src = rewrite.src.replace(
|
||||
'(?:/)?',
|
||||
/\/?\(\?:\/\)\?/,
|
||||
'(?<rscsuff>(\\.prefetch)?\\.rsc)?(?:/)?'
|
||||
);
|
||||
let destQueryIndex = rewrite.dest.indexOf('?');
|
||||
@@ -242,10 +273,11 @@ export async function serverBuild({
|
||||
nextVersion,
|
||||
CORRECT_MIDDLEWARE_ORDER_VERSION
|
||||
);
|
||||
const isCorrectManifests = semver.gte(
|
||||
nextVersion,
|
||||
CORRECTED_MANIFESTS_VERSION
|
||||
);
|
||||
// experimental bundling prevents filtering manifests
|
||||
// as we don't know what to filter by at this stage
|
||||
const isCorrectManifests =
|
||||
!experimentalAllowBundling &&
|
||||
semver.gte(nextVersion, CORRECTED_MANIFESTS_VERSION);
|
||||
|
||||
let hasStatic500 = !!staticPages[path.posix.join(entryDirectory, '500')];
|
||||
|
||||
@@ -324,19 +356,7 @@ export async function serverBuild({
|
||||
internalPages.push('404.js');
|
||||
}
|
||||
|
||||
const experimentalPPRRoutes = new Set<string>();
|
||||
|
||||
for (const [route, { experimentalPPR }] of [
|
||||
...Object.entries(prerenderManifest.staticRoutes),
|
||||
...Object.entries(prerenderManifest.blockingFallbackRoutes),
|
||||
...Object.entries(prerenderManifest.fallbackRoutes),
|
||||
]) {
|
||||
if (!experimentalPPR) continue;
|
||||
|
||||
experimentalPPRRoutes.add(route);
|
||||
}
|
||||
|
||||
const prerenderRoutes = new Set<string>([
|
||||
const prerenderRoutes: ReadonlySet<string> = new Set<string>([
|
||||
...(canUsePreviewMode ? omittedPrerenderRoutes : []),
|
||||
...Object.keys(prerenderManifest.blockingFallbackRoutes),
|
||||
...Object.keys(prerenderManifest.fallbackRoutes),
|
||||
@@ -744,7 +764,7 @@ export async function serverBuild({
|
||||
.filter(Boolean) as string[];
|
||||
|
||||
let traceResult: NodeFileTraceResult | undefined;
|
||||
let parentFilesMap: Map<string, Set<string>> | undefined;
|
||||
let parentFilesMap: ReadonlyMap<string, Set<string>> | undefined;
|
||||
|
||||
if (pathsToTrace.length > 0) {
|
||||
traceResult = await nodeFileTrace(pathsToTrace, {
|
||||
@@ -859,6 +879,7 @@ export async function serverBuild({
|
||||
const pageExtensions = requiredServerFilesManifest.config?.pageExtensions;
|
||||
|
||||
const pageLambdaGroups = await getPageLambdaGroups({
|
||||
experimentalAllowBundling,
|
||||
entryPath: projectDir,
|
||||
config,
|
||||
functionsConfigManifest,
|
||||
@@ -880,6 +901,7 @@ export async function serverBuild({
|
||||
}
|
||||
|
||||
const appRouterLambdaGroups = await getPageLambdaGroups({
|
||||
experimentalAllowBundling,
|
||||
entryPath: projectDir,
|
||||
config,
|
||||
functionsConfigManifest,
|
||||
@@ -898,6 +920,7 @@ export async function serverBuild({
|
||||
});
|
||||
|
||||
const appRouteHandlersLambdaGroups = await getPageLambdaGroups({
|
||||
experimentalAllowBundling,
|
||||
entryPath: projectDir,
|
||||
config,
|
||||
functionsConfigManifest,
|
||||
@@ -1068,9 +1091,58 @@ export async function serverBuild({
|
||||
}
|
||||
}
|
||||
|
||||
let launcherData = group.isAppRouter ? appLauncher : launcher;
|
||||
let preloadChunks: string[] = [];
|
||||
|
||||
if (process.env.VERCEL_NEXT_PRELOAD_COMMON === '1') {
|
||||
const nextPackageDir = path.dirname(
|
||||
resolveFrom(projectDir, 'next/package.json')
|
||||
);
|
||||
|
||||
if (group.isPages) {
|
||||
preloadChunks = PRELOAD_CHUNKS.PAGES_ROUTER_PAGES;
|
||||
} else if (group.isApiLambda) {
|
||||
preloadChunks = PRELOAD_CHUNKS.PAGES_ROUTER_API;
|
||||
} else if (group.isAppRouter && !group.isAppRouteHandler) {
|
||||
preloadChunks = PRELOAD_CHUNKS.APP_ROUTER_PAGES;
|
||||
} else if (group.isAppRouteHandler) {
|
||||
preloadChunks = PRELOAD_CHUNKS.APP_ROUTER_HANDLER;
|
||||
}
|
||||
const normalizedPreloadChunks: string[] = [];
|
||||
|
||||
for (const preloadChunk of preloadChunks) {
|
||||
const absoluteChunk = preloadChunk.startsWith('.next')
|
||||
? path.join(projectDir, preloadChunk)
|
||||
: path.join(nextPackageDir, '..', preloadChunk);
|
||||
|
||||
// ensure the chunks are actually in this layer
|
||||
if (
|
||||
group.pseudoLayer[
|
||||
path.join('.', path.relative(baseDir, absoluteChunk))
|
||||
]
|
||||
) {
|
||||
normalizedPreloadChunks.push(
|
||||
// relative files need to be prefixed with ./ for require
|
||||
preloadChunk.startsWith('.next')
|
||||
? `./${preloadChunk}`
|
||||
: preloadChunk
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (normalizedPreloadChunks.length > 0) {
|
||||
launcherData = launcherData.replace(
|
||||
'// @preserve next-server-preload-target',
|
||||
normalizedPreloadChunks
|
||||
.map(name => `require('${name}');`)
|
||||
.join('\n')
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
const launcherFiles: { [name: string]: FileFsRef | FileBlob } = {
|
||||
[path.join(path.relative(baseDir, projectDir), '___next_launcher.cjs')]:
|
||||
new FileBlob({ data: group.isAppRouter ? appLauncher : launcher }),
|
||||
new FileBlob({ data: launcherData }),
|
||||
};
|
||||
const operationType = getOperationType({ group, prerenderManifest });
|
||||
|
||||
@@ -1090,6 +1162,7 @@ export async function serverBuild({
|
||||
maxDuration: group.maxDuration,
|
||||
isStreaming: group.isStreaming,
|
||||
nextVersion,
|
||||
experimentalAllowBundling,
|
||||
};
|
||||
|
||||
const lambda = await createLambdaFromPseudoLayers(options);
|
||||
@@ -1103,7 +1176,7 @@ export async function serverBuild({
|
||||
// lambda for the page for revalidation.
|
||||
let revalidate: NodejsLambda | undefined;
|
||||
if (isPPR) {
|
||||
if (isPPR && !options.isStreaming) {
|
||||
if (!options.isStreaming) {
|
||||
throw new Error("Invariant: PPR lambda isn't streaming");
|
||||
}
|
||||
|
||||
@@ -1115,24 +1188,28 @@ export async function serverBuild({
|
||||
});
|
||||
}
|
||||
|
||||
for (const page of group.pages) {
|
||||
const pageNoExt = page.replace(/\.js$/, '');
|
||||
let isPrerender = prerenderRoutes.has(
|
||||
path.join('/', pageNoExt === 'index' ? '' : pageNoExt)
|
||||
);
|
||||
for (const pageFilename of group.pages) {
|
||||
// This is the name of the page, where the root is `index`.
|
||||
const pageName = pageFilename.replace(/\.js$/, '');
|
||||
|
||||
// This is the name of the page prefixed with a `/`, where the root is
|
||||
// `/index`.
|
||||
const pagePath = path.posix.join('/', pageName);
|
||||
|
||||
// This is the routable pathname for the page, where the root is `/`.
|
||||
const pagePathname = pagePath === '/index' ? '/' : pagePath;
|
||||
|
||||
let isPrerender = prerenderRoutes.has(pagePathname);
|
||||
|
||||
if (!isPrerender && routesManifest?.i18n) {
|
||||
isPrerender = routesManifest.i18n.locales.some(locale => {
|
||||
return prerenderRoutes.has(
|
||||
path.join('/', locale, pageNoExt === 'index' ? '' : pageNoExt)
|
||||
path.join('/', locale, pageName === 'index' ? '' : pageName)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
let outputName = normalizeIndexOutput(
|
||||
path.posix.join(entryDirectory, pageNoExt),
|
||||
true
|
||||
);
|
||||
let outputName = path.posix.join(entryDirectory, pageName);
|
||||
|
||||
// If this is a PPR page, then we should prefix the output name.
|
||||
if (isPPR) {
|
||||
@@ -1140,24 +1217,56 @@ export async function serverBuild({
|
||||
throw new Error("Invariant: PPR lambda isn't set");
|
||||
}
|
||||
|
||||
// Get the get the base path prefixed route, without the index
|
||||
// normalization.
|
||||
outputName = path.posix.join(entryDirectory, pageNoExt);
|
||||
// Assign the revalidate lambda to the output name. That's used to
|
||||
// perform the initial static shell render.
|
||||
lambdas[outputName] = revalidate;
|
||||
|
||||
const pprOutputName = path.posix.join(
|
||||
entryDirectory,
|
||||
'/_next/postponed/resume',
|
||||
pageNoExt
|
||||
);
|
||||
lambdas[pprOutputName] = lambda;
|
||||
// If this isn't an omitted page, then we should add the link from the
|
||||
// page to the postpone resume lambda.
|
||||
if (!omittedPrerenderRoutes.has(pagePathname)) {
|
||||
const key = getPostponeResumePathname(entryDirectory, pageName);
|
||||
lambdas[key] = lambda;
|
||||
|
||||
// We want to add the `experimentalStreamingLambdaPath` to this
|
||||
// output.
|
||||
experimentalStreamingLambdaPaths.set(outputName, key);
|
||||
} else {
|
||||
// As this is an omitted page, we should generate the experimental
|
||||
// partial prerendering resume route for each of these routes that
|
||||
// support partial prerendering. This is because the routes that
|
||||
// haven't been omitted will have rewrite rules in place to rewrite
|
||||
// the original request `/blog/my-slug` to the dynamic path
|
||||
// `/blog/[slug]?nxtPslug=my-slug`.
|
||||
for (const [
|
||||
routePathname,
|
||||
{ srcRoute, experimentalPPR },
|
||||
] of Object.entries(prerenderManifest.staticRoutes)) {
|
||||
// If the srcRoute doesn't match or this doesn't support
|
||||
// experimental partial prerendering, then we can skip this route.
|
||||
if (srcRoute !== pagePathname || !experimentalPPR) continue;
|
||||
|
||||
// If this route is the same as the page route, then we can skip
|
||||
// it, because we've already added the lambda to the output.
|
||||
if (routePathname === pagePathname) continue;
|
||||
|
||||
const key = getPostponeResumePathname(
|
||||
entryDirectory,
|
||||
routePathname
|
||||
);
|
||||
lambdas[key] = lambda;
|
||||
|
||||
outputName = path.posix.join(entryDirectory, routePathname);
|
||||
experimentalStreamingLambdaPaths.set(outputName, key);
|
||||
}
|
||||
}
|
||||
|
||||
// We want to add the `experimentalStreamingLambdaPath` to this
|
||||
// output.
|
||||
experimentalStreamingLambdaPaths.set(outputName, pprOutputName);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!group.isAppRouter && !group.isAppRouteHandler) {
|
||||
outputName = normalizeIndexOutput(outputName, true);
|
||||
}
|
||||
|
||||
// we add locale prefixed outputs for SSR pages,
|
||||
// this is handled in onPrerenderRoute for SSG pages
|
||||
if (
|
||||
@@ -1165,7 +1274,7 @@ export async function serverBuild({
|
||||
!isPrerender &&
|
||||
!group.isAppRouter &&
|
||||
(!isCorrectLocaleAPIRoutes ||
|
||||
!(pageNoExt === 'api' || pageNoExt.startsWith('api/')))
|
||||
!(pageName === 'api' || pageName.startsWith('api/')))
|
||||
) {
|
||||
for (const locale of i18n.locales) {
|
||||
lambdas[
|
||||
@@ -1173,7 +1282,7 @@ export async function serverBuild({
|
||||
path.posix.join(
|
||||
entryDirectory,
|
||||
locale,
|
||||
pageNoExt === 'index' ? '' : pageNoExt
|
||||
pageName === 'index' ? '' : pageName
|
||||
),
|
||||
true
|
||||
)
|
||||
@@ -1206,6 +1315,7 @@ export async function serverBuild({
|
||||
hasPages404: routesManifest.pages404,
|
||||
isCorrectNotFoundRoutes,
|
||||
isEmptyAllowQueryForPrendered,
|
||||
omittedPrerenderRoutes,
|
||||
});
|
||||
|
||||
await Promise.all(
|
||||
@@ -1213,11 +1323,13 @@ export async function serverBuild({
|
||||
prerenderRoute(route, {})
|
||||
)
|
||||
);
|
||||
|
||||
await Promise.all(
|
||||
Object.keys(prerenderManifest.fallbackRoutes).map(route =>
|
||||
prerenderRoute(route, { isFallback: true })
|
||||
)
|
||||
);
|
||||
|
||||
await Promise.all(
|
||||
Object.keys(prerenderManifest.blockingFallbackRoutes).map(route =>
|
||||
prerenderRoute(route, { isBlocking: true })
|
||||
@@ -1226,9 +1338,9 @@ export async function serverBuild({
|
||||
|
||||
if (static404Page && canUsePreviewMode) {
|
||||
await Promise.all(
|
||||
[...omittedPrerenderRoutes].map(route => {
|
||||
return prerenderRoute(route, { isOmitted: true });
|
||||
})
|
||||
Array.from(omittedPrerenderRoutes).map(route =>
|
||||
prerenderRoute(route, { isOmitted: true })
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1237,6 +1349,7 @@ export async function serverBuild({
|
||||
if (routesManifest?.i18n) {
|
||||
route = normalizeLocalePath(route, routesManifest.i18n.locales).pathname;
|
||||
}
|
||||
|
||||
delete lambdas[
|
||||
normalizeIndexOutput(
|
||||
path.posix.join('./', entryDirectory, route === '/' ? '/index' : route),
|
||||
@@ -1260,19 +1373,19 @@ export async function serverBuild({
|
||||
middleware.staticRoutes.length > 0 &&
|
||||
semver.gte(nextVersion, NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION);
|
||||
|
||||
const dynamicRoutes = await getDynamicRoutes(
|
||||
const dynamicRoutes = await getDynamicRoutes({
|
||||
entryPath,
|
||||
entryDirectory,
|
||||
dynamicPages,
|
||||
false,
|
||||
isDev: false,
|
||||
routesManifest,
|
||||
omittedPrerenderRoutes,
|
||||
omittedRoutes: omittedPrerenderRoutes,
|
||||
canUsePreviewMode,
|
||||
prerenderManifest.bypassToken || '',
|
||||
true,
|
||||
middleware.dynamicRouteMap,
|
||||
experimental.ppr
|
||||
).then(arr =>
|
||||
bypassToken: prerenderManifest.bypassToken || '',
|
||||
isServerMode: true,
|
||||
dynamicMiddlewareRouteMap: middleware.dynamicRouteMap,
|
||||
experimentalPPRRoutes,
|
||||
}).then(arr =>
|
||||
localizeDynamicRoutes(
|
||||
arr,
|
||||
dynamicPrefix,
|
||||
@@ -1443,9 +1556,10 @@ export async function serverBuild({
|
||||
continue;
|
||||
}
|
||||
|
||||
const pathname = normalizeIndexOutput(
|
||||
path.posix.join('./', entryDirectory, route === '/' ? '/index' : route),
|
||||
true
|
||||
const pathname = path.posix.join(
|
||||
'./',
|
||||
entryDirectory,
|
||||
route === '/' ? '/index' : route
|
||||
);
|
||||
|
||||
if (lambdas[pathname]) {
|
||||
@@ -1477,6 +1591,46 @@ export async function serverBuild({
|
||||
throw new Error("Invariant: cannot use PPR without 'rsc.prefetchHeader'");
|
||||
}
|
||||
|
||||
// If we're using the Experimental Partial Prerendering, we should ensure that
|
||||
// all the routes that support it (and are listed) have configured lambdas.
|
||||
// This only applies to routes that do not have fallbacks enabled (these are
|
||||
// routes that have `dynamicParams = false` defined.
|
||||
if (experimental.ppr) {
|
||||
for (const { srcRoute, dataRoute, experimentalPPR } of Object.values(
|
||||
prerenderManifest.staticRoutes
|
||||
)) {
|
||||
// Only apply this to the routes that support experimental PPR and
|
||||
// that also have their `dataRoute` and `srcRoute` defined.
|
||||
if (!experimentalPPR || !dataRoute || !srcRoute) continue;
|
||||
|
||||
// If the srcRoute is not omitted, then we don't need to do anything. This
|
||||
// is the indicator that a route should only have it's prerender defined
|
||||
// and not a lambda.
|
||||
if (!omittedPrerenderRoutes.has(srcRoute)) continue;
|
||||
|
||||
// The lambda paths have their leading `/` stripped.
|
||||
const srcPathname = srcRoute.substring(1);
|
||||
const dataPathname = dataRoute.substring(1);
|
||||
|
||||
// If we already have an associated lambda for the `.rsc` route, then
|
||||
// we can skip this.
|
||||
const dataPathnameExists = dataPathname in lambdas;
|
||||
if (dataPathnameExists) continue;
|
||||
|
||||
// We require that the source route has a lambda associated with it. If
|
||||
// it doesn't this is an error.
|
||||
const srcPathnameExists = srcPathname in lambdas;
|
||||
if (!srcPathnameExists) {
|
||||
throw new Error(
|
||||
`Invariant: Expected to have a lambda for the source route: ${srcPathname}`
|
||||
);
|
||||
}
|
||||
|
||||
// Associate the data pathname with the source pathname's lambda.
|
||||
lambdas[dataPathname] = lambdas[srcPathname];
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
wildcard: wildcardConfig,
|
||||
images: getImagesConfig(imagesManifest),
|
||||
|
||||
@@ -23,6 +23,8 @@ if (process.env.NODE_ENV !== 'production' && region !== 'dev1') {
|
||||
// eslint-disable-next-line
|
||||
const NextServer = require('__NEXT_SERVER_PATH__').default;
|
||||
|
||||
// @preserve next-server-preload-target
|
||||
|
||||
// __NEXT_CONFIG__ value is injected
|
||||
declare const __NEXT_CONFIG__: any;
|
||||
const conf = __NEXT_CONFIG__;
|
||||
|
||||
@@ -304,19 +304,31 @@ export async function getRoutesManifest(
|
||||
return routesManifest;
|
||||
}
|
||||
|
||||
export async function getDynamicRoutes(
|
||||
entryPath: string,
|
||||
entryDirectory: string,
|
||||
dynamicPages: string[],
|
||||
isDev?: boolean,
|
||||
routesManifest?: RoutesManifest,
|
||||
omittedRoutes?: Set<string>,
|
||||
canUsePreviewMode?: boolean,
|
||||
bypassToken?: string,
|
||||
isServerMode?: boolean,
|
||||
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>,
|
||||
experimentalPPR?: boolean
|
||||
): Promise<RouteWithSrc[]> {
|
||||
export async function getDynamicRoutes({
|
||||
entryPath,
|
||||
entryDirectory,
|
||||
dynamicPages,
|
||||
isDev,
|
||||
routesManifest,
|
||||
omittedRoutes,
|
||||
canUsePreviewMode,
|
||||
bypassToken,
|
||||
isServerMode,
|
||||
dynamicMiddlewareRouteMap,
|
||||
experimentalPPRRoutes,
|
||||
}: {
|
||||
entryPath: string;
|
||||
entryDirectory: string;
|
||||
dynamicPages: string[];
|
||||
isDev?: boolean;
|
||||
routesManifest?: RoutesManifest;
|
||||
omittedRoutes?: ReadonlySet<string>;
|
||||
canUsePreviewMode?: boolean;
|
||||
bypassToken?: string;
|
||||
isServerMode?: boolean;
|
||||
dynamicMiddlewareRouteMap?: ReadonlyMap<string, RouteWithSrc>;
|
||||
experimentalPPRRoutes: ReadonlySet<string>;
|
||||
}): Promise<RouteWithSrc[]> {
|
||||
if (routesManifest) {
|
||||
switch (routesManifest.version) {
|
||||
case 1:
|
||||
@@ -389,7 +401,7 @@ export async function getDynamicRoutes(
|
||||
];
|
||||
}
|
||||
|
||||
if (experimentalPPR) {
|
||||
if (experimentalPPRRoutes.has(page)) {
|
||||
let dest = route.dest?.replace(/($|\?)/, '.prefetch.rsc$1');
|
||||
|
||||
if (page === '/' || page === '/index') {
|
||||
@@ -442,7 +454,9 @@ export async function getDynamicRoutes(
|
||||
let getRouteRegex: ((pageName: string) => { re: RegExp }) | undefined =
|
||||
undefined;
|
||||
|
||||
let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;
|
||||
let getSortedRoutes:
|
||||
| ((normalizedPages: ReadonlyArray<string>) => string[])
|
||||
| undefined;
|
||||
|
||||
try {
|
||||
const resolved = require_.resolve('next-server/dist/lib/router/utils', {
|
||||
@@ -645,10 +659,10 @@ export function filterStaticPages(
|
||||
}
|
||||
|
||||
export function getFilesMapFromReasons(
|
||||
fileList: Set<string>,
|
||||
fileList: ReadonlySet<string>,
|
||||
reasons: NodeFileTraceReasons,
|
||||
ignoreFn?: (file: string, parent?: string) => boolean
|
||||
) {
|
||||
): ReadonlyMap<string, Set<string>> {
|
||||
// this uses the reasons tree to collect files specific to a
|
||||
// certain parent allowing us to not have to trace each parent
|
||||
// separately
|
||||
@@ -804,6 +818,7 @@ export interface CreateLambdaFromPseudoLayersOptions
|
||||
layers: PseudoLayer[];
|
||||
isStreaming?: boolean;
|
||||
nextVersion?: string;
|
||||
experimentalAllowBundling?: boolean;
|
||||
}
|
||||
|
||||
// measured with 1, 2, 5, 10, and `os.cpus().length || 5`
|
||||
@@ -815,6 +830,7 @@ export async function createLambdaFromPseudoLayers({
|
||||
layers,
|
||||
isStreaming,
|
||||
nextVersion,
|
||||
experimentalAllowBundling,
|
||||
...lambdaOptions
|
||||
}: CreateLambdaFromPseudoLayersOptions) {
|
||||
await createLambdaSema.acquire();
|
||||
@@ -862,6 +878,7 @@ export async function createLambdaFromPseudoLayers({
|
||||
slug: 'nextjs',
|
||||
version: nextVersion,
|
||||
},
|
||||
experimentalAllowBundling,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -914,6 +931,10 @@ export type NextPrerenderedRoutes = {
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Routes that have their fallback behavior is disabled. All routes would've
|
||||
* been provided in the top-level `routes` key (`staticRoutes`).
|
||||
*/
|
||||
omittedRoutes: {
|
||||
[route: string]: {
|
||||
routeRegex: string;
|
||||
@@ -1293,8 +1314,6 @@ export async function getPrerenderManifest(
|
||||
prefetchDataRouteRegex,
|
||||
};
|
||||
} else {
|
||||
// Fallback behavior is disabled, all routes would've been provided
|
||||
// in the top-level `routes` key (`staticRoutes`).
|
||||
ret.omittedRoutes[lazyRoute] = {
|
||||
experimentalBypassFor,
|
||||
experimentalPPR,
|
||||
@@ -1361,7 +1380,7 @@ async function getSourceFilePathFromPage({
|
||||
}: {
|
||||
workPath: string;
|
||||
page: string;
|
||||
pageExtensions?: string[];
|
||||
pageExtensions?: ReadonlyArray<string>;
|
||||
}) {
|
||||
const usesSrcDir = await usesSrcDirectory(workPath);
|
||||
const extensionsToTry = pageExtensions || ['js', 'jsx', 'ts', 'tsx'];
|
||||
@@ -1502,13 +1521,14 @@ export async function getPageLambdaGroups({
|
||||
internalPages,
|
||||
pageExtensions,
|
||||
inversedAppPathManifest,
|
||||
experimentalAllowBundling,
|
||||
}: {
|
||||
entryPath: string;
|
||||
config: Config;
|
||||
functionsConfigManifest?: FunctionsConfigManifestV1;
|
||||
pages: string[];
|
||||
prerenderRoutes: Set<string>;
|
||||
experimentalPPRRoutes: Set<string> | undefined;
|
||||
pages: ReadonlyArray<string>;
|
||||
prerenderRoutes: ReadonlySet<string>;
|
||||
experimentalPPRRoutes: ReadonlySet<string> | undefined;
|
||||
pageTraces: {
|
||||
[page: string]: {
|
||||
[key: string]: FileFsRef;
|
||||
@@ -1521,9 +1541,10 @@ export async function getPageLambdaGroups({
|
||||
initialPseudoLayer: PseudoLayerResult;
|
||||
initialPseudoLayerUncompressed: number;
|
||||
lambdaCompressedByteLimit: number;
|
||||
internalPages: string[];
|
||||
pageExtensions?: string[];
|
||||
internalPages: ReadonlyArray<string>;
|
||||
pageExtensions?: ReadonlyArray<string>;
|
||||
inversedAppPathManifest?: Record<string, string>;
|
||||
experimentalAllowBundling?: boolean;
|
||||
}) {
|
||||
const groups: Array<LambdaGroup> = [];
|
||||
|
||||
@@ -1563,42 +1584,46 @@ export async function getPageLambdaGroups({
|
||||
opts = { ...vercelConfigOpts, ...opts };
|
||||
}
|
||||
|
||||
let matchingGroup = groups.find(group => {
|
||||
const matches =
|
||||
group.maxDuration === opts.maxDuration &&
|
||||
group.memory === opts.memory &&
|
||||
group.isPrerenders === isPrerenderRoute &&
|
||||
group.isExperimentalPPR === isExperimentalPPR;
|
||||
let matchingGroup = experimentalAllowBundling
|
||||
? undefined
|
||||
: groups.find(group => {
|
||||
const matches =
|
||||
group.maxDuration === opts.maxDuration &&
|
||||
group.memory === opts.memory &&
|
||||
group.isPrerenders === isPrerenderRoute &&
|
||||
group.isExperimentalPPR === isExperimentalPPR;
|
||||
|
||||
if (matches) {
|
||||
let newTracedFilesSize = group.pseudoLayerBytes;
|
||||
let newTracedFilesUncompressedSize = group.pseudoLayerUncompressedBytes;
|
||||
if (matches) {
|
||||
let newTracedFilesSize = group.pseudoLayerBytes;
|
||||
let newTracedFilesUncompressedSize =
|
||||
group.pseudoLayerUncompressedBytes;
|
||||
|
||||
for (const newPage of newPages) {
|
||||
Object.keys(pageTraces[newPage] || {}).map(file => {
|
||||
if (!group.pseudoLayer[file]) {
|
||||
const item = tracedPseudoLayer[file] as PseudoFile;
|
||||
for (const newPage of newPages) {
|
||||
Object.keys(pageTraces[newPage] || {}).map(file => {
|
||||
if (!group.pseudoLayer[file]) {
|
||||
const item = tracedPseudoLayer[file] as PseudoFile;
|
||||
|
||||
newTracedFilesSize += item.compBuffer?.byteLength || 0;
|
||||
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
|
||||
newTracedFilesSize += item.compBuffer?.byteLength || 0;
|
||||
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
|
||||
}
|
||||
});
|
||||
newTracedFilesSize +=
|
||||
compressedPages[newPage].compBuffer.byteLength;
|
||||
newTracedFilesUncompressedSize +=
|
||||
compressedPages[newPage].uncompressedSize;
|
||||
}
|
||||
});
|
||||
newTracedFilesSize += compressedPages[newPage].compBuffer.byteLength;
|
||||
newTracedFilesUncompressedSize +=
|
||||
compressedPages[newPage].uncompressedSize;
|
||||
}
|
||||
|
||||
const underUncompressedLimit =
|
||||
newTracedFilesUncompressedSize <
|
||||
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
|
||||
const underCompressedLimit =
|
||||
newTracedFilesSize <
|
||||
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;
|
||||
const underUncompressedLimit =
|
||||
newTracedFilesUncompressedSize <
|
||||
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
|
||||
const underCompressedLimit =
|
||||
newTracedFilesSize <
|
||||
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;
|
||||
|
||||
return underUncompressedLimit && underCompressedLimit;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
return underUncompressedLimit && underCompressedLimit;
|
||||
}
|
||||
return false;
|
||||
});
|
||||
|
||||
if (matchingGroup) {
|
||||
matchingGroup.pages.push(page);
|
||||
@@ -1906,12 +1931,13 @@ type OnPrerenderRouteArgs = {
|
||||
isServerMode: boolean;
|
||||
canUsePreviewMode: boolean;
|
||||
lambdas: { [key: string]: Lambda };
|
||||
experimentalStreamingLambdaPaths: Map<string, string> | undefined;
|
||||
experimentalStreamingLambdaPaths: ReadonlyMap<string, string> | undefined;
|
||||
prerenders: { [key: string]: Prerender | File };
|
||||
pageLambdaMap: { [key: string]: string };
|
||||
routesManifest?: RoutesManifest;
|
||||
isCorrectNotFoundRoutes?: boolean;
|
||||
isEmptyAllowQueryForPrendered?: boolean;
|
||||
omittedPrerenderRoutes: ReadonlySet<string>;
|
||||
};
|
||||
let prerenderGroup = 1;
|
||||
|
||||
@@ -1948,6 +1974,7 @@ export const onPrerenderRoute =
|
||||
routesManifest,
|
||||
isCorrectNotFoundRoutes,
|
||||
isEmptyAllowQueryForPrendered,
|
||||
omittedPrerenderRoutes,
|
||||
} = prerenderRouteArgs;
|
||||
|
||||
if (isBlocking && isFallback) {
|
||||
@@ -2053,6 +2080,11 @@ export const onPrerenderRoute =
|
||||
|
||||
let isAppPathRoute = false;
|
||||
|
||||
// experimentalPPR signals app path route
|
||||
if (appDir && experimentalPPR) {
|
||||
isAppPathRoute = true;
|
||||
}
|
||||
|
||||
// TODO: leverage manifest to determine app paths more accurately
|
||||
if (appDir && srcRoute && (!dataRoute || dataRoute?.endsWith('.rsc'))) {
|
||||
isAppPathRoute = true;
|
||||
@@ -2184,7 +2216,6 @@ export const onPrerenderRoute =
|
||||
if (routeKey !== '/index' && routeKey.endsWith('/index')) {
|
||||
routeKey = `${routeKey}/index`;
|
||||
routeFileNoExt = routeKey;
|
||||
origRouteFileNoExt = routeKey;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2255,15 +2286,20 @@ export const onPrerenderRoute =
|
||||
const lambdaId = pageLambdaMap[outputSrcPathPage];
|
||||
lambda = lambdas[lambdaId];
|
||||
} else {
|
||||
const outputSrcPathPage = normalizeIndexOutput(
|
||||
let outputSrcPathPage =
|
||||
srcRoute == null
|
||||
? outputPathPageOrig
|
||||
: path.posix.join(
|
||||
entryDirectory,
|
||||
srcRoute === '/' ? '/index' : srcRoute
|
||||
),
|
||||
isServerMode
|
||||
);
|
||||
);
|
||||
|
||||
if (!isAppPathRoute) {
|
||||
outputSrcPathPage = normalizeIndexOutput(
|
||||
outputSrcPathPage,
|
||||
isServerMode
|
||||
);
|
||||
}
|
||||
|
||||
lambda = lambdas[outputSrcPathPage];
|
||||
}
|
||||
@@ -2363,25 +2399,31 @@ export const onPrerenderRoute =
|
||||
sourcePath = srcRoute;
|
||||
}
|
||||
|
||||
// The `experimentalStreamingLambdaPaths` stores the page without the
|
||||
// leading `/` and with the `/` rewritten to be `index`. We should
|
||||
// normalize the key so that it matches that key in the map.
|
||||
let key = srcRoute || routeKey;
|
||||
if (key === '/') {
|
||||
key = 'index';
|
||||
} else {
|
||||
if (!key.startsWith('/')) {
|
||||
throw new Error("Invariant: key doesn't start with /");
|
||||
let experimentalStreamingLambdaPath: string | undefined;
|
||||
if (experimentalPPR) {
|
||||
if (!experimentalStreamingLambdaPaths) {
|
||||
throw new Error(
|
||||
"Invariant: experimentalStreamingLambdaPaths doesn't exist"
|
||||
);
|
||||
}
|
||||
|
||||
key = key.substring(1);
|
||||
// If a source route exists, and it's not listed as an omitted route,
|
||||
// then use the src route as the basis for the experimental streaming
|
||||
// lambda path. If the route doesn't have a source route or it's not
|
||||
// omitted, then use the more specific `routeKey` as the basis.
|
||||
if (srcRoute && !omittedPrerenderRoutes.has(srcRoute)) {
|
||||
experimentalStreamingLambdaPath =
|
||||
experimentalStreamingLambdaPaths.get(
|
||||
pathnameToOutputName(entryDirectory, srcRoute)
|
||||
);
|
||||
} else {
|
||||
experimentalStreamingLambdaPath =
|
||||
experimentalStreamingLambdaPaths.get(
|
||||
pathnameToOutputName(entryDirectory, routeKey)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
key = path.posix.join(entryDirectory, key);
|
||||
|
||||
const experimentalStreamingLambdaPath =
|
||||
experimentalStreamingLambdaPaths?.get(key);
|
||||
|
||||
prerenders[outputPathPage] = new Prerender({
|
||||
expiration: initialRevalidate,
|
||||
lambda,
|
||||
@@ -2464,11 +2506,18 @@ export const onPrerenderRoute =
|
||||
routesManifest,
|
||||
locale
|
||||
);
|
||||
const localeOutputPathPage = normalizeIndexOutput(
|
||||
path.posix.join(entryDirectory, localeRouteFileNoExt),
|
||||
isServerMode
|
||||
let localeOutputPathPage = path.posix.join(
|
||||
entryDirectory,
|
||||
localeRouteFileNoExt
|
||||
);
|
||||
|
||||
if (!isAppPathRoute) {
|
||||
localeOutputPathPage = normalizeIndexOutput(
|
||||
localeOutputPathPage,
|
||||
isServerMode
|
||||
);
|
||||
}
|
||||
|
||||
const origPrerenderPage = prerenders[outputPathPage];
|
||||
prerenders[localeOutputPathPage] = {
|
||||
...origPrerenderPage,
|
||||
@@ -2577,6 +2626,10 @@ export async function getStaticFiles(
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Strips the trailing `/index` from the output name if it's not the root if
|
||||
* the server mode is enabled.
|
||||
*/
|
||||
export function normalizeIndexOutput(
|
||||
outputName: string,
|
||||
isServerMode: boolean
|
||||
@@ -2597,6 +2650,19 @@ export function getNextServerPath(nextVersion: string) {
|
||||
: 'next/dist/next-server/server';
|
||||
}
|
||||
|
||||
export function pathnameToOutputName(entryDirectory: string, pathname: string) {
|
||||
if (pathname === '/') pathname = '/index';
|
||||
return path.posix.join(entryDirectory, pathname);
|
||||
}
|
||||
|
||||
export function getPostponeResumePathname(
|
||||
entryDirectory: string,
|
||||
pathname: string
|
||||
): string {
|
||||
if (pathname === '/') pathname = '/index';
|
||||
return path.posix.join(entryDirectory, '_next/postponed/resume', pathname);
|
||||
}
|
||||
|
||||
// update to leverage
|
||||
export function updateRouteSrc(
|
||||
route: Route,
|
||||
@@ -2805,7 +2871,7 @@ export async function getMiddlewareBundle({
|
||||
appPathRoutesManifest: Record<string, string>;
|
||||
}): Promise<{
|
||||
staticRoutes: Route[];
|
||||
dynamicRouteMap: Map<string, RouteWithSrc>;
|
||||
dynamicRouteMap: ReadonlyMap<string, RouteWithSrc>;
|
||||
edgeFunctions: Record<string, EdgeFunction>;
|
||||
}> {
|
||||
const middlewareManifest = await getMiddlewareManifest(
|
||||
@@ -2969,14 +3035,17 @@ export async function getMiddlewareBundle({
|
||||
}
|
||||
|
||||
if (routesManifest?.basePath) {
|
||||
shortPath = normalizeIndexOutput(
|
||||
path.posix.join(
|
||||
'./',
|
||||
routesManifest?.basePath,
|
||||
shortPath.replace(/^\//, '')
|
||||
),
|
||||
true
|
||||
const isAppPathRoute = !!appPathRoutesManifest[shortPath];
|
||||
|
||||
shortPath = path.posix.join(
|
||||
'./',
|
||||
routesManifest?.basePath,
|
||||
shortPath.replace(/^\//, '')
|
||||
);
|
||||
|
||||
if (!isAppPathRoute) {
|
||||
shortPath = normalizeIndexOutput(shortPath, true);
|
||||
}
|
||||
}
|
||||
|
||||
worker.edgeFunction.name = shortPath;
|
||||
@@ -3238,7 +3307,8 @@ export function isApiPage(page: string | undefined) {
|
||||
.match(/(serverless|server)\/pages\/api(\/|\.js$)/);
|
||||
}
|
||||
|
||||
export type VariantsManifest = Record<
|
||||
/** @deprecated */
|
||||
export type VariantsManifestLegacy = Record<
|
||||
string,
|
||||
{
|
||||
defaultValue?: unknown;
|
||||
@@ -3249,7 +3319,7 @@ export type VariantsManifest = Record<
|
||||
export async function getVariantsManifest(
|
||||
entryPath: string,
|
||||
outputDirectory: string
|
||||
): Promise<null | VariantsManifest> {
|
||||
): Promise<null | VariantsManifestLegacy> {
|
||||
const pathVariantsManifest = path.join(
|
||||
entryPath,
|
||||
outputDirectory,
|
||||
@@ -3263,7 +3333,7 @@ export async function getVariantsManifest(
|
||||
|
||||
if (!hasVariantsManifest) return null;
|
||||
|
||||
const variantsManifest: VariantsManifest = await fs.readJSON(
|
||||
const variantsManifest: VariantsManifestLegacy = await fs.readJSON(
|
||||
pathVariantsManifest
|
||||
);
|
||||
|
||||
@@ -3285,7 +3355,7 @@ export async function getServerlessPages(params: {
|
||||
glob('**/route.js', appDir),
|
||||
glob('**/_not-found.js', appDir),
|
||||
]).then(items => Object.assign(...items))
|
||||
: Promise.resolve({}),
|
||||
: Promise.resolve({} as Record<string, FileFsRef>),
|
||||
getMiddlewareManifest(params.entryPath, params.outputDirectory),
|
||||
]);
|
||||
|
||||
|
||||
@@ -22,7 +22,7 @@
|
||||
"redirect": "manual"
|
||||
},
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -46,7 +46,7 @@
|
||||
"redirect": "manual"
|
||||
},
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -70,7 +70,7 @@
|
||||
"redirect": "manual"
|
||||
},
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
|
||||
7
packages/next/test/fixtures/00-app-dir-no-ppr/app/dynamic-index/[slug]/index/page.js
vendored
Normal file
7
packages/next/test/fixtures/00-app-dir-no-ppr/app/dynamic-index/[slug]/index/page.js
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
export default function Page() {
|
||||
return (
|
||||
<>
|
||||
<p>dynamic-index</p>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -18,7 +18,22 @@
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{
|
||||
{
|
||||
"path": "/dynamic-index/hello/index",
|
||||
"status": 200,
|
||||
"mustContain": "dynamic-index"
|
||||
},
|
||||
{
|
||||
"path": "/dynamic-index/hello/index",
|
||||
"status": 200,
|
||||
"mustContain": ":",
|
||||
"mustNotContain": "<html",
|
||||
"headers": {
|
||||
"RSC": 1,
|
||||
"Next-Router-Prefetch": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/rewritten-to-dashboard",
|
||||
"status": 200,
|
||||
"mustContain": "html"
|
||||
|
||||
18
packages/next/test/fixtures/00-app-dir-ppr-full/app/no-fallback/[slug]/page.jsx
vendored
Normal file
18
packages/next/test/fixtures/00-app-dir-ppr-full/app/no-fallback/[slug]/page.jsx
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
import React, { Suspense } from 'react'
|
||||
import { Dynamic } from '../../../components/dynamic'
|
||||
|
||||
export const dynamicParams = false;
|
||||
|
||||
const slugs = ['a', 'b', 'c'];
|
||||
|
||||
export function generateStaticParams() {
|
||||
return slugs.map((slug) => ({ slug }));
|
||||
}
|
||||
|
||||
export default function NoFallbackPage({ params: { slug } }) {
|
||||
return (
|
||||
<Suspense fallback={<Dynamic pathname={`/no-fallback/${slug}`} fallback />}>
|
||||
<Dynamic pathname={`/no-fallback/${slug}`} />
|
||||
</Suspense>
|
||||
)
|
||||
}
|
||||
@@ -19,11 +19,23 @@ const pages = [
|
||||
{ pathname: '/no-suspense/nested/a', dynamic: true },
|
||||
{ pathname: '/no-suspense/nested/b', dynamic: true },
|
||||
{ pathname: '/no-suspense/nested/c', dynamic: true },
|
||||
{ pathname: '/no-fallback/a', dynamic: true },
|
||||
{ pathname: '/no-fallback/b', dynamic: true },
|
||||
{ pathname: '/no-fallback/c', dynamic: true },
|
||||
// TODO: uncomment when we've fixed the 404 case for force-dynamic pages
|
||||
// { pathname: '/dynamic/force-dynamic', dynamic: 'force-dynamic' },
|
||||
{ pathname: '/dynamic/force-static', dynamic: 'force-static' },
|
||||
];
|
||||
|
||||
const cases = {
|
||||
404: [
|
||||
// For routes that do not support fallback (they had `dynamicParams` set to
|
||||
// `false`), we shouldn't see any fallback behavior for routes not defined
|
||||
// in `getStaticParams`.
|
||||
{ pathname: '/no-fallback/non-existent' },
|
||||
],
|
||||
};
|
||||
|
||||
const ctx = {};
|
||||
|
||||
describe(`${__dirname.split(path.sep).pop()}`, () => {
|
||||
@@ -49,6 +61,14 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
|
||||
expect(html).toContain('</html>');
|
||||
}
|
||||
);
|
||||
|
||||
it.each(cases[404])(
|
||||
'should return 404 for $pathname',
|
||||
async ({ pathname }) => {
|
||||
const res = await fetch(`${ctx.deploymentUrl}${pathname}`);
|
||||
expect(res.status).toEqual(404);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe('prefetch RSC payloads should return', () => {
|
||||
@@ -88,6 +108,16 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
it.each(cases[404])(
|
||||
'should return 404 for $pathname',
|
||||
async ({ pathname }) => {
|
||||
const res = await fetch(`${ctx.deploymentUrl}${pathname}`, {
|
||||
headers: { RSC: 1, 'Next-Router-Prefetch': '1' },
|
||||
});
|
||||
expect(res.status).toEqual(404);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
describe('dynamic RSC payloads should return', () => {
|
||||
@@ -122,5 +152,15 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
|
||||
expect(text).not.toContain(expected);
|
||||
}
|
||||
});
|
||||
|
||||
it.each(cases[404])(
|
||||
'should return 404 for $pathname',
|
||||
async ({ pathname }) => {
|
||||
const res = await fetch(`${ctx.deploymentUrl}${pathname}`, {
|
||||
headers: { RSC: 1 },
|
||||
});
|
||||
expect(res.status).toEqual(404);
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"next": "canary",
|
||||
"react": "experimental",
|
||||
"react-dom": "experimental"
|
||||
"react": "18.2.0",
|
||||
"react-dom": "18.2.0"
|
||||
},
|
||||
"ignoreNextjsUpdates": true
|
||||
}
|
||||
|
||||
7
packages/next/test/fixtures/00-app-dir-ppr/app/dynamic-index/[slug]/index/page.js
vendored
Normal file
7
packages/next/test/fixtures/00-app-dir-ppr/app/dynamic-index/[slug]/index/page.js
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
export default function Page() {
|
||||
return (
|
||||
<>
|
||||
<p>dynamic-index</p>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -18,6 +18,21 @@
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{
|
||||
"path": "/dynamic-index/hello/index",
|
||||
"status": 200,
|
||||
"mustContain": "dynamic-index"
|
||||
},
|
||||
{
|
||||
"path": "/dynamic-index/hello/index",
|
||||
"status": 200,
|
||||
"mustContain": ":",
|
||||
"mustNotContain": "<html",
|
||||
"headers": {
|
||||
"RSC": 1,
|
||||
"Next-Router-Prefetch": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/rewritten-to-dashboard",
|
||||
"status": 200,
|
||||
@@ -132,7 +147,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from /ssg",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -148,7 +163,7 @@
|
||||
"path": "/ssg",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -160,7 +175,7 @@
|
||||
"path": "/ssg",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch",
|
||||
"content-type": "text/x-component"
|
||||
},
|
||||
"headers": {
|
||||
@@ -195,14 +210,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard/deployments/[id]/settings",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/123/settings",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -215,14 +230,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "catchall",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/catchall/something",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -235,7 +250,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -255,7 +270,7 @@
|
||||
},
|
||||
"responseHeaders": {
|
||||
"content-type": "text/x-component",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -31,7 +31,7 @@
|
||||
},
|
||||
"responseHeaders": {
|
||||
"content-type": "text/x-component",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -6,7 +6,11 @@ module.exports = {
|
||||
rewrites: async () => {
|
||||
return [
|
||||
{
|
||||
source: '/rewritten-to-dashboard',
|
||||
source: '/rewritten-to-dashboard/',
|
||||
destination: '/dashboard/',
|
||||
},
|
||||
{
|
||||
source: '/:locale/t/size-chart/:chart/',
|
||||
destination: '/dashboard',
|
||||
},
|
||||
];
|
||||
|
||||
@@ -6,6 +6,30 @@
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{
|
||||
"path": "/en-us/t/size-chart/mens/",
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard"
|
||||
},
|
||||
{
|
||||
"path": "/en-us/t/size-chart/mens/",
|
||||
"status": 200,
|
||||
"headers": {
|
||||
"RSC": 1,
|
||||
"Next-Router-Prefetch": 1
|
||||
},
|
||||
"mustContain": ":{",
|
||||
"mustNotContain": ".prefetch"
|
||||
},
|
||||
{
|
||||
"path": "/en-us/t/size-chart/mens/",
|
||||
"status": 200,
|
||||
"headers": {
|
||||
"RSC": 1
|
||||
},
|
||||
"mustContain": ":{",
|
||||
"mustNotContain": ".rsc"
|
||||
},
|
||||
{
|
||||
"path": "/dynamic/category-1/id-1/",
|
||||
"status": 200,
|
||||
@@ -23,14 +47,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from /ssg",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/ssg/",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -63,14 +87,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard/deployments/[id]/settings",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/123/settings/",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -83,14 +107,14 @@
|
||||
"status": 200,
|
||||
"mustContain": "catchall",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
"path": "/dashboard/deployments/catchall/something/",
|
||||
"status": 200,
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
},
|
||||
"headers": {
|
||||
"RSC": "1"
|
||||
@@ -103,7 +127,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "hello from app/dashboard",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -123,7 +147,7 @@
|
||||
},
|
||||
"responseHeaders": {
|
||||
"content-type": "text/x-component",
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
"status": 200,
|
||||
"mustContain": "about",
|
||||
"responseHeaders": {
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch, Next-Url"
|
||||
"vary": "RSC, Next-Router-State-Tree, Next-Router-Prefetch"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -1,5 +1,38 @@
|
||||
# @vercel/node
|
||||
|
||||
## 3.0.21
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
|
||||
- @vercel/build-utils@7.7.1
|
||||
|
||||
## 3.0.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
|
||||
|
||||
## 3.0.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- build: upgrade edge-runtime ([#11148](https://github.com/vercel/vercel/pull/11148))
|
||||
|
||||
- refactor: simplify content-length check ([#11150](https://github.com/vercel/vercel/pull/11150))
|
||||
|
||||
- Updated dependencies [[`24c3dd282`](https://github.com/vercel/vercel/commit/24c3dd282d7714cd63d2b94fb94745c45fdc79ab)]:
|
||||
- @vercel/build-utils@7.7.0
|
||||
|
||||
## 3.0.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
|
||||
|
||||
- Updated dependencies [[`b6ed28b9b`](https://github.com/vercel/vercel/commit/b6ed28b9b1712f882c93fe053b70d3eb1df21819), [`8ba0ce932`](https://github.com/vercel/vercel/commit/8ba0ce932434c6295fedb5307bee59a804b7e6a8), [`0d034b682`](https://github.com/vercel/vercel/commit/0d034b6820c0f3252949c0ffc483048c5aac7f04), [`abaa700ce`](https://github.com/vercel/vercel/commit/abaa700cea44c723cfc851baa2dfe9e1ae2e8a5c)]:
|
||||
- @vercel/build-utils@7.6.0
|
||||
|
||||
## 3.0.17
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/node",
|
||||
"version": "3.0.17",
|
||||
"version": "3.0.21",
|
||||
"license": "Apache-2.0",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
|
||||
@@ -20,16 +20,18 @@
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@edge-runtime/node-utils": "2.2.1",
|
||||
"@edge-runtime/primitives": "4.0.5",
|
||||
"@edge-runtime/vm": "3.1.7",
|
||||
"@edge-runtime/node-utils": "2.3.0",
|
||||
"@edge-runtime/primitives": "4.1.0",
|
||||
"@edge-runtime/vm": "3.2.0",
|
||||
"@types/node": "14.18.33",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/nft": "0.26.2",
|
||||
"@vercel/nft": "0.26.4",
|
||||
"@vercel/static-config": "3.0.0",
|
||||
"async-listen": "3.0.0",
|
||||
"edge-runtime": "2.5.7",
|
||||
"cjs-module-lexer": "1.2.3",
|
||||
"edge-runtime": "2.5.9",
|
||||
"es-module-lexer": "1.4.1",
|
||||
"esbuild": "0.14.47",
|
||||
"etag": "1.8.1",
|
||||
"node-fetch": "2.6.9",
|
||||
|
||||
@@ -11,11 +11,17 @@ import type { VercelProxyResponse } from './types.js';
|
||||
import { Config } from '@vercel/build-utils';
|
||||
import { createEdgeEventHandler } from './edge-functions/edge-handler.mjs';
|
||||
import { createServer, IncomingMessage, ServerResponse } from 'http';
|
||||
import { createServerlessEventHandler } from './serverless-functions/serverless-handler.mjs';
|
||||
import {
|
||||
createServerlessEventHandler,
|
||||
HTTP_METHODS,
|
||||
} from './serverless-functions/serverless-handler.mjs';
|
||||
import { isEdgeRuntime, logError, validateConfiguredRuntime } from './utils.js';
|
||||
import { init, parse as parseEsm } from 'es-module-lexer';
|
||||
import { parse as parseCjs } from 'cjs-module-lexer';
|
||||
import { getConfig } from '@vercel/static-config';
|
||||
import { Project } from 'ts-morph';
|
||||
import { listen } from 'async-listen';
|
||||
import { readFile } from 'fs/promises';
|
||||
|
||||
const parseConfig = (entryPointPath: string) =>
|
||||
getConfig(new Project(), entryPointPath);
|
||||
@@ -46,12 +52,31 @@ async function createEventHandler(
|
||||
);
|
||||
}
|
||||
|
||||
const content = await readFile(entrypointPath, 'utf8');
|
||||
|
||||
const isStreaming =
|
||||
staticConfig?.supportsResponseStreaming ||
|
||||
(await hasWebHandlers(async () => parseCjs(content).exports)) ||
|
||||
(await hasWebHandlers(async () =>
|
||||
init.then(() => parseEsm(content)[1].map(specifier => specifier.n))
|
||||
));
|
||||
|
||||
return createServerlessEventHandler(entrypointPath, {
|
||||
mode: staticConfig?.supportsResponseStreaming ? 'streaming' : 'buffer',
|
||||
mode: isStreaming ? 'streaming' : 'buffer',
|
||||
shouldAddHelpers: options.shouldAddHelpers,
|
||||
});
|
||||
}
|
||||
|
||||
async function hasWebHandlers(getExports: () => Promise<string[]>) {
|
||||
const exports = await getExports().catch(() => []);
|
||||
for (const name of exports) {
|
||||
if (HTTP_METHODS.includes(name)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
let handleEvent: (request: IncomingMessage) => Promise<VercelProxyResponse>;
|
||||
let handlerEventError: Error;
|
||||
let onExit: (() => Promise<void>) | undefined;
|
||||
|
||||
@@ -230,8 +230,10 @@ export async function createEdgeEventHandler(
|
||||
}
|
||||
|
||||
const body: Buffer | string | undefined = await serializeBody(request);
|
||||
if (body !== undefined)
|
||||
|
||||
if (body !== undefined && body.length) {
|
||||
request.headers['content-length'] = String(body.length);
|
||||
}
|
||||
|
||||
const url = new URL(request.url ?? '/', server.url);
|
||||
const response = await undiciRequest(url, {
|
||||
|
||||
@@ -1,26 +1,7 @@
|
||||
import type { ServerResponse, IncomingMessage } from 'http';
|
||||
import type { NodeHandler } from '@edge-runtime/node-utils';
|
||||
import { buildToNodeHandler } from '@edge-runtime/node-utils';
|
||||
|
||||
class FetchEvent {
|
||||
public request: Request;
|
||||
public awaiting: Set<Promise<void>>;
|
||||
public response: Response | null;
|
||||
|
||||
constructor(request: Request) {
|
||||
this.request = request;
|
||||
this.response = null;
|
||||
this.awaiting = new Set();
|
||||
}
|
||||
|
||||
respondWith(response: Response) {
|
||||
this.response = response;
|
||||
}
|
||||
|
||||
waitUntil() {
|
||||
throw new Error('waitUntil is not implemented yet for Node.js');
|
||||
}
|
||||
}
|
||||
import Edge from '@edge-runtime/primitives';
|
||||
|
||||
const webHandlerToNodeHandler = buildToNodeHandler(
|
||||
{
|
||||
@@ -32,8 +13,8 @@ const webHandlerToNodeHandler = buildToNodeHandler(
|
||||
super(input, addDuplexToInit(init));
|
||||
}
|
||||
},
|
||||
Uint8Array: Uint8Array,
|
||||
FetchEvent: FetchEvent,
|
||||
Uint8Array,
|
||||
FetchEvent: Edge.FetchEvent,
|
||||
},
|
||||
{ defaultOrigin: 'https://vercel.com' }
|
||||
);
|
||||
|
||||
@@ -27,7 +27,7 @@ type ServerlessFunctionSignature = (
|
||||
const [NODE_MAJOR] = process.versions.node.split('.').map(v => Number(v));
|
||||
|
||||
/* https://nextjs.org/docs/app/building-your-application/routing/router-handlers#supported-http-methods */
|
||||
const HTTP_METHODS = [
|
||||
export const HTTP_METHODS = [
|
||||
'GET',
|
||||
'HEAD',
|
||||
'OPTIONS',
|
||||
|
||||
@@ -1,10 +0,0 @@
|
||||
/* global Response */
|
||||
|
||||
const baseUrl = ({ headers }) =>
|
||||
`${headers.get('x-forwarded-proto')}://${headers.get('x-forwarded-host')}`;
|
||||
|
||||
export function GET(request) {
|
||||
const { searchParams } = new URL(request.url, baseUrl(request));
|
||||
const name = searchParams.get('name');
|
||||
return new Response(`Greetings, ${name}`);
|
||||
}
|
||||
50
packages/node/test/dev-fixtures/web-handlers-edge.js
Normal file
50
packages/node/test/dev-fixtures/web-handlers-edge.js
Normal file
@@ -0,0 +1,50 @@
|
||||
/* global ReadableStream, TextEncoderStream, Response */
|
||||
|
||||
export const config = { runtime: 'edge' };
|
||||
|
||||
const DEFER_MS = 10;
|
||||
|
||||
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
const streaming =
|
||||
text =>
|
||||
(_, { waitUntil }) => {
|
||||
const DATA = text.split(' ');
|
||||
let index = 0;
|
||||
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
while (index < DATA.length) {
|
||||
const data = DATA[index++];
|
||||
let chunk = data;
|
||||
if (index !== DATA.length) chunk += ' ';
|
||||
controller.enqueue(chunk);
|
||||
await wait(DEFER_MS);
|
||||
}
|
||||
controller.close();
|
||||
},
|
||||
}).pipeThrough(new TextEncoderStream());
|
||||
|
||||
waitUntil(wait(DATA.length * DEFER_MS));
|
||||
|
||||
return new Response(readable, {
|
||||
headers: {
|
||||
'Content-Type': 'text/plain',
|
||||
'x-web-handler': text,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const GET = streaming('Web handler using GET');
|
||||
|
||||
export const HEAD = streaming('Web handler using HEAD');
|
||||
|
||||
export const OPTIONS = streaming('Web handler using OPTIONS');
|
||||
|
||||
export const POST = streaming('Web handler using POST');
|
||||
|
||||
export const PUT = streaming('Web handler using PUT');
|
||||
|
||||
export const DELETE = streaming('Web handler using DELETE');
|
||||
|
||||
export const PATCH = streaming('Web handler using PATCH');
|
||||
48
packages/node/test/dev-fixtures/web-handlers-node.js
Normal file
48
packages/node/test/dev-fixtures/web-handlers-node.js
Normal file
@@ -0,0 +1,48 @@
|
||||
/* global ReadableStream, TextEncoderStream, Response */
|
||||
|
||||
const DEFER_MS = 10;
|
||||
|
||||
const wait = ms => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
const streaming =
|
||||
text =>
|
||||
(_, { waitUntil }) => {
|
||||
const DATA = text.split(' ');
|
||||
let index = 0;
|
||||
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
while (index < DATA.length) {
|
||||
const data = DATA[index++];
|
||||
let chunk = data;
|
||||
if (index !== DATA.length) chunk += ' ';
|
||||
controller.enqueue(chunk);
|
||||
await wait(DEFER_MS);
|
||||
}
|
||||
controller.close();
|
||||
},
|
||||
}).pipeThrough(new TextEncoderStream());
|
||||
|
||||
waitUntil(wait(DATA.length * DEFER_MS));
|
||||
|
||||
return new Response(readable, {
|
||||
headers: {
|
||||
'Content-Type': 'text/plain',
|
||||
'x-web-handler': text,
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
export const GET = streaming('Web handler using GET');
|
||||
|
||||
export const HEAD = streaming('Web handler using HEAD');
|
||||
|
||||
export const OPTIONS = streaming('Web handler using OPTIONS');
|
||||
|
||||
export const POST = streaming('Web handler using POST');
|
||||
|
||||
export const PUT = streaming('Web handler using PUT');
|
||||
|
||||
export const DELETE = streaming('Web handler using DELETE');
|
||||
|
||||
export const PATCH = streaming('Web handler using PATCH');
|
||||
@@ -30,9 +30,9 @@ function testForkDevServer(entrypoint: string) {
|
||||
}
|
||||
|
||||
(NODE_MAJOR < 18 ? test.skip : test)(
|
||||
'runs an serverless function that exports GET',
|
||||
'web handlers for node runtime',
|
||||
async () => {
|
||||
const child = testForkDevServer('./serverless-response.js');
|
||||
const child = testForkDevServer('./web-handlers-node.js');
|
||||
try {
|
||||
const result = await readMessage(child);
|
||||
if (result.state !== 'message') {
|
||||
@@ -43,20 +43,251 @@ function testForkDevServer(entrypoint: string) {
|
||||
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/serverless-response?name=Vercel`
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'GET' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
}).toEqual({ status: 200, body: 'Greetings, Vercel' });
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using GET',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using GET',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'POST' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using POST',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using POST',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'DELETE' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using DELETE',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using DELETE',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'PUT' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PUT',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PUT',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'PATCH' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PATCH',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PATCH',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'HEAD' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
'x-web-handler': 'Web handler using HEAD',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-node`,
|
||||
{ method: 'OPTIONS' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using OPTIONS',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using OPTIONS',
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
child.kill(9);
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
(NODE_MAJOR < 18 ? test.skip : test)(
|
||||
'web handlers for edge runtime',
|
||||
async () => {
|
||||
const child = testForkDevServer('./web-handlers-edge.js');
|
||||
try {
|
||||
const result = await readMessage(child);
|
||||
if (result.state !== 'message') {
|
||||
throw new Error('Exited. error: ' + JSON.stringify(result.value));
|
||||
}
|
||||
|
||||
const { address, port } = result.value;
|
||||
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/serverless-response?name=Vercel`,
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'GET' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using GET',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using GET',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'POST' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using POST',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using POST',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'DELETE' }
|
||||
);
|
||||
|
||||
console.log(response);
|
||||
expect({
|
||||
status: response.status,
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using DELETE',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'PUT' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PUT',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PUT',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'PATCH' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using PATCH',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using PATCH',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'HEAD' }
|
||||
);
|
||||
expect({ status: response.status }).toEqual({ status: 405 });
|
||||
expect({
|
||||
status: response.status,
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
'x-web-handler': 'Web handler using HEAD',
|
||||
});
|
||||
}
|
||||
{
|
||||
const response = await fetch(
|
||||
`http://${address}:${port}/api/web-handlers-edge`,
|
||||
{ method: 'OPTIONS' }
|
||||
);
|
||||
expect({
|
||||
status: response.status,
|
||||
body: await response.text(),
|
||||
transferEncoding: response.headers.get('transfer-encoding'),
|
||||
'x-web-handler': response.headers.get('x-web-handler'),
|
||||
}).toEqual({
|
||||
status: 200,
|
||||
body: 'Web handler using OPTIONS',
|
||||
transferEncoding: 'chunked',
|
||||
'x-web-handler': 'Web handler using OPTIONS',
|
||||
});
|
||||
}
|
||||
} finally {
|
||||
child.kill(9);
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"@types/jest": "27.4.1",
|
||||
"@types/node": "14.18.33",
|
||||
"@types/which": "3.0.0",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "11.1.1",
|
||||
"jest-junit": "16.0.0",
|
||||
|
||||
@@ -1,5 +1,17 @@
|
||||
# @vercel/redwood
|
||||
|
||||
## 2.0.8
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
|
||||
|
||||
## 2.0.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
|
||||
|
||||
## 2.0.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/redwood",
|
||||
"version": "2.0.6",
|
||||
"version": "2.0.8",
|
||||
"main": "./dist/index.js",
|
||||
"license": "Apache-2.0",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
@@ -20,7 +20,7 @@
|
||||
"type-check": "tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/nft": "0.26.2",
|
||||
"@vercel/nft": "0.26.4",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"semver": "6.3.1"
|
||||
},
|
||||
@@ -28,7 +28,7 @@
|
||||
"@types/aws-lambda": "8.10.19",
|
||||
"@types/node": "14.18.33",
|
||||
"@types/semver": "6.0.0",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"execa": "3.2.0",
|
||||
"fs-extra": "11.1.0",
|
||||
"jest-junit": "16.0.0"
|
||||
|
||||
@@ -1,5 +1,45 @@
|
||||
# @vercel/remix-builder
|
||||
|
||||
## 2.1.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
|
||||
|
||||
- Remove usage of `ensureResolvable()` in Vite builds ([#11213](https://github.com/vercel/vercel/pull/11213))
|
||||
|
||||
- Update `@remix-run/dev` fork to v2.8.0 ([#11206](https://github.com/vercel/vercel/pull/11206))
|
||||
|
||||
- Ensure the symlink directory exists in `ensureSymlink()` ([#11205](https://github.com/vercel/vercel/pull/11205))
|
||||
|
||||
## 2.1.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Remix Vite plugin support ([#11031](https://github.com/vercel/vercel/pull/11031))
|
||||
|
||||
## 2.0.20
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Don't install Remix fork when not using split configuration ([#11152](https://github.com/vercel/vercel/pull/11152))
|
||||
|
||||
- Add `serverBundles` post-build sanity check and fallback ([#11153](https://github.com/vercel/vercel/pull/11153))
|
||||
|
||||
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
|
||||
|
||||
- Update `@remix-run/dev` fork to v2.6.0 ([#11162](https://github.com/vercel/vercel/pull/11162))
|
||||
|
||||
- Update `@remix-run/dev` fork to v2.7.0 ([#11180](https://github.com/vercel/vercel/pull/11180))
|
||||
|
||||
- Update `@remix-run/dev` fork to v2.7.2 ([#11186](https://github.com/vercel/vercel/pull/11186))
|
||||
|
||||
## 2.0.19
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [node][next][redwood][remix] bump `@vercel/nft@0.26.3` ([#11115](https://github.com/vercel/vercel/pull/11115))
|
||||
|
||||
## 2.0.18
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
{
|
||||
"name": "@vercel/remix-builder",
|
||||
"version": "2.0.18",
|
||||
"version": "2.1.1",
|
||||
"license": "Apache-2.0",
|
||||
"main": "./dist/index.js",
|
||||
"homepage": "https://vercel.com/docs",
|
||||
"sideEffects": false,
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vercel/vercel.git",
|
||||
@@ -13,7 +14,7 @@
|
||||
"build": "node ../../utils/build-builder.mjs",
|
||||
"test": "jest --reporters=default --reporters=jest-junit --env node --verbose --bail --runInBand",
|
||||
"test-unit": "pnpm test test/unit.*test.*",
|
||||
"test-e2e": "pnpm test test/integration.test.ts",
|
||||
"test-e2e": "pnpm test test/integration-*.test.ts",
|
||||
"type-check": "tsc --noEmit"
|
||||
},
|
||||
"files": [
|
||||
@@ -21,16 +22,17 @@
|
||||
"defaults"
|
||||
],
|
||||
"dependencies": {
|
||||
"@vercel/nft": "0.26.2",
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/nft": "0.26.4",
|
||||
"@vercel/static-config": "3.0.0",
|
||||
"ts-morph": "12.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.5.1",
|
||||
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.8.0",
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "14.18.33",
|
||||
"@types/semver": "7.3.13",
|
||||
"@vercel/build-utils": "7.5.1",
|
||||
"@vercel/build-utils": "7.7.1",
|
||||
"jest-junit": "16.0.0",
|
||||
"path-to-regexp": "6.2.1",
|
||||
"semver": "7.5.2"
|
||||
|
||||
814
packages/remix/src/build-legacy.ts
Normal file
814
packages/remix/src/build-legacy.ts
Normal file
@@ -0,0 +1,814 @@
|
||||
import { Project } from 'ts-morph';
|
||||
import { readFileSync, promises as fs, existsSync } from 'fs';
|
||||
import { basename, dirname, extname, join, posix, relative, sep } from 'path';
|
||||
import {
|
||||
debug,
|
||||
download,
|
||||
execCommand,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
getEnvForPackageManager,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
glob,
|
||||
EdgeFunction,
|
||||
NodejsLambda,
|
||||
rename,
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
scanParentDirs,
|
||||
} from '@vercel/build-utils';
|
||||
import { getConfig } from '@vercel/static-config';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import type {
|
||||
BuildV2,
|
||||
Files,
|
||||
NodeVersion,
|
||||
PackageJson,
|
||||
BuildResultV2Typical,
|
||||
} from '@vercel/build-utils';
|
||||
import type { ConfigRoute } from '@remix-run/dev/dist/config/routes';
|
||||
import type { BaseFunctionConfig } from '@vercel/static-config';
|
||||
import {
|
||||
calculateRouteConfigHash,
|
||||
findConfig,
|
||||
getPathFromRoute,
|
||||
getRegExpFromPath,
|
||||
getResolvedRouteConfig,
|
||||
isLayoutRoute,
|
||||
ResolvedRouteConfig,
|
||||
ResolvedNodeRouteConfig,
|
||||
ResolvedEdgeRouteConfig,
|
||||
findEntry,
|
||||
chdirAndReadConfig,
|
||||
resolveSemverMinMax,
|
||||
ensureResolvable,
|
||||
isESM,
|
||||
} from './utils';
|
||||
import { patchHydrogenServer } from './hydrogen';
|
||||
|
||||
interface ServerBundle {
|
||||
serverBuildPath: string;
|
||||
routes: string[];
|
||||
}
|
||||
|
||||
const remixBuilderPkg = JSON.parse(
|
||||
readFileSync(join(__dirname, '../package.json'), 'utf8')
|
||||
);
|
||||
const remixRunDevForkVersion =
|
||||
remixBuilderPkg.devDependencies['@remix-run/dev'];
|
||||
|
||||
const DEFAULTS_PATH = join(__dirname, '../defaults');
|
||||
|
||||
const edgeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-edge.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
const nodeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-node.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
// Minimum supported version of the `@vercel/remix` package
|
||||
const VERCEL_REMIX_MIN_VERSION = '1.10.0';
|
||||
|
||||
// Minimum supported version of the `@vercel/remix-run-dev` forked compiler
|
||||
const REMIX_RUN_DEV_MIN_VERSION = '1.15.0';
|
||||
|
||||
// Maximum version of `@vercel/remix-run-dev` fork
|
||||
// (and also `@vercel/remix` since they get published at the same time)
|
||||
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice(
|
||||
remixRunDevForkVersion.lastIndexOf('@') + 1
|
||||
);
|
||||
|
||||
export const build: BuildV2 = async ({
|
||||
entrypoint,
|
||||
files,
|
||||
workPath,
|
||||
repoRootPath,
|
||||
config,
|
||||
meta = {},
|
||||
}) => {
|
||||
const { installCommand, buildCommand } = config;
|
||||
|
||||
await download(files, workPath, meta);
|
||||
|
||||
const mountpoint = dirname(entrypoint);
|
||||
const entrypointFsDirname = join(workPath, mountpoint);
|
||||
|
||||
// Run "Install Command"
|
||||
const nodeVersion = await getNodeVersion(
|
||||
entrypointFsDirname,
|
||||
undefined,
|
||||
config,
|
||||
meta
|
||||
);
|
||||
|
||||
const { cliType, packageJsonPath, lockfileVersion, lockfilePath } =
|
||||
await scanParentDirs(entrypointFsDirname);
|
||||
|
||||
if (!packageJsonPath) {
|
||||
throw new Error('Failed to locate `package.json` file in your project');
|
||||
}
|
||||
|
||||
const [lockfileRaw, pkgRaw] = await Promise.all([
|
||||
lockfilePath ? fs.readFile(lockfilePath) : null,
|
||||
fs.readFile(packageJsonPath, 'utf8'),
|
||||
]);
|
||||
const pkg = JSON.parse(pkgRaw);
|
||||
|
||||
const spawnOpts = getSpawnOptions(meta, nodeVersion);
|
||||
if (!spawnOpts.env) {
|
||||
spawnOpts.env = {};
|
||||
}
|
||||
|
||||
spawnOpts.env = getEnvForPackageManager({
|
||||
cliType,
|
||||
lockfileVersion,
|
||||
nodeVersion,
|
||||
env: spawnOpts.env,
|
||||
});
|
||||
|
||||
if (typeof installCommand === 'string') {
|
||||
if (installCommand.trim()) {
|
||||
console.log(`Running "install" command: \`${installCommand}\`...`);
|
||||
await execCommand(installCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
console.log(`Skipping "install" command...`);
|
||||
}
|
||||
} else {
|
||||
await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
|
||||
}
|
||||
|
||||
const isHydrogen2 = Boolean(
|
||||
pkg.dependencies?.['@shopify/remix-oxygen'] ||
|
||||
pkg.devDependencies?.['@shopify/remix-oxygen']
|
||||
);
|
||||
|
||||
// Determine the version of Remix based on the `@remix-run/dev`
|
||||
// package version.
|
||||
const remixRunDevPath = await ensureResolvable(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
'@remix-run/dev'
|
||||
);
|
||||
const remixRunDevPkg = JSON.parse(
|
||||
readFileSync(join(remixRunDevPath, 'package.json'), 'utf8')
|
||||
);
|
||||
const remixVersion = remixRunDevPkg.version;
|
||||
|
||||
const remixConfig = await chdirAndReadConfig(
|
||||
remixRunDevPath,
|
||||
entrypointFsDirname,
|
||||
packageJsonPath
|
||||
);
|
||||
const { serverEntryPoint, appDirectory } = remixConfig;
|
||||
const remixRoutes = Object.values(remixConfig.routes);
|
||||
|
||||
let depsModified = false;
|
||||
|
||||
const remixRunDevPkgVersion: string | undefined =
|
||||
pkg.dependencies?.['@remix-run/dev'] ||
|
||||
pkg.devDependencies?.['@remix-run/dev'];
|
||||
|
||||
const serverBundlesMap = new Map<string, ConfigRoute[]>();
|
||||
const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();
|
||||
|
||||
// Read the `export const config` (if any) for each route
|
||||
const project = new Project();
|
||||
const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
|
||||
for (const route of remixRoutes) {
|
||||
const routePath = join(remixConfig.appDirectory, route.file);
|
||||
let staticConfig = getConfig(project, routePath);
|
||||
if (staticConfig && isHydrogen2) {
|
||||
console.log(
|
||||
'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
|
||||
);
|
||||
staticConfig = null;
|
||||
}
|
||||
staticConfigsMap.set(route, staticConfig);
|
||||
}
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
const config = getResolvedRouteConfig(
|
||||
route,
|
||||
remixConfig.routes,
|
||||
staticConfigsMap,
|
||||
isHydrogen2
|
||||
);
|
||||
resolvedConfigsMap.set(route, config);
|
||||
}
|
||||
|
||||
// Figure out which routes belong to which server bundles
|
||||
// based on having common static config properties
|
||||
for (const route of remixRoutes) {
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const config = resolvedConfigsMap.get(route);
|
||||
if (!config) {
|
||||
throw new Error(`Expected resolved config for "${route.id}"`);
|
||||
}
|
||||
const hash = calculateRouteConfigHash(config);
|
||||
|
||||
let routesForHash = serverBundlesMap.get(hash);
|
||||
if (!Array.isArray(routesForHash)) {
|
||||
routesForHash = [];
|
||||
serverBundlesMap.set(hash, routesForHash);
|
||||
}
|
||||
|
||||
routesForHash.push(route);
|
||||
}
|
||||
|
||||
let serverBundles: ServerBundle[] = Array.from(
|
||||
serverBundlesMap.entries()
|
||||
).map(([hash, routes]) => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
|
||||
return {
|
||||
serverBuildPath: isHydrogen2
|
||||
? relative(entrypointFsDirname, remixConfig.serverBuildPath)
|
||||
: `${relative(
|
||||
entrypointFsDirname,
|
||||
dirname(remixConfig.serverBuildPath)
|
||||
)}/build-${runtime}-${hash}.js`,
|
||||
routes: routes.map(r => r.id),
|
||||
};
|
||||
});
|
||||
|
||||
// If the project is *not* relying on split configurations, then set
|
||||
// the `serverBuildPath` to the default Remix path, since the forked
|
||||
// Remix compiler will not be used
|
||||
if (!isHydrogen2 && serverBundles.length === 1) {
|
||||
// `serverBuildTarget` and `serverBuildPath` are undefined with
|
||||
// our remix config modifications, so use the default build path
|
||||
serverBundles[0].serverBuildPath = 'build/index.js';
|
||||
}
|
||||
|
||||
// If the project is relying on split configurations, then override
|
||||
// the official `@remix-run/dev` package with the Vercel fork,
|
||||
// which supports the `serverBundles` config
|
||||
if (
|
||||
serverBundles.length > 1 &&
|
||||
!isHydrogen2 &&
|
||||
remixRunDevPkg.name !== '@vercel/remix-run-dev' &&
|
||||
!remixRunDevPkgVersion?.startsWith('https:')
|
||||
) {
|
||||
const remixDevForkVersion = resolveSemverMinMax(
|
||||
REMIX_RUN_DEV_MIN_VERSION,
|
||||
REMIX_RUN_DEV_MAX_VERSION,
|
||||
remixVersion
|
||||
);
|
||||
// Remove `@remix-run/dev`, add `@vercel/remix-run-dev`
|
||||
if (pkg.devDependencies['@remix-run/dev']) {
|
||||
delete pkg.devDependencies['@remix-run/dev'];
|
||||
pkg.devDependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
|
||||
} else {
|
||||
delete pkg.dependencies['@remix-run/dev'];
|
||||
pkg.dependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
|
||||
}
|
||||
depsModified = true;
|
||||
}
|
||||
|
||||
// `app/entry.server.tsx` and `app/entry.client.tsx` are optional in Remix,
|
||||
// so if either of those files are missing then add our own versions.
|
||||
const userEntryServerFile = findEntry(appDirectory, 'entry.server');
|
||||
if (!userEntryServerFile) {
|
||||
await fs.copyFile(
|
||||
join(DEFAULTS_PATH, 'entry.server.jsx'),
|
||||
join(appDirectory, 'entry.server.jsx')
|
||||
);
|
||||
if (!pkg.dependencies['@vercel/remix']) {
|
||||
// Dependency version resolution logic
|
||||
// 1. Users app is on 1.9.0 -> we install the 1.10.0 (minimum) version of `@vercel/remix`.
|
||||
// 2. Users app is on 1.11.0 (a version greater than 1.10.0 and less than the known max
|
||||
// published version) -> we install the (matching) 1.11.0 version of `@vercel/remix`.
|
||||
// 3. Users app is on something greater than our latest version of the fork -> we install
|
||||
// the latest known published version of `@vercel/remix`.
|
||||
const vercelRemixVersion = resolveSemverMinMax(
|
||||
VERCEL_REMIX_MIN_VERSION,
|
||||
REMIX_RUN_DEV_MAX_VERSION,
|
||||
remixVersion
|
||||
);
|
||||
pkg.dependencies['@vercel/remix'] = vercelRemixVersion;
|
||||
depsModified = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (depsModified) {
|
||||
await fs.writeFile(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');
|
||||
|
||||
// Bypass `--frozen-lockfile` enforcement by removing
|
||||
// env vars that are considered to be CI
|
||||
const nonCiEnv = { ...spawnOpts.env };
|
||||
delete nonCiEnv.CI;
|
||||
delete nonCiEnv.VERCEL;
|
||||
delete nonCiEnv.NOW_BUILDER;
|
||||
|
||||
// Purposefully not passing `meta` here to avoid
|
||||
// the optimization that prevents `npm install`
|
||||
// from running a second time
|
||||
await runNpmInstall(
|
||||
entrypointFsDirname,
|
||||
[],
|
||||
{
|
||||
...spawnOpts,
|
||||
env: nonCiEnv,
|
||||
},
|
||||
undefined,
|
||||
nodeVersion
|
||||
);
|
||||
}
|
||||
|
||||
const userEntryClientFile = findEntry(
|
||||
remixConfig.appDirectory,
|
||||
'entry.client'
|
||||
);
|
||||
if (!userEntryClientFile) {
|
||||
await fs.copyFile(
|
||||
join(DEFAULTS_PATH, 'entry.client.react.jsx'),
|
||||
join(appDirectory, 'entry.client.jsx')
|
||||
);
|
||||
}
|
||||
|
||||
let remixConfigWrapped = false;
|
||||
let serverEntryPointAbs: string | undefined;
|
||||
let originalServerEntryPoint: string | undefined;
|
||||
const remixConfigPath = findConfig(entrypointFsDirname, 'remix.config');
|
||||
const renamedRemixConfigPath = remixConfigPath
|
||||
? `${remixConfigPath}.original${extname(remixConfigPath)}`
|
||||
: undefined;
|
||||
|
||||
try {
|
||||
// We need to patch the `remix.config.js` file to force some values necessary
|
||||
// for a build that works on either Node.js or the Edge runtime
|
||||
if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
|
||||
await fs.rename(remixConfigPath, renamedRemixConfigPath);
|
||||
|
||||
let patchedConfig: string;
|
||||
// Figure out if the `remix.config` file is using ESM syntax
|
||||
if (isESM(renamedRemixConfigPath)) {
|
||||
patchedConfig = `import config from './${basename(
|
||||
renamedRemixConfigPath
|
||||
)}';
|
||||
config.serverBuildTarget = undefined;
|
||||
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
|
||||
config.serverPlatform = 'node';
|
||||
config.serverBuildPath = undefined;
|
||||
config.serverBundles = ${JSON.stringify(serverBundles)};
|
||||
export default config;`;
|
||||
} else {
|
||||
patchedConfig = `const config = require('./${basename(
|
||||
renamedRemixConfigPath
|
||||
)}');
|
||||
config.serverBuildTarget = undefined;
|
||||
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
|
||||
config.serverPlatform = 'node';
|
||||
config.serverBuildPath = undefined;
|
||||
config.serverBundles = ${JSON.stringify(serverBundles)};
|
||||
module.exports = config;`;
|
||||
}
|
||||
await fs.writeFile(remixConfigPath, patchedConfig);
|
||||
remixConfigWrapped = true;
|
||||
}
|
||||
|
||||
// For Hydrogen v2, patch the `server.ts` file to be Vercel-compatible
|
||||
if (isHydrogen2) {
|
||||
if (remixConfig.serverEntryPoint) {
|
||||
serverEntryPointAbs = join(
|
||||
entrypointFsDirname,
|
||||
remixConfig.serverEntryPoint
|
||||
);
|
||||
originalServerEntryPoint = await fs.readFile(
|
||||
serverEntryPointAbs,
|
||||
'utf8'
|
||||
);
|
||||
const patchedServerEntryPoint = patchHydrogenServer(
|
||||
project,
|
||||
serverEntryPointAbs
|
||||
);
|
||||
if (patchedServerEntryPoint) {
|
||||
debug(
|
||||
`Patched Hydrogen server file: ${remixConfig.serverEntryPoint}`
|
||||
);
|
||||
await fs.writeFile(serverEntryPointAbs, patchedServerEntryPoint);
|
||||
}
|
||||
} else {
|
||||
console.log('WARN: No "server" field found in Remix config');
|
||||
}
|
||||
}
|
||||
|
||||
// Make `remix build` output production mode
|
||||
spawnOpts.env.NODE_ENV = 'production';
|
||||
|
||||
// Run "Build Command"
|
||||
if (buildCommand) {
|
||||
debug(`Executing build command "${buildCommand}"`);
|
||||
await execCommand(buildCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
if (hasScript('vercel-build', pkg)) {
|
||||
debug(`Executing "yarn vercel-build"`);
|
||||
await runPackageJsonScript(
|
||||
entrypointFsDirname,
|
||||
'vercel-build',
|
||||
spawnOpts
|
||||
);
|
||||
} else if (hasScript('build', pkg)) {
|
||||
debug(`Executing "yarn build"`);
|
||||
await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
|
||||
} else {
|
||||
await execCommand('remix build', {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
const cleanupOps: Promise<void>[] = [];
|
||||
// Clean up our patched `remix.config.js` to be polite
|
||||
if (remixConfigWrapped && remixConfigPath && renamedRemixConfigPath) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.rename(renamedRemixConfigPath, remixConfigPath)
|
||||
.then(() => debug(`Restored original "${remixConfigPath}" file`))
|
||||
);
|
||||
}
|
||||
// Restore original server entrypoint if it was modified (for Hydrogen v2)
|
||||
if (serverEntryPointAbs && originalServerEntryPoint) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(serverEntryPointAbs, originalServerEntryPoint)
|
||||
.then(() => debug(`Restored original "${serverEntryPointAbs}" file`))
|
||||
);
|
||||
}
|
||||
// Restore original `package.json` file and lockfile
|
||||
if (depsModified) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(packageJsonPath, pkgRaw)
|
||||
.then(() => debug(`Restored original "${packageJsonPath}" file`))
|
||||
);
|
||||
if (lockfilePath && lockfileRaw) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(lockfilePath, lockfileRaw)
|
||||
.then(() => debug(`Restored original "${lockfilePath}" file`))
|
||||
);
|
||||
}
|
||||
}
|
||||
await Promise.all(cleanupOps);
|
||||
}
|
||||
|
||||
// This needs to happen before we run NFT to create the Node/Edge functions
|
||||
await Promise.all([
|
||||
ensureResolvable(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
'@remix-run/server-runtime'
|
||||
),
|
||||
!isHydrogen2
|
||||
? ensureResolvable(entrypointFsDirname, repoRootPath, '@remix-run/node')
|
||||
: null,
|
||||
]);
|
||||
|
||||
const staticDir = join(entrypointFsDirname, 'public');
|
||||
|
||||
// Do a sanity check to ensure that the server bundles `serverBuildPath` was actually created.
|
||||
// If it was not, then that usually means the Vercel forked Remix compiler was not used and
|
||||
// thus only a singular server bundle was produced.
|
||||
const serverBundlesRespected = existsSync(
|
||||
join(entrypointFsDirname, serverBundles[0].serverBuildPath)
|
||||
);
|
||||
if (!serverBundlesRespected) {
|
||||
console.warn(
|
||||
'WARN: `serverBundles` configuration failed. Falling back to a singular server bundle.'
|
||||
);
|
||||
serverBundles = [
|
||||
{
|
||||
serverBuildPath: 'build/index.js',
|
||||
routes: serverBundles.flatMap(b => b.routes),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
const [staticFiles, buildAssets, ...functions] = await Promise.all([
|
||||
glob('**', staticDir),
|
||||
glob('**', remixConfig.assetsBuildDirectory),
|
||||
...serverBundles.map(bundle => {
|
||||
const firstRoute = remixConfig.routes[bundle.routes[0]];
|
||||
const config = resolvedConfigsMap.get(firstRoute) ?? {
|
||||
runtime: 'nodejs',
|
||||
};
|
||||
const serverBuildPath = join(entrypointFsDirname, bundle.serverBuildPath);
|
||||
|
||||
if (config.runtime === 'edge') {
|
||||
return createRenderEdgeFunction(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
serverBuildPath,
|
||||
serverEntryPoint,
|
||||
remixVersion,
|
||||
config
|
||||
);
|
||||
}
|
||||
|
||||
return createRenderNodeFunction(
|
||||
nodeVersion,
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
serverBuildPath,
|
||||
serverEntryPoint,
|
||||
remixVersion,
|
||||
config
|
||||
);
|
||||
}),
|
||||
]);
|
||||
|
||||
const transformedBuildAssets = rename(buildAssets, name => {
|
||||
return posix.join('./', remixConfig.publicPath, name);
|
||||
});
|
||||
|
||||
const output: BuildResultV2Typical['output'] = {
|
||||
...staticFiles,
|
||||
...transformedBuildAssets,
|
||||
};
|
||||
const routes: any[] = [
|
||||
{
|
||||
src: `^/${remixConfig.publicPath.replace(/^\/|\/$/g, '')}/(.*)$`,
|
||||
headers: { 'cache-control': 'public, max-age=31536000, immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
handle: 'filesystem',
|
||||
},
|
||||
];
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
// Layout routes don't get a function / route added
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const { path, rePath } = getPathFromRoute(route, remixConfig.routes);
|
||||
|
||||
// If the route is a pathless layout route (at the root level)
|
||||
// and doesn't have any sub-routes, then a function should not be created.
|
||||
if (!path) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const funcIndex = serverBundles.findIndex(bundle => {
|
||||
return bundle.routes.includes(route.id);
|
||||
});
|
||||
const func = functions[funcIndex];
|
||||
|
||||
if (!func) {
|
||||
throw new Error(`Could not determine server bundle for "${route.id}"`);
|
||||
}
|
||||
|
||||
output[path] = func;
|
||||
|
||||
// If this is a dynamic route then add a Vercel route
|
||||
const re = getRegExpFromPath(rePath);
|
||||
if (re) {
|
||||
routes.push({
|
||||
src: re.source,
|
||||
dest: path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Add a 404 path for not found pages to be server-side rendered by Remix.
|
||||
// Use an edge function bundle if one was generated, otherwise use Node.js.
|
||||
if (!output['404']) {
|
||||
const edgeFunctionIndex = Array.from(serverBundlesMap.values()).findIndex(
|
||||
routes => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime;
|
||||
return runtime === 'edge';
|
||||
}
|
||||
);
|
||||
const func =
|
||||
edgeFunctionIndex !== -1 ? functions[edgeFunctionIndex] : functions[0];
|
||||
output['404'] = func;
|
||||
}
|
||||
routes.push({
|
||||
src: '/(.*)',
|
||||
dest: '/404',
|
||||
});
|
||||
|
||||
return { routes, output, framework: { version: remixVersion } };
|
||||
};
|
||||
|
||||
function hasScript(scriptName: string, pkg: PackageJson | null) {
|
||||
const scripts = (pkg && pkg.scripts) || {};
|
||||
return typeof scripts[scriptName] === 'string';
|
||||
}
|
||||
|
||||
async function createRenderNodeFunction(
|
||||
nodeVersion: NodeVersion,
|
||||
entrypointDir: string,
|
||||
rootDir: string,
|
||||
serverBuildPath: string,
|
||||
serverEntryPoint: string | undefined,
|
||||
remixVersion: string,
|
||||
config: ResolvedNodeRouteConfig
|
||||
): Promise<NodejsLambda> {
|
||||
const files: Files = {};
|
||||
|
||||
let handler = relative(rootDir, serverBuildPath);
|
||||
let handlerPath = join(rootDir, handler);
|
||||
if (!serverEntryPoint) {
|
||||
const baseServerBuildPath = basename(serverBuildPath, '.js');
|
||||
handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
|
||||
handlerPath = join(rootDir, handler);
|
||||
|
||||
// Copy the `server-node.mjs` file into the "build" directory
|
||||
const nodeServerSrc = await nodeServerSrcPromise;
|
||||
await writeEntrypointFile(
|
||||
handlerPath,
|
||||
nodeServerSrc.replace(
|
||||
'@remix-run/dev/server-build',
|
||||
`./${baseServerBuildPath}.js`
|
||||
),
|
||||
rootDir
|
||||
);
|
||||
}
|
||||
|
||||
// Trace the handler with `@vercel/nft`
|
||||
const trace = await nodeFileTrace([handlerPath], {
|
||||
base: rootDir,
|
||||
processCwd: entrypointDir,
|
||||
});
|
||||
|
||||
for (const warning of trace.warnings) {
|
||||
debug(`Warning from trace: ${warning.message}`);
|
||||
}
|
||||
|
||||
for (const file of trace.fileList) {
|
||||
files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
|
||||
}
|
||||
|
||||
const fn = new NodejsLambda({
|
||||
files,
|
||||
handler,
|
||||
runtime: nodeVersion.runtime,
|
||||
shouldAddHelpers: false,
|
||||
shouldAddSourcemapSupport: false,
|
||||
operationType: 'SSR',
|
||||
supportsResponseStreaming: true,
|
||||
regions: config.regions,
|
||||
memory: config.memory,
|
||||
maxDuration: config.maxDuration,
|
||||
framework: {
|
||||
slug: 'remix',
|
||||
version: remixVersion,
|
||||
},
|
||||
});
|
||||
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function createRenderEdgeFunction(
|
||||
entrypointDir: string,
|
||||
rootDir: string,
|
||||
serverBuildPath: string,
|
||||
serverEntryPoint: string | undefined,
|
||||
remixVersion: string,
|
||||
config: ResolvedEdgeRouteConfig
|
||||
): Promise<EdgeFunction> {
|
||||
const files: Files = {};
|
||||
|
||||
let handler = relative(rootDir, serverBuildPath);
|
||||
let handlerPath = join(rootDir, handler);
|
||||
if (!serverEntryPoint) {
|
||||
const baseServerBuildPath = basename(serverBuildPath, '.js');
|
||||
handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
|
||||
handlerPath = join(rootDir, handler);
|
||||
|
||||
// Copy the `server-edge.mjs` file into the "build" directory
|
||||
const edgeServerSrc = await edgeServerSrcPromise;
|
||||
await writeEntrypointFile(
|
||||
handlerPath,
|
||||
edgeServerSrc.replace(
|
||||
'@remix-run/dev/server-build',
|
||||
`./${baseServerBuildPath}.js`
|
||||
),
|
||||
rootDir
|
||||
);
|
||||
}
|
||||
|
||||
let remixRunVercelPkgJson: string | undefined;
|
||||
|
||||
// Trace the handler with `@vercel/nft`
|
||||
const trace = await nodeFileTrace([handlerPath], {
|
||||
base: rootDir,
|
||||
processCwd: entrypointDir,
|
||||
conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
|
||||
async readFile(fsPath) {
|
||||
let source: Buffer | string;
|
||||
try {
|
||||
source = await fs.readFile(fsPath);
|
||||
} catch (err: any) {
|
||||
if (err.code === 'ENOENT' || err.code === 'EISDIR') {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
if (basename(fsPath) === 'package.json') {
|
||||
// For Edge Functions, patch "main" field to prefer "browser" or "module"
|
||||
const pkgJson = JSON.parse(source.toString());
|
||||
|
||||
// When `@remix-run/vercel` is detected, we need to modify the `package.json`
|
||||
// to include the "browser" field so that the proper Edge entrypoint file
|
||||
// is used. This is a temporary stop gap until this PR is merged:
|
||||
// https://github.com/remix-run/remix/pull/5537
|
||||
if (pkgJson.name === '@remix-run/vercel') {
|
||||
pkgJson.browser = 'dist/edge.js';
|
||||
pkgJson.dependencies['@remix-run/server-runtime'] =
|
||||
pkgJson.dependencies['@remix-run/node'];
|
||||
|
||||
if (!remixRunVercelPkgJson) {
|
||||
remixRunVercelPkgJson = JSON.stringify(pkgJson, null, 2) + '\n';
|
||||
|
||||
// Copy in the edge entrypoint so that NFT can properly resolve it
|
||||
const vercelEdgeEntrypointPath = join(
|
||||
DEFAULTS_PATH,
|
||||
'vercel-edge-entrypoint.js'
|
||||
);
|
||||
const vercelEdgeEntrypointDest = join(
|
||||
dirname(fsPath),
|
||||
'dist/edge.js'
|
||||
);
|
||||
await fs.copyFile(
|
||||
vercelEdgeEntrypointPath,
|
||||
vercelEdgeEntrypointDest
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for (const prop of ['browser', 'module']) {
|
||||
const val = pkgJson[prop];
|
||||
if (typeof val === 'string') {
|
||||
pkgJson.main = val;
|
||||
|
||||
// Return the modified `package.json` to nft
|
||||
source = JSON.stringify(pkgJson);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return source;
|
||||
},
|
||||
});
|
||||
|
||||
for (const warning of trace.warnings) {
|
||||
debug(`Warning from trace: ${warning.message}`);
|
||||
}
|
||||
|
||||
for (const file of trace.fileList) {
|
||||
if (
|
||||
remixRunVercelPkgJson &&
|
||||
file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
|
||||
) {
|
||||
// Use the modified `@remix-run/vercel` package.json which contains "browser" field
|
||||
files[file] = new FileBlob({ data: remixRunVercelPkgJson });
|
||||
} else {
|
||||
files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
|
||||
}
|
||||
}
|
||||
|
||||
const fn = new EdgeFunction({
|
||||
files,
|
||||
deploymentTarget: 'v8-worker',
|
||||
entrypoint: handler,
|
||||
regions: config.regions,
|
||||
framework: {
|
||||
slug: 'remix',
|
||||
version: remixVersion,
|
||||
},
|
||||
});
|
||||
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function writeEntrypointFile(
|
||||
path: string,
|
||||
data: string,
|
||||
rootDir: string
|
||||
) {
|
||||
try {
|
||||
await fs.writeFile(path, data);
|
||||
} catch (err: any) {
|
||||
if (err.code === 'ENOENT') {
|
||||
throw new Error(
|
||||
`The "${relative(
|
||||
rootDir,
|
||||
dirname(path)
|
||||
)}" directory does not exist. Please contact support at https://vercel.com/help.`
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
452
packages/remix/src/build-vite.ts
Normal file
452
packages/remix/src/build-vite.ts
Normal file
@@ -0,0 +1,452 @@
|
||||
import { readFileSync, promises as fs, statSync, existsSync } from 'fs';
|
||||
import { basename, dirname, join, relative, sep } from 'path';
|
||||
import { isErrnoException } from '@vercel/error-utils';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import {
|
||||
BuildResultV2Typical,
|
||||
debug,
|
||||
execCommand,
|
||||
getEnvForPackageManager,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
glob,
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
scanParentDirs,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
EdgeFunction,
|
||||
NodejsLambda,
|
||||
} from '@vercel/build-utils';
|
||||
import {
|
||||
getPathFromRoute,
|
||||
getRegExpFromPath,
|
||||
getRemixVersion,
|
||||
hasScript,
|
||||
logNftWarnings,
|
||||
} from './utils';
|
||||
import type { BuildV2, Files, NodeVersion } from '@vercel/build-utils';
|
||||
|
||||
const DEFAULTS_PATH = join(__dirname, '../defaults');
|
||||
|
||||
const edgeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-edge.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
const nodeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-node.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
interface RemixBuildResult {
|
||||
buildManifest: {
|
||||
serverBundles?: Record<
|
||||
string,
|
||||
{ id: string; file: string; config: Record<string, unknown> }
|
||||
>;
|
||||
routeIdToServerBundleId?: Record<string, string>;
|
||||
routes: Record<
|
||||
string,
|
||||
{
|
||||
id: string;
|
||||
file: string;
|
||||
path?: string;
|
||||
index?: boolean;
|
||||
parentId?: string;
|
||||
config: Record<string, unknown>;
|
||||
}
|
||||
>;
|
||||
};
|
||||
remixConfig: {
|
||||
buildDirectory: string;
|
||||
};
|
||||
viteConfig?: {
|
||||
build?: {
|
||||
assetsDir: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
export const build: BuildV2 = async ({
|
||||
entrypoint,
|
||||
workPath,
|
||||
repoRootPath,
|
||||
config,
|
||||
meta = {},
|
||||
}) => {
|
||||
const { installCommand, buildCommand } = config;
|
||||
const mountpoint = dirname(entrypoint);
|
||||
const entrypointFsDirname = join(workPath, mountpoint);
|
||||
|
||||
// Run "Install Command"
|
||||
const nodeVersion = await getNodeVersion(
|
||||
entrypointFsDirname,
|
||||
undefined,
|
||||
config,
|
||||
meta
|
||||
);
|
||||
|
||||
const { cliType, lockfileVersion, packageJson } = await scanParentDirs(
|
||||
entrypointFsDirname,
|
||||
true
|
||||
);
|
||||
|
||||
const spawnOpts = getSpawnOptions(meta, nodeVersion);
|
||||
if (!spawnOpts.env) {
|
||||
spawnOpts.env = {};
|
||||
}
|
||||
|
||||
spawnOpts.env = getEnvForPackageManager({
|
||||
cliType,
|
||||
lockfileVersion,
|
||||
nodeVersion,
|
||||
env: spawnOpts.env,
|
||||
});
|
||||
|
||||
if (typeof installCommand === 'string') {
|
||||
if (installCommand.trim()) {
|
||||
console.log(`Running "install" command: \`${installCommand}\`...`);
|
||||
await execCommand(installCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
console.log(`Skipping "install" command...`);
|
||||
}
|
||||
} else {
|
||||
await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
|
||||
}
|
||||
|
||||
// Determine the version of Remix based on the `@remix-run/dev`
|
||||
// package version.
|
||||
const remixVersion = await getRemixVersion(entrypointFsDirname, repoRootPath);
|
||||
|
||||
// Run "Build Command"
|
||||
if (buildCommand) {
|
||||
debug(`Executing build command "${buildCommand}"`);
|
||||
await execCommand(buildCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
if (hasScript('vercel-build', packageJson)) {
|
||||
debug(`Executing "vercel-build" script`);
|
||||
await runPackageJsonScript(
|
||||
entrypointFsDirname,
|
||||
'vercel-build',
|
||||
spawnOpts
|
||||
);
|
||||
} else if (hasScript('build', packageJson)) {
|
||||
debug(`Executing "build" script`);
|
||||
await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
|
||||
} else {
|
||||
await execCommand('remix build', {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const remixBuildResultPath = join(
|
||||
entrypointFsDirname,
|
||||
'.vercel/remix-build-result.json'
|
||||
);
|
||||
let remixBuildResult: RemixBuildResult | undefined;
|
||||
try {
|
||||
const remixBuildResultContents = readFileSync(remixBuildResultPath, 'utf8');
|
||||
remixBuildResult = JSON.parse(remixBuildResultContents);
|
||||
} catch (err: unknown) {
|
||||
if (!isErrnoException(err) || err.code !== 'ENOENT') {
|
||||
throw err;
|
||||
}
|
||||
// The project has not configured the `vercelPreset()`
|
||||
// Preset in the "vite.config" file. Attempt to check
|
||||
// for the default build output directory.
|
||||
const buildDirectory = join(entrypointFsDirname, 'build');
|
||||
if (statSync(buildDirectory).isDirectory()) {
|
||||
console.warn('WARN: The `vercelPreset()` Preset was not detected.');
|
||||
remixBuildResult = {
|
||||
buildManifest: {
|
||||
routes: {
|
||||
root: {
|
||||
path: '',
|
||||
id: 'root',
|
||||
file: 'app/root.tsx',
|
||||
config: {},
|
||||
},
|
||||
'routes/_index': {
|
||||
file: 'app/routes/_index.tsx',
|
||||
id: 'routes/_index',
|
||||
index: true,
|
||||
parentId: 'root',
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
remixConfig: {
|
||||
buildDirectory,
|
||||
},
|
||||
};
|
||||
// Detect if a server build exists (won't be the case when `ssr: false`)
|
||||
const serverPath = 'build/server/index.js';
|
||||
if (existsSync(join(entrypointFsDirname, serverPath))) {
|
||||
remixBuildResult.buildManifest.routeIdToServerBundleId = {
|
||||
'routes/_index': '',
|
||||
};
|
||||
remixBuildResult.buildManifest.serverBundles = {
|
||||
'': {
|
||||
id: '',
|
||||
file: serverPath,
|
||||
config: {},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!remixBuildResult) {
|
||||
throw new Error(
|
||||
'Could not determine build output directory. Please configure the `vercelPreset()` Preset from the `@vercel/remix` npm package'
|
||||
);
|
||||
}
|
||||
|
||||
const { buildManifest, remixConfig, viteConfig } = remixBuildResult;
|
||||
|
||||
const staticDir = join(remixConfig.buildDirectory, 'client');
|
||||
const serverBundles = Object.values(buildManifest.serverBundles ?? {});
|
||||
|
||||
const [staticFiles, ...functions] = await Promise.all([
|
||||
glob('**', staticDir),
|
||||
...serverBundles.map(bundle => {
|
||||
if (bundle.config.runtime === 'edge') {
|
||||
return createRenderEdgeFunction(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
join(entrypointFsDirname, bundle.file),
|
||||
undefined,
|
||||
remixVersion,
|
||||
bundle.config
|
||||
);
|
||||
}
|
||||
|
||||
return createRenderNodeFunction(
|
||||
nodeVersion,
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
join(entrypointFsDirname, bundle.file),
|
||||
undefined,
|
||||
remixVersion,
|
||||
bundle.config
|
||||
);
|
||||
}),
|
||||
]);
|
||||
|
||||
const functionsMap = new Map<string, EdgeFunction | NodejsLambda>();
|
||||
for (let i = 0; i < serverBundles.length; i++) {
|
||||
functionsMap.set(serverBundles[i].id, functions[i]);
|
||||
}
|
||||
|
||||
const output: BuildResultV2Typical['output'] = staticFiles;
|
||||
const assetsDir = viteConfig?.build?.assetsDir || 'assets';
|
||||
const routes: any[] = [
|
||||
{
|
||||
src: `^/${assetsDir}/(.*)$`,
|
||||
headers: { 'cache-control': 'public, max-age=31536000, immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
handle: 'filesystem',
|
||||
},
|
||||
];
|
||||
|
||||
for (const [id, functionId] of Object.entries(
|
||||
buildManifest.routeIdToServerBundleId ?? {}
|
||||
)) {
|
||||
const route = buildManifest.routes[id];
|
||||
const { path, rePath } = getPathFromRoute(route, buildManifest.routes);
|
||||
|
||||
// If the route is a pathless layout route (at the root level)
|
||||
// and doesn't have any sub-routes, then a function should not be created.
|
||||
if (!path) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const func = functionsMap.get(functionId);
|
||||
if (!func) {
|
||||
throw new Error(`Could not determine server bundle for "${id}"`);
|
||||
}
|
||||
|
||||
output[path] = func;
|
||||
|
||||
// If this is a dynamic route then add a Vercel route
|
||||
const re = getRegExpFromPath(rePath);
|
||||
if (re) {
|
||||
routes.push({
|
||||
src: re.source,
|
||||
dest: path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// For the 404 case, invoke the Function (or serve the static file
|
||||
// for `ssr: false` mode) at the `/` path. Remix will serve its 404 route.
|
||||
routes.push({
|
||||
src: '/(.*)',
|
||||
dest: '/',
|
||||
});
|
||||
|
||||
return { routes, output, framework: { version: remixVersion } };
|
||||
};
|
||||
|
||||
async function createRenderNodeFunction(
|
||||
nodeVersion: NodeVersion,
|
||||
entrypointDir: string,
|
||||
rootDir: string,
|
||||
serverBuildPath: string,
|
||||
serverEntryPoint: string | undefined,
|
||||
remixVersion: string,
|
||||
config: /*TODO: ResolvedNodeRouteConfig*/ any
|
||||
): Promise<NodejsLambda> {
|
||||
const files: Files = {};
|
||||
|
||||
let handler = relative(rootDir, serverBuildPath);
|
||||
let handlerPath = join(rootDir, handler);
|
||||
if (!serverEntryPoint) {
|
||||
const baseServerBuildPath = basename(serverBuildPath, '.js');
|
||||
handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
|
||||
handlerPath = join(rootDir, handler);
|
||||
|
||||
// Copy the `server-node.mjs` file into the "build" directory
|
||||
const nodeServerSrc = await nodeServerSrcPromise;
|
||||
await fs.writeFile(
|
||||
handlerPath,
|
||||
nodeServerSrc.replace(
|
||||
'@remix-run/dev/server-build',
|
||||
`./${baseServerBuildPath}.js`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Trace the handler with `@vercel/nft`
|
||||
const trace = await nodeFileTrace([handlerPath], {
|
||||
base: rootDir,
|
||||
processCwd: entrypointDir,
|
||||
});
|
||||
|
||||
logNftWarnings(trace.warnings, '@remix-run/node');
|
||||
|
||||
for (const file of trace.fileList) {
|
||||
files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
|
||||
}
|
||||
|
||||
const fn = new NodejsLambda({
|
||||
files,
|
||||
handler,
|
||||
runtime: nodeVersion.runtime,
|
||||
shouldAddHelpers: false,
|
||||
shouldAddSourcemapSupport: false,
|
||||
operationType: 'SSR',
|
||||
supportsResponseStreaming: true,
|
||||
regions: config.regions,
|
||||
memory: config.memory,
|
||||
maxDuration: config.maxDuration,
|
||||
framework: {
|
||||
slug: 'remix',
|
||||
version: remixVersion,
|
||||
},
|
||||
});
|
||||
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function createRenderEdgeFunction(
|
||||
entrypointDir: string,
|
||||
rootDir: string,
|
||||
serverBuildPath: string,
|
||||
serverEntryPoint: string | undefined,
|
||||
remixVersion: string,
|
||||
config: /* TODO: ResolvedEdgeRouteConfig*/ any
|
||||
): Promise<EdgeFunction> {
|
||||
const files: Files = {};
|
||||
|
||||
let handler = relative(rootDir, serverBuildPath);
|
||||
let handlerPath = join(rootDir, handler);
|
||||
if (!serverEntryPoint) {
|
||||
const baseServerBuildPath = basename(serverBuildPath, '.js');
|
||||
handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
|
||||
handlerPath = join(rootDir, handler);
|
||||
|
||||
// Copy the `server-edge.mjs` file into the "build" directory
|
||||
const edgeServerSrc = await edgeServerSrcPromise;
|
||||
await fs.writeFile(
|
||||
handlerPath,
|
||||
edgeServerSrc.replace(
|
||||
'@remix-run/dev/server-build',
|
||||
`./${baseServerBuildPath}.js`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
let remixRunVercelPkgJson: string | undefined;
|
||||
|
||||
// Trace the handler with `@vercel/nft`
|
||||
const trace = await nodeFileTrace([handlerPath], {
|
||||
base: rootDir,
|
||||
processCwd: entrypointDir,
|
||||
conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
|
||||
async readFile(fsPath) {
|
||||
let source: Buffer | string;
|
||||
try {
|
||||
source = await fs.readFile(fsPath);
|
||||
} catch (err: any) {
|
||||
if (err.code === 'ENOENT' || err.code === 'EISDIR') {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
if (basename(fsPath) === 'package.json') {
|
||||
// For Edge Functions, patch "main" field to prefer "browser" or "module"
|
||||
const pkgJson = JSON.parse(source.toString());
|
||||
|
||||
for (const prop of ['browser', 'module']) {
|
||||
const val = pkgJson[prop];
|
||||
if (typeof val === 'string') {
|
||||
pkgJson.main = val;
|
||||
|
||||
// Return the modified `package.json` to nft
|
||||
source = JSON.stringify(pkgJson);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return source;
|
||||
},
|
||||
});
|
||||
|
||||
logNftWarnings(trace.warnings, '@remix-run/server-runtime');
|
||||
|
||||
for (const file of trace.fileList) {
|
||||
if (
|
||||
remixRunVercelPkgJson &&
|
||||
file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
|
||||
) {
|
||||
// Use the modified `@remix-run/vercel` package.json which contains "browser" field
|
||||
files[file] = new FileBlob({ data: remixRunVercelPkgJson });
|
||||
} else {
|
||||
files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
|
||||
}
|
||||
}
|
||||
|
||||
const fn = new EdgeFunction({
|
||||
files,
|
||||
deploymentTarget: 'v8-worker',
|
||||
entrypoint: handler,
|
||||
regions: config.regions,
|
||||
framework: {
|
||||
slug: 'remix',
|
||||
version: remixVersion,
|
||||
},
|
||||
});
|
||||
|
||||
return fn;
|
||||
}
|
||||
@@ -1,786 +1,9 @@
|
||||
import { Project } from 'ts-morph';
|
||||
import { readFileSync, promises as fs } from 'fs';
|
||||
import { basename, dirname, extname, join, posix, relative, sep } from 'path';
|
||||
import {
|
||||
debug,
|
||||
download,
|
||||
execCommand,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
getEnvForPackageManager,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
glob,
|
||||
EdgeFunction,
|
||||
NodejsLambda,
|
||||
rename,
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
scanParentDirs,
|
||||
} from '@vercel/build-utils';
|
||||
import { getConfig } from '@vercel/static-config';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import type {
|
||||
BuildV2,
|
||||
Files,
|
||||
NodeVersion,
|
||||
PackageJson,
|
||||
BuildResultV2Typical,
|
||||
} from '@vercel/build-utils';
|
||||
import type { ConfigRoute } from '@remix-run/dev/dist/config/routes';
|
||||
import type { BaseFunctionConfig } from '@vercel/static-config';
|
||||
import {
|
||||
calculateRouteConfigHash,
|
||||
findConfig,
|
||||
getPathFromRoute,
|
||||
getRegExpFromPath,
|
||||
getResolvedRouteConfig,
|
||||
isLayoutRoute,
|
||||
ResolvedRouteConfig,
|
||||
ResolvedNodeRouteConfig,
|
||||
ResolvedEdgeRouteConfig,
|
||||
findEntry,
|
||||
chdirAndReadConfig,
|
||||
resolveSemverMinMax,
|
||||
ensureResolvable,
|
||||
isESM,
|
||||
} from './utils';
|
||||
import { patchHydrogenServer } from './hydrogen';
|
||||
import { build as buildVite } from './build-vite';
|
||||
import { build as buildLegacy } from './build-legacy';
|
||||
import { findConfig } from './utils';
|
||||
import type { BuildV2 } from '@vercel/build-utils';
|
||||
|
||||
interface ServerBundle {
|
||||
serverBuildPath: string;
|
||||
routes: string[];
|
||||
}
|
||||
|
||||
const remixBuilderPkg = JSON.parse(
|
||||
readFileSync(join(__dirname, '../package.json'), 'utf8')
|
||||
);
|
||||
const remixRunDevForkVersion =
|
||||
remixBuilderPkg.devDependencies['@remix-run/dev'];
|
||||
|
||||
const DEFAULTS_PATH = join(__dirname, '../defaults');
|
||||
|
||||
const edgeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-edge.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
const nodeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-node.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
// Minimum supported version of the `@vercel/remix` package
|
||||
const VERCEL_REMIX_MIN_VERSION = '1.10.0';
|
||||
|
||||
// Minimum supported version of the `@vercel/remix-run-dev` forked compiler
|
||||
const REMIX_RUN_DEV_MIN_VERSION = '1.15.0';
|
||||
|
||||
// Maximum version of `@vercel/remix-run-dev` fork
|
||||
// (and also `@vercel/remix` since they get published at the same time)
|
||||
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice(
|
||||
remixRunDevForkVersion.lastIndexOf('@') + 1
|
||||
);
|
||||
|
||||
export const build: BuildV2 = async ({
|
||||
entrypoint,
|
||||
files,
|
||||
workPath,
|
||||
repoRootPath,
|
||||
config,
|
||||
meta = {},
|
||||
}) => {
|
||||
const { installCommand, buildCommand } = config;
|
||||
|
||||
await download(files, workPath, meta);
|
||||
|
||||
const mountpoint = dirname(entrypoint);
|
||||
const entrypointFsDirname = join(workPath, mountpoint);
|
||||
|
||||
// Run "Install Command"
|
||||
const nodeVersion = await getNodeVersion(
|
||||
entrypointFsDirname,
|
||||
undefined,
|
||||
config,
|
||||
meta
|
||||
);
|
||||
|
||||
const { cliType, packageJsonPath, lockfileVersion, lockfilePath } =
|
||||
await scanParentDirs(entrypointFsDirname);
|
||||
|
||||
if (!packageJsonPath) {
|
||||
throw new Error('Failed to locate `package.json` file in your project');
|
||||
}
|
||||
|
||||
const [lockfileRaw, pkgRaw] = await Promise.all([
|
||||
lockfilePath ? fs.readFile(lockfilePath) : null,
|
||||
fs.readFile(packageJsonPath, 'utf8'),
|
||||
]);
|
||||
const pkg = JSON.parse(pkgRaw);
|
||||
|
||||
const spawnOpts = getSpawnOptions(meta, nodeVersion);
|
||||
if (!spawnOpts.env) {
|
||||
spawnOpts.env = {};
|
||||
}
|
||||
|
||||
spawnOpts.env = getEnvForPackageManager({
|
||||
cliType,
|
||||
lockfileVersion,
|
||||
nodeVersion,
|
||||
env: spawnOpts.env,
|
||||
});
|
||||
|
||||
if (typeof installCommand === 'string') {
|
||||
if (installCommand.trim()) {
|
||||
console.log(`Running "install" command: \`${installCommand}\`...`);
|
||||
await execCommand(installCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
console.log(`Skipping "install" command...`);
|
||||
}
|
||||
} else {
|
||||
await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
|
||||
}
|
||||
|
||||
const isHydrogen2 = Boolean(
|
||||
pkg.dependencies?.['@shopify/remix-oxygen'] ||
|
||||
pkg.devDependencies?.['@shopify/remix-oxygen']
|
||||
);
|
||||
|
||||
// Determine the version of Remix based on the `@remix-run/dev`
|
||||
// package version.
|
||||
const remixRunDevPath = await ensureResolvable(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
'@remix-run/dev'
|
||||
);
|
||||
const remixRunDevPkg = JSON.parse(
|
||||
readFileSync(join(remixRunDevPath, 'package.json'), 'utf8')
|
||||
);
|
||||
const remixVersion = remixRunDevPkg.version;
|
||||
|
||||
const remixConfig = await chdirAndReadConfig(
|
||||
remixRunDevPath,
|
||||
entrypointFsDirname,
|
||||
packageJsonPath
|
||||
);
|
||||
const { serverEntryPoint, appDirectory } = remixConfig;
|
||||
const remixRoutes = Object.values(remixConfig.routes);
|
||||
|
||||
let depsModified = false;
|
||||
|
||||
const remixRunDevPkgVersion: string | undefined =
|
||||
pkg.dependencies?.['@remix-run/dev'] ||
|
||||
pkg.devDependencies?.['@remix-run/dev'];
|
||||
|
||||
// Override the official `@remix-run/dev` package with the
|
||||
// Vercel fork, which supports the `serverBundles` config
|
||||
if (
|
||||
!isHydrogen2 &&
|
||||
remixRunDevPkg.name !== '@vercel/remix-run-dev' &&
|
||||
!remixRunDevPkgVersion?.startsWith('https:')
|
||||
) {
|
||||
const remixDevForkVersion = resolveSemverMinMax(
|
||||
REMIX_RUN_DEV_MIN_VERSION,
|
||||
REMIX_RUN_DEV_MAX_VERSION,
|
||||
remixVersion
|
||||
);
|
||||
// Remove `@remix-run/dev`, add `@vercel/remix-run-dev`
|
||||
if (pkg.devDependencies['@remix-run/dev']) {
|
||||
delete pkg.devDependencies['@remix-run/dev'];
|
||||
pkg.devDependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
|
||||
} else {
|
||||
delete pkg.dependencies['@remix-run/dev'];
|
||||
pkg.dependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
|
||||
}
|
||||
depsModified = true;
|
||||
}
|
||||
|
||||
// `app/entry.server.tsx` and `app/entry.client.tsx` are optional in Remix,
|
||||
// so if either of those files are missing then add our own versions.
|
||||
const userEntryServerFile = findEntry(appDirectory, 'entry.server');
|
||||
if (!userEntryServerFile) {
|
||||
await fs.copyFile(
|
||||
join(DEFAULTS_PATH, 'entry.server.jsx'),
|
||||
join(appDirectory, 'entry.server.jsx')
|
||||
);
|
||||
if (!pkg.dependencies['@vercel/remix']) {
|
||||
// Dependency version resolution logic
|
||||
// 1. Users app is on 1.9.0 -> we install the 1.10.0 (minimum) version of `@vercel/remix`.
|
||||
// 2. Users app is on 1.11.0 (a version greater than 1.10.0 and less than the known max
|
||||
// published version) -> we install the (matching) 1.11.0 version of `@vercel/remix`.
|
||||
// 3. Users app is on something greater than our latest version of the fork -> we install
|
||||
// the latest known published version of `@vercel/remix`.
|
||||
const vercelRemixVersion = resolveSemverMinMax(
|
||||
VERCEL_REMIX_MIN_VERSION,
|
||||
REMIX_RUN_DEV_MAX_VERSION,
|
||||
remixVersion
|
||||
);
|
||||
pkg.dependencies['@vercel/remix'] = vercelRemixVersion;
|
||||
depsModified = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (depsModified) {
|
||||
await fs.writeFile(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');
|
||||
|
||||
// Bypass `--frozen-lockfile` enforcement by removing
|
||||
// env vars that are considered to be CI
|
||||
const nonCiEnv = { ...spawnOpts.env };
|
||||
delete nonCiEnv.CI;
|
||||
delete nonCiEnv.VERCEL;
|
||||
delete nonCiEnv.NOW_BUILDER;
|
||||
|
||||
// Purposefully not passing `meta` here to avoid
|
||||
// the optimization that prevents `npm install`
|
||||
// from running a second time
|
||||
await runNpmInstall(
|
||||
entrypointFsDirname,
|
||||
[],
|
||||
{
|
||||
...spawnOpts,
|
||||
env: nonCiEnv,
|
||||
},
|
||||
undefined,
|
||||
nodeVersion
|
||||
);
|
||||
}
|
||||
|
||||
const userEntryClientFile = findEntry(
|
||||
remixConfig.appDirectory,
|
||||
'entry.client'
|
||||
);
|
||||
if (!userEntryClientFile) {
|
||||
await fs.copyFile(
|
||||
join(DEFAULTS_PATH, 'entry.client.react.jsx'),
|
||||
join(appDirectory, 'entry.client.jsx')
|
||||
);
|
||||
}
|
||||
|
||||
let remixConfigWrapped = false;
|
||||
let serverEntryPointAbs: string | undefined;
|
||||
let originalServerEntryPoint: string | undefined;
|
||||
const remixConfigPath = findConfig(entrypointFsDirname, 'remix.config');
|
||||
const renamedRemixConfigPath = remixConfigPath
|
||||
? `${remixConfigPath}.original${extname(remixConfigPath)}`
|
||||
: undefined;
|
||||
|
||||
// These get populated inside the try/catch below
|
||||
let serverBundles: ServerBundle[];
|
||||
const serverBundlesMap = new Map<string, ConfigRoute[]>();
|
||||
const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();
|
||||
|
||||
try {
|
||||
// Read the `export const config` (if any) for each route
|
||||
const project = new Project();
|
||||
const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
|
||||
for (const route of remixRoutes) {
|
||||
const routePath = join(remixConfig.appDirectory, route.file);
|
||||
let staticConfig = getConfig(project, routePath);
|
||||
if (staticConfig && isHydrogen2) {
|
||||
console.log(
|
||||
'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
|
||||
);
|
||||
staticConfig = null;
|
||||
}
|
||||
staticConfigsMap.set(route, staticConfig);
|
||||
}
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
const config = getResolvedRouteConfig(
|
||||
route,
|
||||
remixConfig.routes,
|
||||
staticConfigsMap,
|
||||
isHydrogen2
|
||||
);
|
||||
resolvedConfigsMap.set(route, config);
|
||||
}
|
||||
|
||||
// Figure out which routes belong to which server bundles
|
||||
// based on having common static config properties
|
||||
for (const route of remixRoutes) {
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const config = resolvedConfigsMap.get(route);
|
||||
if (!config) {
|
||||
throw new Error(`Expected resolved config for "${route.id}"`);
|
||||
}
|
||||
const hash = calculateRouteConfigHash(config);
|
||||
|
||||
let routesForHash = serverBundlesMap.get(hash);
|
||||
if (!Array.isArray(routesForHash)) {
|
||||
routesForHash = [];
|
||||
serverBundlesMap.set(hash, routesForHash);
|
||||
}
|
||||
|
||||
routesForHash.push(route);
|
||||
}
|
||||
|
||||
serverBundles = Array.from(serverBundlesMap.entries()).map(
|
||||
([hash, routes]) => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
|
||||
return {
|
||||
serverBuildPath: isHydrogen2
|
||||
? relative(entrypointFsDirname, remixConfig.serverBuildPath)
|
||||
: `${relative(
|
||||
entrypointFsDirname,
|
||||
dirname(remixConfig.serverBuildPath)
|
||||
)}/build-${runtime}-${hash}.js`,
|
||||
routes: routes.map(r => r.id),
|
||||
};
|
||||
}
|
||||
);
|
||||
|
||||
// We need to patch the `remix.config.js` file to force some values necessary
|
||||
// for a build that works on either Node.js or the Edge runtime
|
||||
if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
|
||||
await fs.rename(remixConfigPath, renamedRemixConfigPath);
|
||||
|
||||
let patchedConfig: string;
|
||||
// Figure out if the `remix.config` file is using ESM syntax
|
||||
if (isESM(renamedRemixConfigPath)) {
|
||||
patchedConfig = `import config from './${basename(
|
||||
renamedRemixConfigPath
|
||||
)}';
|
||||
config.serverBuildTarget = undefined;
|
||||
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
|
||||
config.serverPlatform = 'node';
|
||||
config.serverBuildPath = undefined;
|
||||
config.serverBundles = ${JSON.stringify(serverBundles)};
|
||||
export default config;`;
|
||||
} else {
|
||||
patchedConfig = `const config = require('./${basename(
|
||||
renamedRemixConfigPath
|
||||
)}');
|
||||
config.serverBuildTarget = undefined;
|
||||
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
|
||||
config.serverPlatform = 'node';
|
||||
config.serverBuildPath = undefined;
|
||||
config.serverBundles = ${JSON.stringify(serverBundles)};
|
||||
module.exports = config;`;
|
||||
}
|
||||
await fs.writeFile(remixConfigPath, patchedConfig);
|
||||
remixConfigWrapped = true;
|
||||
}
|
||||
|
||||
// For Hydrogen v2, patch the `server.ts` file to be Vercel-compatible
|
||||
if (isHydrogen2) {
|
||||
if (remixConfig.serverEntryPoint) {
|
||||
serverEntryPointAbs = join(
|
||||
entrypointFsDirname,
|
||||
remixConfig.serverEntryPoint
|
||||
);
|
||||
originalServerEntryPoint = await fs.readFile(
|
||||
serverEntryPointAbs,
|
||||
'utf8'
|
||||
);
|
||||
const patchedServerEntryPoint = patchHydrogenServer(
|
||||
project,
|
||||
serverEntryPointAbs
|
||||
);
|
||||
if (patchedServerEntryPoint) {
|
||||
debug(
|
||||
`Patched Hydrogen server file: ${remixConfig.serverEntryPoint}`
|
||||
);
|
||||
await fs.writeFile(serverEntryPointAbs, patchedServerEntryPoint);
|
||||
}
|
||||
} else {
|
||||
console.log('WARN: No "server" field found in Remix config');
|
||||
}
|
||||
}
|
||||
|
||||
// Make `remix build` output production mode
|
||||
spawnOpts.env.NODE_ENV = 'production';
|
||||
|
||||
// Run "Build Command"
|
||||
if (buildCommand) {
|
||||
debug(`Executing build command "${buildCommand}"`);
|
||||
await execCommand(buildCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
if (hasScript('vercel-build', pkg)) {
|
||||
debug(`Executing "yarn vercel-build"`);
|
||||
await runPackageJsonScript(
|
||||
entrypointFsDirname,
|
||||
'vercel-build',
|
||||
spawnOpts
|
||||
);
|
||||
} else if (hasScript('build', pkg)) {
|
||||
debug(`Executing "yarn build"`);
|
||||
await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
|
||||
} else {
|
||||
await execCommand('remix build', {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
const cleanupOps: Promise<void>[] = [];
|
||||
// Clean up our patched `remix.config.js` to be polite
|
||||
if (remixConfigWrapped && remixConfigPath && renamedRemixConfigPath) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.rename(renamedRemixConfigPath, remixConfigPath)
|
||||
.then(() => debug(`Restored original "${remixConfigPath}" file`))
|
||||
);
|
||||
}
|
||||
// Restore original server entrypoint if it was modified (for Hydrogen v2)
|
||||
if (serverEntryPointAbs && originalServerEntryPoint) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(serverEntryPointAbs, originalServerEntryPoint)
|
||||
.then(() => debug(`Restored original "${serverEntryPointAbs}" file`))
|
||||
);
|
||||
}
|
||||
// Restore original `package.json` file and lockfile
|
||||
if (depsModified) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(packageJsonPath, pkgRaw)
|
||||
.then(() => debug(`Restored original "${packageJsonPath}" file`))
|
||||
);
|
||||
if (lockfilePath && lockfileRaw) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(lockfilePath, lockfileRaw)
|
||||
.then(() => debug(`Restored original "${lockfilePath}" file`))
|
||||
);
|
||||
}
|
||||
}
|
||||
await Promise.all(cleanupOps);
|
||||
}
|
||||
|
||||
// This needs to happen before we run NFT to create the Node/Edge functions
|
||||
await Promise.all([
|
||||
ensureResolvable(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
'@remix-run/server-runtime'
|
||||
),
|
||||
!isHydrogen2
|
||||
? ensureResolvable(entrypointFsDirname, repoRootPath, '@remix-run/node')
|
||||
: null,
|
||||
]);
|
||||
|
||||
const staticDir = join(entrypointFsDirname, 'public');
|
||||
|
||||
const [staticFiles, buildAssets, ...functions] = await Promise.all([
|
||||
glob('**', staticDir),
|
||||
glob('**', remixConfig.assetsBuildDirectory),
|
||||
...serverBundles.map(bundle => {
|
||||
const firstRoute = remixConfig.routes[bundle.routes[0]];
|
||||
const config = resolvedConfigsMap.get(firstRoute) ?? {
|
||||
runtime: 'nodejs',
|
||||
};
|
||||
|
||||
if (config.runtime === 'edge') {
|
||||
return createRenderEdgeFunction(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
join(entrypointFsDirname, bundle.serverBuildPath),
|
||||
serverEntryPoint,
|
||||
remixVersion,
|
||||
config
|
||||
);
|
||||
}
|
||||
|
||||
return createRenderNodeFunction(
|
||||
nodeVersion,
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
join(entrypointFsDirname, bundle.serverBuildPath),
|
||||
serverEntryPoint,
|
||||
remixVersion,
|
||||
config
|
||||
);
|
||||
}),
|
||||
]);
|
||||
|
||||
const transformedBuildAssets = rename(buildAssets, name => {
|
||||
return posix.join('./', remixConfig.publicPath, name);
|
||||
});
|
||||
|
||||
const output: BuildResultV2Typical['output'] = {
|
||||
...staticFiles,
|
||||
...transformedBuildAssets,
|
||||
};
|
||||
const routes: any[] = [
|
||||
{
|
||||
src: `^/${remixConfig.publicPath.replace(/^\/|\/$/g, '')}/(.*)$`,
|
||||
headers: { 'cache-control': 'public, max-age=31536000, immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
handle: 'filesystem',
|
||||
},
|
||||
];
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
// Layout routes don't get a function / route added
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const { path, rePath } = getPathFromRoute(route, remixConfig.routes);
|
||||
|
||||
// If the route is a pathless layout route (at the root level)
|
||||
// and doesn't have any sub-routes, then a function should not be created.
|
||||
if (!path) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const funcIndex = serverBundles.findIndex(bundle => {
|
||||
return bundle.routes.includes(route.id);
|
||||
});
|
||||
const func = functions[funcIndex];
|
||||
|
||||
if (!func) {
|
||||
throw new Error(`Could not determine server bundle for "${route.id}"`);
|
||||
}
|
||||
|
||||
output[path] = func;
|
||||
|
||||
// If this is a dynamic route then add a Vercel route
|
||||
const re = getRegExpFromPath(rePath);
|
||||
if (re) {
|
||||
routes.push({
|
||||
src: re.source,
|
||||
dest: path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Add a 404 path for not found pages to be server-side rendered by Remix.
|
||||
// Use an edge function bundle if one was generated, otherwise use Node.js.
|
||||
if (!output['404']) {
|
||||
const edgeFunctionIndex = Array.from(serverBundlesMap.values()).findIndex(
|
||||
routes => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime;
|
||||
return runtime === 'edge';
|
||||
}
|
||||
);
|
||||
const func =
|
||||
edgeFunctionIndex !== -1 ? functions[edgeFunctionIndex] : functions[0];
|
||||
output['404'] = func;
|
||||
}
|
||||
routes.push({
|
||||
src: '/(.*)',
|
||||
dest: '/404',
|
||||
});
|
||||
|
||||
return { routes, output, framework: { version: remixVersion } };
|
||||
export const build: BuildV2 = opts => {
|
||||
const isLegacy = findConfig(opts.workPath, 'remix.config');
|
||||
return isLegacy ? buildLegacy(opts) : buildVite(opts);
|
||||
};
|
||||
|
||||
function hasScript(scriptName: string, pkg: PackageJson | null) {
|
||||
const scripts = (pkg && pkg.scripts) || {};
|
||||
return typeof scripts[scriptName] === 'string';
|
||||
}
|
||||
|
||||
async function createRenderNodeFunction(
|
||||
nodeVersion: NodeVersion,
|
||||
entrypointDir: string,
|
||||
rootDir: string,
|
||||
serverBuildPath: string,
|
||||
serverEntryPoint: string | undefined,
|
||||
remixVersion: string,
|
||||
config: ResolvedNodeRouteConfig
|
||||
): Promise<NodejsLambda> {
|
||||
const files: Files = {};
|
||||
|
||||
let handler = relative(rootDir, serverBuildPath);
|
||||
let handlerPath = join(rootDir, handler);
|
||||
if (!serverEntryPoint) {
|
||||
const baseServerBuildPath = basename(serverBuildPath, '.js');
|
||||
handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
|
||||
handlerPath = join(rootDir, handler);
|
||||
|
||||
// Copy the `server-node.mjs` file into the "build" directory
|
||||
const nodeServerSrc = await nodeServerSrcPromise;
|
||||
await writeEntrypointFile(
|
||||
handlerPath,
|
||||
nodeServerSrc.replace(
|
||||
'@remix-run/dev/server-build',
|
||||
`./${baseServerBuildPath}.js`
|
||||
),
|
||||
rootDir
|
||||
);
|
||||
}
|
||||
|
||||
// Trace the handler with `@vercel/nft`
|
||||
const trace = await nodeFileTrace([handlerPath], {
|
||||
base: rootDir,
|
||||
processCwd: entrypointDir,
|
||||
});
|
||||
|
||||
for (const warning of trace.warnings) {
|
||||
debug(`Warning from trace: ${warning.message}`);
|
||||
}
|
||||
|
||||
for (const file of trace.fileList) {
|
||||
files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
|
||||
}
|
||||
|
||||
const fn = new NodejsLambda({
|
||||
files,
|
||||
handler,
|
||||
runtime: nodeVersion.runtime,
|
||||
shouldAddHelpers: false,
|
||||
shouldAddSourcemapSupport: false,
|
||||
operationType: 'SSR',
|
||||
supportsResponseStreaming: true,
|
||||
regions: config.regions,
|
||||
memory: config.memory,
|
||||
maxDuration: config.maxDuration,
|
||||
framework: {
|
||||
slug: 'remix',
|
||||
version: remixVersion,
|
||||
},
|
||||
});
|
||||
|
||||
return fn;
|
||||
}
|
||||
|
||||
/**
 * Creates the Edge (v8-worker) render function for one Remix server bundle.
 *
 * When the project does not provide its own server entrypoint, a default
 * `server-<bundle>.mjs` handler is generated from the `server-edge.mjs`
 * template, with its `@remix-run/dev/server-build` virtual import rewritten
 * to point at the compiled bundle. The handler is then traced with
 * `@vercel/nft` using edge-oriented resolution conditions; a custom
 * `readFile` hook rewrites `package.json` files on the fly so that
 * browser/module entrypoints are preferred during resolution.
 *
 * @param entrypointDir - absolute path of the Remix app directory (trace cwd)
 * @param rootDir - absolute path of the repo root; trace base and the root
 *                  that all output file paths are relative to
 * @param serverBuildPath - absolute path of the compiled server bundle
 * @param serverEntryPoint - user-defined server entrypoint, if any
 * @param remixVersion - detected Remix version, recorded as framework metadata
 * @param config - resolved per-route Edge config (regions)
 * @returns the configured `EdgeFunction`
 */
async function createRenderEdgeFunction(
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: ResolvedEdgeRouteConfig
): Promise<EdgeFunction> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-edge.mjs` file into the "build" directory
    const edgeServerSrc = await edgeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      edgeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }

  // Set by the `readFile` hook below when `@remix-run/vercel` is seen;
  // later used to substitute the patched package.json into the output.
  let remixRunVercelPkgJson: string | undefined;

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
    conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
    async readFile(fsPath) {
      let source: Buffer | string;
      try {
        source = await fs.readFile(fsPath);
      } catch (err: any) {
        // Mirror nft's expectation: report missing/directory paths as null.
        if (err.code === 'ENOENT' || err.code === 'EISDIR') {
          return null;
        }
        throw err;
      }
      if (basename(fsPath) === 'package.json') {
        // For Edge Functions, patch "main" field to prefer "browser" or "module"
        const pkgJson = JSON.parse(source.toString());

        // When `@remix-run/vercel` is detected, we need to modify the `package.json`
        // to include the "browser" field so that the proper Edge entrypoint file
        // is used. This is a temporary stop gap until this PR is merged:
        // https://github.com/remix-run/remix/pull/5537
        if (pkgJson.name === '@remix-run/vercel') {
          pkgJson.browser = 'dist/edge.js';
          pkgJson.dependencies['@remix-run/server-runtime'] =
            pkgJson.dependencies['@remix-run/node'];

          // Only perform the one-time setup (serialize the patched
          // package.json, copy in the edge entrypoint) on first encounter.
          if (!remixRunVercelPkgJson) {
            remixRunVercelPkgJson = JSON.stringify(pkgJson, null, 2) + '\n';

            // Copy in the edge entrypoint so that NFT can properly resolve it
            const vercelEdgeEntrypointPath = join(
              DEFAULTS_PATH,
              'vercel-edge-entrypoint.js'
            );
            const vercelEdgeEntrypointDest = join(
              dirname(fsPath),
              'dist/edge.js'
            );
            await fs.copyFile(
              vercelEdgeEntrypointPath,
              vercelEdgeEntrypointDest
            );
          }
        }

        // Prefer "browser", then "module", as the package's "main" so that
        // resolution picks the Edge-compatible entrypoint.
        for (const prop of ['browser', 'module']) {
          const val = pkgJson[prop];
          if (typeof val === 'string') {
            pkgJson.main = val;

            // Return the modified `package.json` to nft
            source = JSON.stringify(pkgJson);
            break;
          }
        }
      }
      return source;
    },
  });

  for (const warning of trace.warnings) {
    debug(`Warning from trace: ${warning.message}`);
  }

  for (const file of trace.fileList) {
    if (
      remixRunVercelPkgJson &&
      file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
    ) {
      // Use the modified `@remix-run/vercel` package.json which contains "browser" field
      files[file] = new FileBlob({ data: remixRunVercelPkgJson });
    } else {
      files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
    }
  }

  const fn = new EdgeFunction({
    files,
    deploymentTarget: 'v8-worker',
    entrypoint: handler,
    regions: config.regions,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });

  return fn;
}
|
||||
|
||||
async function writeEntrypointFile(
|
||||
path: string,
|
||||
data: string,
|
||||
rootDir: string
|
||||
) {
|
||||
try {
|
||||
await fs.writeFile(path, data);
|
||||
} catch (err: any) {
|
||||
if (err.code === 'ENOENT') {
|
||||
throw new Error(
|
||||
`The "${relative(
|
||||
rootDir,
|
||||
dirname(path)
|
||||
)}" directory does not exist. Please contact support at https://vercel.com/help.`
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ import semver from 'semver';
|
||||
import { existsSync, promises as fs } from 'fs';
|
||||
import { basename, dirname, join, relative, resolve, sep } from 'path';
|
||||
import { pathToRegexp, Key } from 'path-to-regexp';
|
||||
import { debug } from '@vercel/build-utils';
|
||||
import { debug, type PackageJson } from '@vercel/build-utils';
|
||||
import { walkParentDirs } from '@vercel/build-utils';
|
||||
import { createRequire } from 'module';
|
||||
import type {
|
||||
@@ -58,8 +58,12 @@ export function findEntry(dir: string, basename: string): string | undefined {
|
||||
|
||||
const configExts = ['.js', '.cjs', '.mjs'];
|
||||
|
||||
export function findConfig(dir: string, basename: string): string | undefined {
|
||||
for (const ext of configExts) {
|
||||
export function findConfig(
|
||||
dir: string,
|
||||
basename: string,
|
||||
exts = configExts
|
||||
): string | undefined {
|
||||
for (const ext of exts) {
|
||||
const name = basename + ext;
|
||||
const file = join(dir, name);
|
||||
if (existsSync(file)) return file;
|
||||
@@ -355,6 +359,7 @@ async function ensureSymlink(
|
||||
}
|
||||
}
|
||||
|
||||
await fs.mkdir(symlinkDir, { recursive: true });
|
||||
await fs.symlink(relativeTarget, symlinkPath);
|
||||
debug(`Created symlink for "${pkgName}"`);
|
||||
}
|
||||
@@ -369,3 +374,49 @@ export function isESM(path: string): boolean {
|
||||
}
|
||||
return isESM;
|
||||
}
|
||||
|
||||
export function hasScript(scriptName: string, pkg?: PackageJson) {
|
||||
const scripts = pkg?.scripts || {};
|
||||
return typeof scripts[scriptName] === 'string';
|
||||
}
|
||||
|
||||
export async function getRemixVersion(
|
||||
dir: string,
|
||||
base: string
|
||||
): Promise<string> {
|
||||
const resolvedPath = require_.resolve('@remix-run/dev', { paths: [dir] });
|
||||
const pkgPath = await walkParentDirs({
|
||||
base,
|
||||
start: dirname(resolvedPath),
|
||||
filename: 'package.json',
|
||||
});
|
||||
if (!pkgPath) {
|
||||
throw new Error(
|
||||
`Failed to find \`package.json\` file for "@remix-run/dev"`
|
||||
);
|
||||
}
|
||||
const { version } = JSON.parse(
|
||||
await fs.readFile(pkgPath, 'utf8')
|
||||
) as PackageJson;
|
||||
if (typeof version !== 'string') {
|
||||
throw new Error(`Missing "version" field`);
|
||||
}
|
||||
return version;
|
||||
}
|
||||
|
||||
export function logNftWarnings(warnings: Set<Error>, required?: string) {
|
||||
for (const warning of warnings) {
|
||||
const m = warning.message.match(/^Failed to resolve dependency "(.+)"/);
|
||||
if (m) {
|
||||
if (m[1] === required) {
|
||||
throw new Error(
|
||||
`Missing required "${required}" package. Please add it to your \`package.json\` file.`
|
||||
);
|
||||
} else {
|
||||
console.warn(`WARN: ${m[0]}`);
|
||||
}
|
||||
} else {
|
||||
debug(`Warning from trace: ${warning.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user