Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: 51 commits
| Author | SHA1 | Date |
|---|---|---|
|  | fab5fca939 |  |
|  | 2ad44999dd |  |
|  | eee57262f0 |  |
|  | fd762d8800 |  |
|  | 8ea93839cc |  |
|  | b9f3438c2d |  |
|  | bf587f77b7 |  |
|  | 58ef91bfe8 |  |
|  | 4111fbaa89 |  |
|  | c3251e3775 |  |
|  | 33202dec1f |  |
|  | a53d1b0d38 |  |
|  | 5e3656ec1b |  |
|  | 908e7837d5 |  |
|  | dfa2f07c45 |  |
|  | 33181274bb |  |
|  | 96117d3f17 |  |
|  | 20237d4f7b |  |
|  | 293770a2f6 |  |
|  | c9c0a203cc |  |
|  | 5064dd404d |  |
|  | 1a45731c92 |  |
|  | 168f9578cf |  |
|  | c2728ef9c0 |  |
|  | c5fe7c2bea |  |
|  | b1d8b83abb |  |
|  | 24ec5c5aca |  |
|  | 37b193c845 |  |
|  | f8fab639bf |  |
|  | 6ed0fe6fb1 |  |
|  | de2738ba06 |  |
|  | 1333071a3a |  |
|  | c2d99855ea |  |
|  | 0d112c848a |  |
|  | d17abf463a |  |
|  | 440ef3ba98 |  |
|  | 38c5e93625 |  |
|  | 4d51d777fe |  |
|  | 1fee87e76f |  |
|  | ea0e9aeaec |  |
|  | 7910f2f307 |  |
|  | 670441620f |  |
|  | bfc01fd98f |  |
|  | 6d74b9b61a |  |
|  | 9483d49f72 |  |
|  | 6740f9b155 |  |
|  | b663f813e1 |  |
|  | e318a0eea5 |  |
|  | 644721a90d |  |
|  | e109e3325a |  |
|  | 92b2fbe372 |  |
@@ -10,6 +10,7 @@ packages/*/test/fixtures
packages/cli/@types
packages/cli/download
packages/cli/dist
packages/cli/test/fixtures
packages/cli/test/dev/fixtures
packages/cli/bin
packages/cli/link
@@ -38,7 +39,7 @@ packages/static-build/test/cache-fixtures
packages/redwood/test/fixtures

# remix
packages/remix/test/fixtures
packages/remix/test/fixtures-*

# gatsby-plugin-vercel-analytics
packages/gatsby-plugin-vercel-analytics
22 .github/CODEOWNERS vendored
@@ -2,17 +2,17 @@
# https://help.github.com/en/articles/about-code-owners

# Restricted Paths
* @TooTallNate @EndangeredMassa @trek
/.github/workflows @TooTallNate @EndangeredMassa @trek @ijjk
/packages/fs-detectors @TooTallNate @EndangeredMassa @trek @agadzik @chloetedder
/packages/next @TooTallNate @EndangeredMassa @Ethan-Arrowood @trek @ijjk @ztanner
/packages/routing-utils @TooTallNate @EndangeredMassa @trek @ijjk
/packages/static-build @TooTallNate @EndangeredMassa @trek
/packages/edge @vercel/compute @TooTallNate @EndangeredMassa @trek
/examples @leerob
/examples/create-react-app @Timer
/examples/nextjs @timneutkens @ijjk @ztanner @huozhi
/packages/node @TooTallNate @EndangeredMassa @trek @Kikobeats
* @TooTallNate @EndangeredMassa @trek @onsclom
/.github/workflows @TooTallNate @EndangeredMassa @trek @onsclom @ijjk
/packages/fs-detectors @TooTallNate @EndangeredMassa @trek @onsclom @agadzik @chloetedder
/packages/next @TooTallNate @EndangeredMassa @trek @onsclom @timneutkens @ijjk @ztanner @huozhi @Ethan-Arrowood @styfle
/packages/routing-utils @TooTallNate @EndangeredMassa @trek @onsclom @ijjk
/packages/static-build @TooTallNate @EndangeredMassa @trek @onsclom
/packages/edge @TooTallNate @EndangeredMassa @trek @onsclom @vercel/compute
/examples @TooTallNate @EndangeredMassa @trek @onsclom @leerob
/examples/create-react-app @TooTallNate @EndangeredMassa @trek @onsclom @Timer
/examples/nextjs @TooTallNate @EndangeredMassa @trek @onsclom @timneutkens @ijjk @ztanner @huozhi @Ethan-Arrowood @styfle
/packages/node @TooTallNate @EndangeredMassa @trek @onsclom @Kikobeats

# Unrestricted Paths
.changeset/
79 .github/dependabot.yml vendored Normal file
@@ -0,0 +1,79 @@
version: 2

updates:
  - schedule:
      interval: 'daily'
    open-pull-requests-limit: 1
    reviewers:
      - 'trek'
      - 'TooTallNate'
      - 'EndangeredMassa'
    commit-message:
      prefix: '[framework-fixtures]'
    package-ecosystem: 'npm'
    directory: /packages/static-build/test/fixtures/angular-v17
    allow:
      - dependency-name: '@angular*'
    ignore:
      - dependency-name: '@angular*'
        update-types:
          ['version-update:semver-major', 'version-update:semver-patch']
    groups:
      core:
        patterns:
          - '@angular*'
        update-types:
          - 'minor'

  - schedule:
      interval: 'daily'
    open-pull-requests-limit: 1
    reviewers:
      - 'trek'
      - 'TooTallNate'
      - 'EndangeredMassa'
    commit-message:
      prefix: '[framework-fixtures]'
    package-ecosystem: 'npm'
    directory: /packages/static-build/test/fixtures/astro-v4
    allow:
      - dependency-name: 'astro*'
    ignore:
      - dependency-name: 'astro*'
        update-types:
          ['version-update:semver-major', 'version-update:semver-patch']
    groups:
      core:
        patterns:
          - 'astro*'
        update-types:
          - 'minor'

  - schedule:
      interval: 'daily'
    open-pull-requests-limit: 1
    reviewers:
      - 'trek'
      - 'TooTallNate'
      - 'EndangeredMassa'
    commit-message:
      prefix: '[framework-fixtures]'
    package-ecosystem: 'npm'
    directory: /packages/static-build/test/fixtures/hydrogen-v2023
    allow:
      - dependency-name: '@remix-run*'
      - dependency-name: '@shopify*'
    ignore:
      - dependency-name: '@remix-run*'
        update-types:
          ['version-update:semver-major', 'version-update:semver-patch']
      - dependency-name: '@shopify*'
        update-types:
          ['version-update:semver-major', 'version-update:semver-patch']
    groups:
      core:
        patterns:
          - '@remix-run*'
          - '@shopify*'
        update-types:
          - 'minor'
@@ -29,6 +29,7 @@ turbo-cache-key.json
packages/*/dist
packages/*/node_modules
packages/**/test/fixtures
packages/**/test/fixtures-*
packages/**/test/dev/fixtures
packages/**/test/build-fixtures
packages/**/test/cache-fixtures
@@ -13,7 +13,7 @@
<a href="https://vercel.com/docs"><strong>Documentation</strong></a> ·
<a href="https://vercel.com/changelog"><strong>Changelog</strong></a> ·
<a href="https://vercel.com/templates"><strong>Templates</strong></a> ·
<a href="https://vercel.com/cli"><strong>CLI</strong></a>
<a href="https://vercel.com/docs/cli"><strong>CLI</strong></a>
</p>
<br/>

@@ -23,7 +23,7 @@ Vercel’s Frontend Cloud provides the developer experience and infrastructure t

## Deploy

Get started by [importing a project](https://vercel.com/new) or using the [Vercel CLI](https://vercel.com/cli). Then, `git push` to deploy.
Get started by [importing a project](https://vercel.com/new) or using the [Vercel CLI](https://vercel.com/docs/cli). Then, `git push` to deploy.

## Documentation
2 examples/README.md vendored
@@ -1,6 +1,6 @@
# Vercel Examples

To get started using any of these examples as your own project, [install Vercel](https://vercel.com/cli) and use either of the following commands in your terminal:
To get started using any of these examples as your own project, [install Vercel](https://vercel.com/docs/cli) and use either of the following commands in your terminal:

```sh
vercel init # Pick an example in the CLI
@@ -8,7 +8,7 @@ This directory is a brief example of a [Hydrogen v2](https://shopify.dev/custom-

_Live Example: https://hydrogen-v2-template.vercel.app_

You can also deploy using the [Vercel CLI](https://vercel.com/cli):
You can also deploy using the [Vercel CLI](https://vercel.com/docs/cli):

```sh
npm i -g vercel

@@ -27,3 +27,6 @@ end

# Performance-booster for watching directories on Windows
gem "wdm", "~> 0.1.1", :platforms => [:mingw, :x64_mingw, :mswin]

# Webrick not installed by default in Ruby 3.0+
gem "webrick"
@@ -1,20 +1,20 @@
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
addressable (2.7.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
addressable (2.8.6)
|
||||
public_suffix (>= 2.0.2, < 6.0)
|
||||
colorator (1.1.0)
|
||||
concurrent-ruby (1.1.8)
|
||||
em-websocket (0.5.2)
|
||||
concurrent-ruby (1.2.3)
|
||||
em-websocket (0.5.3)
|
||||
eventmachine (>= 0.12.9)
|
||||
http_parser.rb (~> 0.6.0)
|
||||
http_parser.rb (~> 0)
|
||||
eventmachine (1.2.7)
|
||||
ffi (1.14.2)
|
||||
ffi (1.16.3)
|
||||
forwardable-extended (2.6.0)
|
||||
http_parser.rb (0.6.0)
|
||||
i18n (1.8.9)
|
||||
http_parser.rb (0.8.0)
|
||||
i18n (1.14.1)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jekyll (4.2.0)
|
||||
jekyll (4.2.2)
|
||||
addressable (~> 2.4)
|
||||
colorator (~> 1.0)
|
||||
em-websocket (~> 0.5)
|
||||
@@ -29,20 +29,20 @@ GEM
|
||||
rouge (~> 3.0)
|
||||
safe_yaml (~> 1.0)
|
||||
terminal-table (~> 2.0)
|
||||
jekyll-feed (0.15.1)
|
||||
jekyll-feed (0.17.0)
|
||||
jekyll (>= 3.7, < 5.0)
|
||||
jekyll-sass-converter (2.1.0)
|
||||
jekyll-sass-converter (2.2.0)
|
||||
sassc (> 2.0.1, < 3.0)
|
||||
jekyll-seo-tag (2.7.1)
|
||||
jekyll-seo-tag (2.8.0)
|
||||
jekyll (>= 3.8, < 5.0)
|
||||
jekyll-watch (2.2.1)
|
||||
listen (~> 3.0)
|
||||
kramdown (2.3.0)
|
||||
kramdown (2.4.0)
|
||||
rexml
|
||||
kramdown-parser-gfm (1.1.0)
|
||||
kramdown (~> 2.0)
|
||||
liquid (4.0.3)
|
||||
listen (3.4.1)
|
||||
liquid (4.0.4)
|
||||
listen (3.9.0)
|
||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
||||
rb-inotify (~> 0.9, >= 0.9.10)
|
||||
mercenary (0.4.0)
|
||||
@@ -52,21 +52,22 @@ GEM
|
||||
jekyll-seo-tag (~> 2.1)
|
||||
pathutil (0.16.2)
|
||||
forwardable-extended (~> 2.6)
|
||||
public_suffix (4.0.6)
|
||||
rb-fsevent (0.10.4)
|
||||
public_suffix (5.0.4)
|
||||
rb-fsevent (0.11.2)
|
||||
rb-inotify (0.10.1)
|
||||
ffi (~> 1.0)
|
||||
rexml (3.2.4)
|
||||
rouge (3.26.0)
|
||||
rexml (3.2.6)
|
||||
rouge (3.30.0)
|
||||
safe_yaml (1.0.5)
|
||||
sassc (2.4.0)
|
||||
ffi (~> 1.9)
|
||||
terminal-table (2.0.0)
|
||||
unicode-display_width (~> 1.1, >= 1.1.1)
|
||||
unicode-display_width (1.7.0)
|
||||
unicode-display_width (1.8.0)
|
||||
webrick (1.8.1)
|
||||
|
||||
PLATFORMS
|
||||
x86_64-linux
|
||||
ruby
|
||||
|
||||
DEPENDENCIES
|
||||
jekyll (~> 4.2.0)
|
||||
@@ -75,6 +76,7 @@ DEPENDENCIES
|
||||
tzinfo (~> 1.2)
|
||||
tzinfo-data
|
||||
wdm (~> 0.1.1)
|
||||
webrick
|
||||
|
||||
BUNDLED WITH
|
||||
2.2.4
|
||||
2.5.6
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
source 'https://rubygems.org'
|
||||
|
||||
gem 'middleman', '~> 4.2'
|
||||
gem 'middleman-autoprefixer', '~> 2.7'
|
||||
gem 'middleman', '~> 4.5'
|
||||
gem 'middleman-autoprefixer', '~> 3.0'
|
||||
gem 'tzinfo-data', platforms: [:mswin, :mingw, :jruby, :x64_mingw]
|
||||
gem 'wdm', '~> 0.1', platforms: [:mswin, :mingw, :x64_mingw]
|
||||
|
||||
@@ -1,59 +1,60 @@
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
activesupport (5.2.4.5)
|
||||
activesupport (7.0.8.1)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 0.7, < 2)
|
||||
minitest (~> 5.1)
|
||||
tzinfo (~> 1.1)
|
||||
addressable (2.7.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
autoprefixer-rails (9.8.6.5)
|
||||
execjs
|
||||
backports (3.20.2)
|
||||
i18n (>= 1.6, < 2)
|
||||
minitest (>= 5.1)
|
||||
tzinfo (~> 2.0)
|
||||
addressable (2.8.6)
|
||||
public_suffix (>= 2.0.2, < 6.0)
|
||||
autoprefixer-rails (10.4.16.0)
|
||||
execjs (~> 2)
|
||||
backports (3.24.1)
|
||||
coffee-script (2.4.1)
|
||||
coffee-script-source
|
||||
execjs
|
||||
coffee-script-source (1.12.2)
|
||||
concurrent-ruby (1.1.8)
|
||||
contracts (0.13.0)
|
||||
dotenv (2.7.6)
|
||||
concurrent-ruby (1.2.3)
|
||||
contracts (0.16.1)
|
||||
dotenv (3.1.0)
|
||||
erubis (2.7.0)
|
||||
execjs (2.7.0)
|
||||
fast_blank (1.0.0)
|
||||
fastimage (2.2.2)
|
||||
ffi (1.14.2)
|
||||
haml (5.2.1)
|
||||
temple (>= 0.8.0)
|
||||
execjs (2.9.1)
|
||||
fast_blank (1.0.1)
|
||||
fastimage (2.3.0)
|
||||
ffi (1.16.3)
|
||||
haml (6.3.0)
|
||||
temple (>= 0.8.2)
|
||||
thor
|
||||
tilt
|
||||
hamster (3.0.0)
|
||||
concurrent-ruby (~> 1.0)
|
||||
hashie (3.6.0)
|
||||
i18n (0.9.5)
|
||||
i18n (1.6.0)
|
||||
concurrent-ruby (~> 1.0)
|
||||
kramdown (2.3.0)
|
||||
kramdown (2.4.0)
|
||||
rexml
|
||||
listen (3.0.8)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
listen (3.9.0)
|
||||
rb-fsevent (~> 0.10, >= 0.10.3)
|
||||
rb-inotify (~> 0.9, >= 0.9.10)
|
||||
memoist (0.16.2)
|
||||
middleman (4.3.11)
|
||||
middleman (4.5.1)
|
||||
coffee-script (~> 2.2)
|
||||
haml (>= 4.0.5)
|
||||
kramdown (>= 2.3.0)
|
||||
middleman-cli (= 4.3.11)
|
||||
middleman-core (= 4.3.11)
|
||||
middleman-autoprefixer (2.10.1)
|
||||
autoprefixer-rails (~> 9.1)
|
||||
middleman-core (>= 3.3.3)
|
||||
middleman-cli (4.3.11)
|
||||
thor (>= 0.17.0, < 2.0)
|
||||
middleman-core (4.3.11)
|
||||
activesupport (>= 4.2, < 6.0)
|
||||
addressable (~> 2.3)
|
||||
middleman-cli (= 4.5.1)
|
||||
middleman-core (= 4.5.1)
|
||||
middleman-autoprefixer (3.0.0)
|
||||
autoprefixer-rails (~> 10.0)
|
||||
middleman-core (>= 4.0.0)
|
||||
middleman-cli (4.5.1)
|
||||
thor (>= 0.17.0, < 1.3.0)
|
||||
middleman-core (4.5.1)
|
||||
activesupport (>= 6.1, < 7.1)
|
||||
addressable (~> 2.4)
|
||||
backports (~> 3.6)
|
||||
bundler
|
||||
contracts (~> 0.13.0)
|
||||
bundler (~> 2.0)
|
||||
contracts (~> 0.13, < 0.17)
|
||||
dotenv
|
||||
erubis
|
||||
execjs (~> 2.0)
|
||||
@@ -61,48 +62,52 @@ GEM
|
||||
fastimage (~> 2.0)
|
||||
hamster (~> 3.0)
|
||||
hashie (~> 3.4)
|
||||
i18n (~> 0.9.0)
|
||||
listen (~> 3.0.0)
|
||||
i18n (~> 1.6.0)
|
||||
listen (~> 3.0)
|
||||
memoist (~> 0.14)
|
||||
padrino-helpers (~> 0.13.0)
|
||||
padrino-helpers (~> 0.15.0)
|
||||
parallel
|
||||
rack (>= 1.4.5, < 3)
|
||||
sassc (~> 2.0)
|
||||
servolux
|
||||
tilt (~> 2.0.9)
|
||||
toml
|
||||
uglifier (~> 3.0)
|
||||
minitest (5.14.3)
|
||||
padrino-helpers (0.13.3.4)
|
||||
i18n (~> 0.6, >= 0.6.7)
|
||||
padrino-support (= 0.13.3.4)
|
||||
webrick
|
||||
minitest (5.22.2)
|
||||
padrino-helpers (0.15.3)
|
||||
i18n (>= 0.6.7, < 2)
|
||||
padrino-support (= 0.15.3)
|
||||
tilt (>= 1.4.1, < 3)
|
||||
padrino-support (0.13.3.4)
|
||||
activesupport (>= 3.1)
|
||||
parallel (1.20.1)
|
||||
public_suffix (4.0.6)
|
||||
rack (2.2.3)
|
||||
rb-fsevent (0.10.4)
|
||||
padrino-support (0.15.3)
|
||||
parallel (1.24.0)
|
||||
parslet (2.0.0)
|
||||
public_suffix (5.0.4)
|
||||
rack (2.2.8.1)
|
||||
rb-fsevent (0.11.2)
|
||||
rb-inotify (0.10.1)
|
||||
ffi (~> 1.0)
|
||||
rexml (3.2.4)
|
||||
rexml (3.2.6)
|
||||
sassc (2.4.0)
|
||||
ffi (~> 1.9)
|
||||
servolux (0.13.0)
|
||||
temple (0.8.2)
|
||||
thor (1.1.0)
|
||||
thread_safe (0.3.6)
|
||||
tilt (2.0.10)
|
||||
tzinfo (1.2.9)
|
||||
thread_safe (~> 0.1)
|
||||
temple (0.10.3)
|
||||
thor (1.2.2)
|
||||
tilt (2.0.11)
|
||||
toml (0.3.0)
|
||||
parslet (>= 1.8.0, < 3.0.0)
|
||||
tzinfo (2.0.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
uglifier (3.2.0)
|
||||
execjs (>= 0.3.0, < 3)
|
||||
webrick (1.8.1)
|
||||
|
||||
PLATFORMS
|
||||
x86_64-linux
|
||||
|
||||
DEPENDENCIES
|
||||
middleman (~> 4.2)
|
||||
middleman-autoprefixer (~> 2.7)
|
||||
middleman (~> 4.5)
|
||||
middleman-autoprefixer (~> 3.0)
|
||||
tzinfo-data
|
||||
wdm (~> 0.1)
|
||||
|
||||
|
||||
913 examples/nextjs/package-lock.json generated
File diff suppressed because it is too large.
@@ -11,7 +11,7 @@
"dependencies": {
"react": "^18",
"react-dom": "^18",
"next": "14.1.0"
"next": "14.1.3"
},
"devDependencies": {
"typescript": "^5",
@@ -22,6 +22,6 @@
"postcss": "^8",
"tailwindcss": "^3.3.0",
"eslint": "^8",
"eslint-config-next": "14.1.0"
"eslint-config-next": "14.1.3"
}
}
2 examples/package.json vendored
@@ -9,7 +9,7 @@
},
"devDependencies": {
"@types/jest": "27.4.1",
"@vercel/frameworks": "2.0.6"
"@vercel/frameworks": "3.0.0"
},
"version": null
}
2 examples/remix/.gitignore vendored
@@ -1,7 +1,5 @@
node_modules

/.cache
/build
/public/build
.env
.vercel
@@ -14,7 +14,7 @@ npx create-remix@latest --template vercel/vercel/examples/remix

_Live Example: https://remix-run-template.vercel.app_

You can also deploy using the [Vercel CLI](https://vercel.com/cli):
You can also deploy using the [Vercel CLI](https://vercel.com/docs/cli):

```sh
npm i -g vercel
@@ -1,20 +1,13 @@
|
||||
import { cssBundleHref } from "@remix-run/css-bundle";
|
||||
import {
|
||||
Links,
|
||||
LiveReload,
|
||||
Meta,
|
||||
Outlet,
|
||||
Scripts,
|
||||
ScrollRestoration,
|
||||
} from "@remix-run/react";
|
||||
import { Analytics } from "@vercel/analytics/react";
|
||||
import type { LinksFunction } from "@vercel/remix";
|
||||
|
||||
export const links: LinksFunction = () => [
|
||||
...(cssBundleHref ? [{ rel: "stylesheet", href: cssBundleHref }] : []),
|
||||
];
|
||||
|
||||
export default function App() {
|
||||
export function Layout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
@@ -24,12 +17,15 @@ export default function App() {
|
||||
<Links />
|
||||
</head>
|
||||
<body>
|
||||
<Outlet />
|
||||
{children}
|
||||
<ScrollRestoration />
|
||||
<Scripts />
|
||||
<LiveReload />
|
||||
<Analytics />
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
|
||||
export default function App() {
|
||||
return <Outlet />;
|
||||
}
|
||||
|
||||
@@ -4,29 +4,29 @@
|
||||
"sideEffects": false,
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"build": "remix build",
|
||||
"dev": "remix dev --manual",
|
||||
"start": "remix-serve ./build/index.js",
|
||||
"build": "remix vite:build",
|
||||
"dev": "remix vite:dev",
|
||||
"typecheck": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"@remix-run/css-bundle": "^2.0.0",
|
||||
"@remix-run/node": "^2.0.0",
|
||||
"@remix-run/react": "^2.0.0",
|
||||
"@remix-run/serve": "^2.0.0",
|
||||
"@vercel/analytics": "^1.0.2",
|
||||
"@vercel/remix": "^2.0.0",
|
||||
"isbot": "^3.6.8",
|
||||
"@remix-run/node": "^2.8.0",
|
||||
"@remix-run/react": "^2.8.0",
|
||||
"@remix-run/server-runtime": "^2.8.0",
|
||||
"@vercel/analytics": "^1.2.2",
|
||||
"@vercel/remix": "^2.8.0",
|
||||
"isbot": "^4",
|
||||
"react": "^18.2.0",
|
||||
"react-dom": "^18.2.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@remix-run/dev": "^2.0.0",
|
||||
"@remix-run/eslint-config": "^2.0.0",
|
||||
"@remix-run/dev": "^2.8.0",
|
||||
"@remix-run/eslint-config": "^2.8.0",
|
||||
"@types/react": "^18.2.20",
|
||||
"@types/react-dom": "^18.2.7",
|
||||
"eslint": "^8.38.0",
|
||||
"typescript": "^5.1.6"
|
||||
"typescript": "^5.1.6",
|
||||
"vite": "^5.1.0",
|
||||
"vite-tsconfig-paths": "^4.2.1"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18.0.0"
|
||||
|
||||
@@ -1,22 +1,25 @@
|
||||
{
|
||||
"include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
|
||||
"include": ["**/*.ts", "**/*.tsx"],
|
||||
"compilerOptions": {
|
||||
"lib": ["DOM", "DOM.Iterable", "ES2022"],
|
||||
"types": ["@vercel/remix", "node", "vite/client"],
|
||||
"isolatedModules": true,
|
||||
"esModuleInterop": true,
|
||||
"jsx": "react-jsx",
|
||||
"module": "ESNext",
|
||||
"moduleResolution": "Bundler",
|
||||
"resolveJsonModule": true,
|
||||
"target": "ES2022",
|
||||
"strict": true,
|
||||
"allowJs": true,
|
||||
"skipLibCheck": true,
|
||||
"forceConsistentCasingInFileNames": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {
|
||||
"~/*": ["./app/*"]
|
||||
},
|
||||
|
||||
// Remix takes care of building everything in `remix build`.
|
||||
// Vite takes care of building everything, not tsc.
|
||||
"noEmit": true
|
||||
}
|
||||
}
|
||||
|
||||
11 examples/remix/vite.config.ts Normal file
@@ -0,0 +1,11 @@
import { vitePlugin as remix } from "@remix-run/dev";
import { installGlobals } from "@remix-run/node";
import { defineConfig } from "vite";
import { vercelPreset } from '@vercel/remix/vite';
import tsconfigPaths from "vite-tsconfig-paths";

installGlobals();

export default defineConfig({
  plugins: [remix({ presets: [vercelPreset()] }), tsconfigPaths()],
});
@@ -18,7 +18,7 @@ Install dependencies:
|
||||
npx @sanity/cli install
|
||||
```
|
||||
|
||||
Pull down environment variables from your Vercel project (requires the [Vercel CLI](https://vercel.com/cli)):
|
||||
Pull down environment variables from your Vercel project (requires the [Vercel CLI](https://vercel.com/docs/cli)):
|
||||
|
||||
```sh
|
||||
vercel env pull
|
||||
|
||||
@@ -1,5 +1,26 @@
|
||||
# @vercel-internals/types
|
||||
|
||||
## 1.0.26
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`8ea93839c`](https://github.com/vercel/vercel/commit/8ea93839ccc70816f3ece9d7cfdb857aa7a4b015)]:
|
||||
- @vercel/build-utils@7.9.0
|
||||
|
||||
## 1.0.25
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`908e7837d`](https://github.com/vercel/vercel/commit/908e7837d55bc02e708f402c700e00208415e954), [`5e3656ec1`](https://github.com/vercel/vercel/commit/5e3656ec1b3f0561091636582715ba09ddd8cb2d)]:
|
||||
- @vercel/build-utils@7.8.0
|
||||
|
||||
## 1.0.24
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
|
||||
- @vercel/build-utils@7.7.1
|
||||
|
||||
## 1.0.23
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"private": true,
|
||||
"name": "@vercel-internals/types",
|
||||
"version": "1.0.23",
|
||||
"version": "1.0.26",
|
||||
"types": "index.d.ts",
|
||||
"main": "index.d.ts",
|
||||
"files": [
|
||||
@@ -10,7 +10,7 @@
|
||||
"dependencies": {
|
||||
"@types/node": "14.14.31",
|
||||
"@vercel-internals/constants": "1.0.4",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/build-utils": "7.9.0",
|
||||
"@vercel/routing-utils": "3.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -33,7 +33,7 @@
|
||||
"source-map-support": "0.5.12",
|
||||
"ts-eager": "2.0.2",
|
||||
"ts-jest": "29.1.0",
|
||||
"turbo": "1.12.2",
|
||||
"turbo": "1.12.5",
|
||||
"typescript": "4.9.5"
|
||||
},
|
||||
"scripts": {
|
||||
|
||||
@@ -1,5 +1,27 @@
# @vercel/build-utils

## 7.9.0

### Minor Changes

- Add `base` parameter to `scanParentDirs()` ([#11261](https://github.com/vercel/vercel/pull/11261))
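As a rough illustration of the new parameter (a minimal sketch: the monorepo paths below are hypothetical, while the `scanParentDirs(destPath, readPackageJson, base)` signature and the returned fields come from the diffs in this changeset):

```ts
import { join } from 'path';
import { scanParentDirs } from '@vercel/build-utils';

async function detectPackageManager() {
  // Hypothetical monorepo layout: the app lives in apps/web, the lockfile at the repo root.
  const repoRoot = '/home/user/my-monorepo';
  const appDir = join(repoRoot, 'apps', 'web');

  // Passing `base` stops the upward walk at the repository root instead of '/',
  // so files outside the repository cannot influence detection.
  const { cliType, packageJsonPath, lockfilePath, lockfileVersion } =
    await scanParentDirs(appDir, true, repoRoot);

  console.log({ cliType, packageJsonPath, lockfilePath, lockfileVersion });
}

detectPackageManager().catch(console.error);
```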
## 7.8.0

### Minor Changes

- Remove `VERCEL_ENABLE_NPM_DEFAULT` env var check ([#11242](https://github.com/vercel/vercel/pull/11242))

### Patch Changes

- Rename variants to flags and remove legacy flags ([#11121](https://github.com/vercel/vercel/pull/11121))

## 7.7.1

### Patch Changes

- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))

## 7.7.0

### Minor Changes
@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "7.7.0",
"version": "7.9.0",
"license": "Apache-2.0",
"main": "./dist/index.js",
"types": "./dist/index.d.js",
@@ -276,12 +276,13 @@ export async function getNodeVersion(

export async function scanParentDirs(
destPath: string,
readPackageJson = false
readPackageJson = false,
base = '/'
): Promise<ScanParentDirsResult> {
assert(path.isAbsolute(destPath));

const pkgJsonPath = await walkParentDirs({
base: '/',
base,
start: destPath,
filename: 'package.json',
});
@@ -291,7 +292,7 @@ export async function scanParentDirs(
: undefined;
const [yarnLockPath, npmLockPath, pnpmLockPath, bunLockPath] =
await walkParentDirsMulti({
base: '/',
base,
start: destPath,
filenames: [
'yarn.lock',
@@ -339,11 +340,7 @@ export async function scanParentDirs(
// TODO: read "bun-lockfile-format-v0"
lockfileVersion = 0;
} else {
if (process.env.VERCEL_ENABLE_NPM_DEFAULT === '1') {
cliType = 'npm';
} else {
cliType = 'yarn';
}
cliType = 'npm';
}

const packageJsonPath = pkgJsonPath || undefined;
@@ -14,7 +14,7 @@ export const functionsSchema = {
},
memory: {
minimum: 128,
maximum: 3008,
maximum: 3009,
},
maxDuration: {
type: 'number',
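For illustration only (this is not the CLI's actual validation path): a bounds change like the one above can be exercised with Ajv, which the CLI lists as a dependency. The schema here is a simplified stand-in for `functionsSchema`, keeping just the memory limits shown in the diff:

```ts
import Ajv from 'ajv';

// Simplified stand-in for functionsSchema: only the memory bounds (128..3009) are modeled.
const functionsLikeSchema = {
  type: 'object',
  additionalProperties: {
    type: 'object',
    properties: {
      memory: { type: 'number', minimum: 128, maximum: 3009 },
      maxDuration: { type: 'number' },
    },
  },
};

const ajv = new Ajv();
const validate = ajv.compile(functionsLikeSchema);

// Hypothetical `functions` blocks from a vercel.json
console.log(validate({ 'api/index.ts': { memory: 3009 } })); // true under the new maximum
console.log(validate({ 'api/index.ts': { memory: 4096 } }), validate.errors); // false, with errors
```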
@@ -440,15 +440,6 @@ export interface Cron {
schedule: string;
}

/**
* @deprecated Replaced by Variants. Remove once fully replaced.
*/
export interface Flag {
key: string;
defaultValue?: unknown;
metadata: Record<string, unknown>;
}

/** The framework which created the function */
export interface FunctionFramework {
slug: string;
@@ -473,9 +464,7 @@ export interface BuildResultV2Typical {
framework?: {
version: string;
};
/** @deprecated Replaced by Variants. Remove once fully replaced. */
flags?: Flag[];
variants?: Record<string, VariantDefinition>;
flags?: { definitions: FlagDefinitions };
}

export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;
@@ -500,23 +489,25 @@ export type StartDevServer = (
* TODO: The following types will eventually be exported by a more
* relevant package.
*/
type VariantJSONArray = ReadonlyArray<VariantJSONValue>;
type FlagJSONArray = ReadonlyArray<FlagJSONValue>;

type VariantJSONValue =
type FlagJSONValue =
| string
| boolean
| number
| null
| VariantJSONArray
| { [key: string]: VariantJSONValue };
| FlagJSONArray
| { [key: string]: FlagJSONValue };

type VariantOption = {
value: VariantJSONValue;
type FlagOption = {
value: FlagJSONValue;
label?: string;
};

export interface VariantDefinition {
options?: VariantOption[];
export interface FlagDefinition {
options?: FlagOption[];
origin?: string;
description?: string;
}

export type FlagDefinitions = Record<string, FlagDefinition>;
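To illustrate the renamed shape (a sketch: the flag key, label, and origin values below are made up; only the `FlagDefinition`/`FlagDefinitions` structure and the `flags?: { definitions }` field come from the types above):

```ts
import type { BuildResultV2Typical, FlagDefinitions } from '@vercel/build-utils';

// Hypothetical flag definitions; only the shape is taken from the types above.
const definitions: FlagDefinitions = {
  'new-checkout': {
    description: 'Enables the redesigned checkout flow',
    origin: 'https://example.com/flags/new-checkout',
    options: [
      { value: false, label: 'Off' },
      { value: true, label: 'On' },
    ],
  },
};

// Sketch of the relevant slice of a v2 build result payload.
const result: Pick<BuildResultV2Typical, 'flags'> = {
  flags: { definitions },
};

console.log(Object.keys(result.flags?.definitions ?? {}));
```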
@@ -1,7 +0,0 @@
{
"build": {
"env": {
"VERCEL_ENABLE_NPM_DEFAULT": "1"
}
}
}
@@ -23,8 +23,6 @@ const skipFixtures: string[] = [
'23-pnpm-workspaces',
'41-nx-monorepo',
'42-npm-workspace-with-nx',
'jekyll-v4',
'middleman-v4',
];

// eslint-disable-next-line no-restricted-syntax
26 packages/build-utils/test/unit.test.ts vendored
@@ -576,7 +576,7 @@ it(
|
||||
ms('1m')
|
||||
);
|
||||
|
||||
it('should return cliType npm when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is set', async () => {
|
||||
it('should return cliType "npm" when no lockfile is present', async () => {
|
||||
const originalRepoLockfilePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
@@ -587,36 +587,12 @@ it('should return cliType npm when no lockfile is present and VERCEL_ENABLE_NPM_
|
||||
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
|
||||
await fs.remove(originalRepoLockfilePath);
|
||||
try {
|
||||
process.env.VERCEL_ENABLE_NPM_DEFAULT = '1';
|
||||
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
|
||||
const result = await scanParentDirs(fixture);
|
||||
expect(result.cliType).toEqual('npm');
|
||||
expect(result.lockfileVersion).toEqual(undefined);
|
||||
expect(result.lockfilePath).toEqual(undefined);
|
||||
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
|
||||
} finally {
|
||||
delete process.env.VERCEL_ENABLE_NPM_DEFAULT;
|
||||
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
|
||||
}
|
||||
});
|
||||
|
||||
it('should return cliType yarn when no lockfile is present and VERCEL_ENABLE_NPM_DEFAULT is not set', async () => {
|
||||
const originalRepoLockfilePath = path.join(
|
||||
__dirname,
|
||||
'..',
|
||||
'..',
|
||||
'..',
|
||||
'pnpm-lock.yaml'
|
||||
);
|
||||
const originalRepoLockfileData = await fs.readFile(originalRepoLockfilePath);
|
||||
await fs.remove(originalRepoLockfilePath);
|
||||
try {
|
||||
const fixture = path.join(__dirname, 'fixtures', '40-no-lockfile');
|
||||
const result = await scanParentDirs(fixture);
|
||||
expect(result.cliType).toEqual('yarn');
|
||||
expect(result.lockfileVersion).toEqual(undefined);
|
||||
expect(result.lockfilePath).toEqual(undefined);
|
||||
expect(result.packageJsonPath).toEqual(path.join(fixture, 'package.json'));
|
||||
} finally {
|
||||
await fs.writeFile(originalRepoLockfilePath, originalRepoLockfileData);
|
||||
}
|
||||
|
||||
@@ -1,5 +1,69 @@
|
||||
# vercel
|
||||
|
||||
## 33.6.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Set `projectSettings.nodeVersion` in `vc deploy` based on "engines.node" field ([#11261](https://github.com/vercel/vercel/pull/11261))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Stops warning about legacy Speed Insights for Next.js apps ([#11268](https://github.com/vercel/vercel/pull/11268))
|
||||
|
||||
- Fix framework version detection in monorepos ([#11212](https://github.com/vercel/vercel/pull/11212))
|
||||
|
||||
- Updated dependencies [[`8ea93839c`](https://github.com/vercel/vercel/commit/8ea93839ccc70816f3ece9d7cfdb857aa7a4b015), [`58ef91bfe`](https://github.com/vercel/vercel/commit/58ef91bfe8c2e7176e8783cc4eb91ee8580c70dc)]:
|
||||
- @vercel/build-utils@7.9.0
|
||||
- @vercel/remix-builder@2.1.3
|
||||
- @vercel/node@3.0.23
|
||||
- @vercel/static-build@2.4.3
|
||||
|
||||
## 33.5.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Rename variants to flags and remove legacy flags ([#11121](https://github.com/vercel/vercel/pull/11121))
|
||||
|
||||
- fix vc with root dir issues ([#11243](https://github.com/vercel/vercel/pull/11243))
|
||||
|
||||
- Updated dependencies [[`908e7837d`](https://github.com/vercel/vercel/commit/908e7837d55bc02e708f402c700e00208415e954), [`5e3656ec1`](https://github.com/vercel/vercel/commit/5e3656ec1b3f0561091636582715ba09ddd8cb2d), [`a53d1b0d3`](https://github.com/vercel/vercel/commit/a53d1b0d38efa9637f8b8f81a70898add87530e3)]:
|
||||
- @vercel/build-utils@7.8.0
|
||||
- @vercel/next@4.1.5
|
||||
- @vercel/remix-builder@2.1.2
|
||||
- @vercel/node@3.0.22
|
||||
- @vercel/static-build@2.4.2
|
||||
|
||||
## 33.5.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
|
||||
|
||||
- Updated dependencies [[`b1d8b83ab`](https://github.com/vercel/vercel/commit/b1d8b83abbf23a3485aedb490992d0a3bf44573f), [`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f), [`20237d4f7`](https://github.com/vercel/vercel/commit/20237d4f7b55b0697b57db15636c11204cb0dc39), [`f8fab639b`](https://github.com/vercel/vercel/commit/f8fab639bf49a60389b8d0b7b265a737c17b4ae1), [`6ed0fe6fb`](https://github.com/vercel/vercel/commit/6ed0fe6fb1e487545a790ff5b9fc691cf625f005)]:
|
||||
- @vercel/next@4.1.4
|
||||
- @vercel/build-utils@7.7.1
|
||||
- @vercel/remix-builder@2.1.1
|
||||
- @vercel/static-build@2.4.1
|
||||
- @vercel/node@3.0.21
|
||||
|
||||
## 33.5.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`c2d99855e`](https://github.com/vercel/vercel/commit/c2d99855ea6132380434ed29643120680f95fad7), [`1333071a3`](https://github.com/vercel/vercel/commit/1333071a3a2d324679327bfdd4e872f8fd3521c6)]:
|
||||
- @vercel/next@4.1.3
|
||||
- @vercel/remix-builder@2.1.0
|
||||
|
||||
## 33.5.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`e109e3325`](https://github.com/vercel/vercel/commit/e109e3325ab5299da0903034175fabe72d486a4e), [`d17abf463`](https://github.com/vercel/vercel/commit/d17abf463acabf9e1e43b91200f18efd34e91f62), [`644721a90`](https://github.com/vercel/vercel/commit/644721a90da8cf98414d272be9da0a821a2ce217), [`ea0e9aeae`](https://github.com/vercel/vercel/commit/ea0e9aeaec8ddddb5a726be0d252df9cdbd84808), [`e318a0eea`](https://github.com/vercel/vercel/commit/e318a0eea55c9b8536b0874f66cfd03aca6f0adf), [`1fee87e76`](https://github.com/vercel/vercel/commit/1fee87e76f18d2f5e5524247cfce615fa1832e49), [`bfc01fd98`](https://github.com/vercel/vercel/commit/bfc01fd98f760a008d0d2e6c52b5216503b44b75), [`7910f2f30`](https://github.com/vercel/vercel/commit/7910f2f3070ff69742e845e795d4db77d598c181), [`440ef3ba9`](https://github.com/vercel/vercel/commit/440ef3ba98af8f05e7714c86c67c36dbda11e85c)]:
|
||||
- @vercel/remix-builder@2.0.20
|
||||
- @vercel/next@4.1.2
|
||||
- @vercel/node@3.0.20
|
||||
- @vercel/redwood@2.0.8
|
||||
|
||||
## 33.5.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -7,6 +7,7 @@ module.exports = {
|
||||
{
|
||||
diagnostics: true,
|
||||
isolatedModules: true,
|
||||
tsconfig: 'test/tsconfig.json',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "33.5.1",
|
||||
"version": "33.6.0",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -31,17 +31,17 @@
|
||||
"node": ">= 16"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/build-utils": "7.9.0",
|
||||
"@vercel/fun": "1.1.0",
|
||||
"@vercel/go": "3.0.5",
|
||||
"@vercel/hydrogen": "1.0.2",
|
||||
"@vercel/next": "4.1.1",
|
||||
"@vercel/node": "3.0.19",
|
||||
"@vercel/next": "4.1.5",
|
||||
"@vercel/node": "3.0.23",
|
||||
"@vercel/python": "4.1.1",
|
||||
"@vercel/redwood": "2.0.7",
|
||||
"@vercel/remix-builder": "2.0.19",
|
||||
"@vercel/redwood": "2.0.8",
|
||||
"@vercel/remix-builder": "2.1.3",
|
||||
"@vercel/ruby": "2.0.5",
|
||||
"@vercel/static-build": "2.4.0",
|
||||
"@vercel/static-build": "2.4.3",
|
||||
"chokidar": "3.3.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -88,11 +88,11 @@
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel-internals/constants": "1.0.4",
|
||||
"@vercel-internals/get-package-json": "1.0.0",
|
||||
"@vercel-internals/types": "1.0.23",
|
||||
"@vercel/client": "13.1.3",
|
||||
"@vercel-internals/types": "1.0.26",
|
||||
"@vercel/client": "13.1.6",
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/frameworks": "2.0.6",
|
||||
"@vercel/fs-detectors": "5.1.6",
|
||||
"@vercel/frameworks": "3.0.0",
|
||||
"@vercel/fs-detectors": "5.2.1",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"ajv": "6.12.2",
|
||||
"alpha-sort": "2.0.1",
|
||||
|
||||
@@ -21,8 +21,7 @@ import {
|
||||
NowBuildError,
|
||||
Cron,
|
||||
validateNpmrc,
|
||||
Flag,
|
||||
VariantDefinition,
|
||||
type FlagDefinitions,
|
||||
} from '@vercel/build-utils';
|
||||
import {
|
||||
detectBuilders,
|
||||
@@ -96,9 +95,6 @@ interface BuildOutputConfig {
|
||||
version: string;
|
||||
};
|
||||
crons?: Cron[];
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags?: Flag[];
|
||||
variants?: Record<string, VariantDefinition>;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -261,9 +257,6 @@ export default async function main(client: Client): Promise<number> {
|
||||
if (project.settings.analyticsId) {
|
||||
envToUnset.add('VERCEL_ANALYTICS_ID');
|
||||
process.env.VERCEL_ANALYTICS_ID = project.settings.analyticsId;
|
||||
output.warn(
|
||||
'Vercel Speed Insights auto-injection is deprecated in favor of @vercel/speed-insights package. Learn more: https://vercel.link/upgrate-to-speed-insights-package'
|
||||
);
|
||||
}
|
||||
|
||||
// Some build processes use these env vars to platform detect Vercel
|
||||
@@ -667,9 +660,8 @@ async function doBuild(
|
||||
const mergedWildcard = mergeWildcard(buildResults.values());
|
||||
const mergedOverrides: Record<string, PathOverride> =
|
||||
overrides.length > 0 ? Object.assign({}, ...overrides) : undefined;
|
||||
const mergedFlags = mergeFlags(buildResults.values());
|
||||
|
||||
const framework = await getFramework(cwd, buildResults);
|
||||
const framework = await getFramework(workPath, buildResults);
|
||||
|
||||
// Write out the final `config.json` file based on the
|
||||
// user configuration and Builder build results
|
||||
@@ -681,12 +673,10 @@ async function doBuild(
|
||||
overrides: mergedOverrides,
|
||||
framework,
|
||||
crons: mergedCrons,
|
||||
/** @deprecated Replaced by Variants. Remove once fully replaced. */
|
||||
flags: mergedFlags,
|
||||
};
|
||||
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });
|
||||
|
||||
await writeVariantsJson(client, buildResults.values(), outputDir);
|
||||
await writeFlagsJSON(client, buildResults.values(), outputDir);
|
||||
|
||||
const relOutputDir = relative(cwd, outputDir);
|
||||
output.print(
|
||||
@@ -820,60 +810,51 @@ function mergeWildcard(
|
||||
return wildcard;
|
||||
}
|
||||
|
||||
function mergeFlags(
|
||||
buildResults: Iterable<BuildResult | BuildOutputConfig>
|
||||
): BuildResultV2Typical['flags'] {
|
||||
return Array.from(buildResults).flatMap(result => {
|
||||
if ('flags' in result) {
|
||||
return result.flags ?? [];
|
||||
}
|
||||
|
||||
return [];
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Takes the build output and writes all the variants into the `variants.json`
|
||||
* file. It'll skip variants that already exist.
|
||||
* Takes the build output and writes all the flags into the `flags.json`
|
||||
* file. It'll skip flags that already exist.
|
||||
*/
|
||||
async function writeVariantsJson(
|
||||
async function writeFlagsJSON(
|
||||
{ output }: Client,
|
||||
buildResults: Iterable<BuildResult | BuildOutputConfig>,
|
||||
outputDir: string
|
||||
): Promise<void> {
|
||||
const variantsFilePath = join(outputDir, 'variants.json');
|
||||
const flagsFilePath = join(outputDir, 'flags.json');
|
||||
|
||||
let hasVariants = true;
|
||||
let hasFlags = true;
|
||||
|
||||
const variants = (await fs.readJSON(variantsFilePath).catch(error => {
|
||||
const flags = (await fs.readJSON(flagsFilePath).catch(error => {
|
||||
if (error.code === 'ENOENT') {
|
||||
hasVariants = false;
|
||||
hasFlags = false;
|
||||
return { definitions: {} };
|
||||
}
|
||||
|
||||
throw error;
|
||||
})) as { definitions: Record<string, VariantDefinition> };
|
||||
})) as { definitions: FlagDefinitions };
|
||||
|
||||
for (const result of buildResults) {
|
||||
if (!('variants' in result) || !result.variants) continue;
|
||||
if (!('flags' in result) || !result.flags || !result.flags.definitions)
|
||||
continue;
|
||||
|
||||
for (const [key, defintion] of Object.entries(result.variants)) {
|
||||
if (result.variants[key]) {
|
||||
for (const [key, definition] of Object.entries(result.flags.definitions)) {
|
||||
if (result.flags.definitions[key]) {
|
||||
output.warn(
|
||||
`The variant "${key}" was found multiple times. Only its first occurrence will be considered.`
|
||||
`The flag "${key}" was found multiple times. Only its first occurrence will be considered.`
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
hasVariants = true;
|
||||
variants.definitions[key] = defintion;
|
||||
hasFlags = true;
|
||||
flags.definitions[key] = definition;
|
||||
}
|
||||
}
|
||||
|
||||
// Only create the file when there are variants to write,
|
||||
// Only create the file when there are flags to write,
|
||||
// or when the file already exists.
|
||||
if (hasVariants) {
|
||||
await fs.writeJSON(variantsFilePath, variants, { spaces: 2 });
|
||||
// Checking `definitions` alone won't be enough in case there
|
||||
// are other properties set.
|
||||
if (hasFlags) {
|
||||
await fs.writeJSON(flagsFilePath, flags, { spaces: 2 });
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ import ms from 'ms';
import fs from 'fs-extra';
import bytes from 'bytes';
import chalk from 'chalk';
import semver from 'semver';
import { join, resolve } from 'path';
import {
fileNameSymbol,
@@ -14,7 +15,7 @@ import { readLocalConfig } from '../../util/config/files';
import getArgs from '../../util/get-args';
import { handleError } from '../../util/error';
import Client from '../../util/client';
import { getPrettyError } from '@vercel/build-utils';
import { getPrettyError, scanParentDirs } from '@vercel/build-utils';
import toHumanPath from '../../util/humanize-path';
import Now, { CreateOptions } from '../../util';
import stamp from '../../util/output/stamp';
@@ -513,6 +514,19 @@ export default async (client: Client): Promise<number> => {
);
}

const { packageJson } = await scanParentDirs(
join(cwd, project?.rootDirectory ?? ''),
true,
cwd
);
let nodeVersion: string | undefined;
if (packageJson?.engines?.node) {
const parsedNodeVersion = semver.coerce(packageJson.engines.node);
if (parsedNodeVersion) {
nodeVersion = `${parsedNodeVersion.major}.x`;
}
}

try {
// if this flag is not set, use `undefined` to allow the project setting to be used
const autoAssignCustomDomains = argv['--skip-domain'] ? false : undefined;
@@ -545,12 +559,18 @@ export default async (client: Client): Promise<number> => {

if (!localConfig.builds || localConfig.builds.length === 0) {
// Only add projectSettings for zero config deployments
createArgs.projectSettings =
status === 'not_linked'
? {
sourceFilesOutsideRootDirectory,
}
: { ...localConfigurationOverrides, sourceFilesOutsideRootDirectory };
createArgs.projectSettings = {
sourceFilesOutsideRootDirectory,
rootDirectory,
nodeVersion,
};

if (status !== 'not_linked') {
createArgs.projectSettings = {
...createArgs.projectSettings,
...localConfigurationOverrides,
};
}
}

deployment = await createDeploy(
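A minimal sketch of the version derivation added in the deploy diff above (the sample `engines.node` ranges are illustrative, not taken from the repository):

```ts
import semver from 'semver';

// Mirrors the logic in the diff above: coerce the engines.node range to a
// concrete version and keep only the major, e.g. ">=18.0.0" -> "18.x".
function nodeVersionFromEngines(range: string | undefined): string | undefined {
  if (!range) return undefined;
  const parsed = semver.coerce(range);
  return parsed ? `${parsed.major}.x` : undefined;
}

// Illustrative inputs:
console.log(nodeVersionFromEngines('>=18.0.0')); // "18.x"
console.log(nodeVersionFromEngines('^20.11.1')); // "20.x"
console.log(nodeVersionFromEngines(undefined)); // undefined
```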
@@ -367,7 +367,7 @@ export async function executeBuild(
Code: { ZipFile },
Handler: asset.handler,
Runtime: asset.runtime,
MemorySize: asset.memory || 3008,
MemorySize: asset.memory || 3009,
Environment: {
Variables: {
...vercelConfig.env,

@@ -198,6 +198,7 @@ export default async function setupAndLink(
projectSettings: {
...localConfigurationOverrides,
sourceFilesOutsideRootDirectory,
rootDirectory,
},
autoAssignCustomDomains: true,
};
8
packages/cli/test/fixtures/unit/commands/build/monorepo/.vercel/project.json
vendored
Normal file
8
packages/cli/test/fixtures/unit/commands/build/monorepo/.vercel/project.json
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"orgId": ".",
|
||||
"projectId": ".",
|
||||
"settings": {
|
||||
"framework": "next",
|
||||
"rootDirectory": "apps/nextjs"
|
||||
}
|
||||
}
|
||||
36
packages/cli/test/fixtures/unit/commands/build/monorepo/apps/nextjs/.gitignore
vendored
Normal file
36
packages/cli/test/fixtures/unit/commands/build/monorepo/apps/nextjs/.gitignore
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
||||
|
||||
# dependencies
|
||||
/node_modules
|
||||
/.pnp
|
||||
.pnp.js
|
||||
.yarn/install-state.gz
|
||||
|
||||
# testing
|
||||
/coverage
|
||||
|
||||
# next.js
|
||||
/.next/
|
||||
/out/
|
||||
|
||||
# production
|
||||
/build
|
||||
|
||||
# misc
|
||||
.DS_Store
|
||||
*.pem
|
||||
|
||||
# debug
|
||||
npm-debug.log*
|
||||
yarn-debug.log*
|
||||
yarn-error.log*
|
||||
|
||||
# local env files
|
||||
.env*.local
|
||||
|
||||
# vercel
|
||||
.vercel
|
||||
|
||||
# typescript
|
||||
*.tsbuildinfo
|
||||
next-env.d.ts
|
||||
11
packages/cli/test/fixtures/unit/commands/build/monorepo/apps/nextjs/package.json
vendored
Normal file
11
packages/cli/test/fixtures/unit/commands/build/monorepo/apps/nextjs/package.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "nextjs-monorepo-test",
|
||||
"scripts": {
|
||||
"build": "next build"
|
||||
},
|
||||
"dependencies": {
|
||||
"react": "^18",
|
||||
"react-dom": "^18",
|
||||
"next": "^12"
|
||||
}
|
||||
}
|
||||
1
packages/cli/test/fixtures/unit/commands/build/monorepo/apps/nextjs/pages/index.jsx
vendored
Normal file
1
packages/cli/test/fixtures/unit/commands/build/monorepo/apps/nextjs/pages/index.jsx
vendored
Normal file
@@ -0,0 +1 @@
|
||||
export default () => <div>Hi</div>
|
||||
463
packages/cli/test/fixtures/unit/commands/build/monorepo/package-lock.json
generated
vendored
Normal file
463
packages/cli/test/fixtures/unit/commands/build/monorepo/package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,463 @@
|
||||
{
|
||||
"name": "monorepo",
|
||||
"version": "0.0.0",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "monorepo",
|
||||
"version": "0.0.0",
|
||||
"workspaces": [
|
||||
"apps/*"
|
||||
]
|
||||
},
|
||||
"apps/nextjs": {
|
||||
"name": "nextjs-monorepo-test",
|
||||
"dependencies": {
|
||||
"next": "^12",
|
||||
"react": "^18",
|
||||
"react-dom": "^18"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-android-arm-eabi": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-android-arm-eabi/-/swc-android-arm-eabi-12.3.4.tgz",
|
||||
"integrity": "sha512-cM42Cw6V4Bz/2+j/xIzO8nK/Q3Ly+VSlZJTa1vHzsocJRYz8KT6MrreXaci2++SIZCF1rVRCDgAg5PpqRibdIA==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-android-arm64": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-android-arm64/-/swc-android-arm64-12.3.4.tgz",
|
||||
"integrity": "sha512-5jf0dTBjL+rabWjGj3eghpLUxCukRhBcEJgwLedewEA/LJk2HyqCvGIwj5rH+iwmq1llCWbOky2dO3pVljrapg==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"android"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-darwin-arm64": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.3.4.tgz",
|
||||
"integrity": "sha512-DqsSTd3FRjQUR6ao0E1e2OlOcrF5br+uegcEGPVonKYJpcr0MJrtYmPxd4v5T6UCJZ+XzydF7eQo5wdGvSZAyA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-darwin-x64": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-darwin-x64/-/swc-darwin-x64-12.3.4.tgz",
|
||||
"integrity": "sha512-PPF7tbWD4k0dJ2EcUSnOsaOJ5rhT3rlEt/3LhZUGiYNL8KvoqczFrETlUx0cUYaXe11dRA3F80Hpt727QIwByQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"darwin"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-freebsd-x64": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-freebsd-x64/-/swc-freebsd-x64-12.3.4.tgz",
|
||||
"integrity": "sha512-KM9JXRXi/U2PUM928z7l4tnfQ9u8bTco/jb939pdFUHqc28V43Ohd31MmZD1QzEK4aFlMRaIBQOWQZh4D/E5lQ==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"freebsd"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-linux-arm-gnueabihf": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.3.4.tgz",
|
||||
"integrity": "sha512-3zqD3pO+z5CZyxtKDTnOJ2XgFFRUBciOox6EWkoZvJfc9zcidNAQxuwonUeNts6Xbm8Wtm5YGIRC0x+12YH7kw==",
|
||||
"cpu": [
|
||||
"arm"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-linux-arm64-gnu": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.3.4.tgz",
|
||||
"integrity": "sha512-kiX0vgJGMZVv+oo1QuObaYulXNvdH/IINmvdZnVzMO/jic/B8EEIGlZ8Bgvw8LCjH3zNVPO3mGrdMvnEEPEhKA==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-linux-arm64-musl": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.3.4.tgz",
|
||||
"integrity": "sha512-EETZPa1juczrKLWk5okoW2hv7D7WvonU+Cf2CgsSoxgsYbUCZ1voOpL4JZTOb6IbKMDo6ja+SbY0vzXZBUMvkQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-linux-x64-gnu": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.3.4.tgz",
|
||||
"integrity": "sha512-4csPbRbfZbuWOk3ATyWcvVFdD9/Rsdq5YHKvRuEni68OCLkfy4f+4I9OBpyK1SKJ00Cih16NJbHE+k+ljPPpag==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-linux-x64-musl": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.3.4.tgz",
|
||||
"integrity": "sha512-YeBmI+63Ro75SUiL/QXEVXQ19T++58aI/IINOyhpsRL1LKdyfK/35iilraZEFz9bLQrwy1LYAR5lK200A9Gjbg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"linux"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-win32-arm64-msvc": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.3.4.tgz",
|
||||
"integrity": "sha512-Sd0qFUJv8Tj0PukAYbCCDbmXcMkbIuhnTeHm9m4ZGjCf6kt7E/RMs55Pd3R5ePjOkN7dJEuxYBehawTR/aPDSQ==",
|
||||
"cpu": [
|
||||
"arm64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-win32-ia32-msvc": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.3.4.tgz",
|
||||
"integrity": "sha512-rt/vv/vg/ZGGkrkKcuJ0LyliRdbskQU+91bje+PgoYmxTZf/tYs6IfbmgudBJk6gH3QnjHWbkphDdRQrseRefQ==",
|
||||
"cpu": [
|
||||
"ia32"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/@next/swc-win32-x64-msvc": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.3.4.tgz",
|
||||
"integrity": "sha512-DQ20JEfTBZAgF8QCjYfJhv2/279M6onxFjdG/+5B0Cyj00/EdBxiWb2eGGFgQhrBbNv/lsvzFbbi0Ptf8Vw/bg==",
|
||||
"cpu": [
|
||||
"x64"
|
||||
],
|
||||
"optional": true,
|
||||
"os": [
|
||||
"win32"
|
||||
],
|
||||
"engines": {
|
||||
"node": ">= 10"
|
||||
}
|
||||
},
|
||||
"apps/nextjs/node_modules/next": {
|
||||
"version": "12.3.4",
|
||||
"resolved": "https://registry.npmjs.org/next/-/next-12.3.4.tgz",
|
||||
"integrity": "sha512-VcyMJUtLZBGzLKo3oMxrEF0stxh8HwuW976pAzlHhI3t8qJ4SROjCrSh1T24bhrbjw55wfZXAbXPGwPt5FLRfQ==",
|
||||
"dependencies": {
|
||||
"@next/env": "12.3.4",
|
||||
"@swc/helpers": "0.4.11",
|
||||
"caniuse-lite": "^1.0.30001406",
|
||||
"postcss": "8.4.14",
|
||||
"styled-jsx": "5.0.7",
|
||||
"use-sync-external-store": "1.2.0"
|
||||
},
|
||||
"bin": {
|
||||
"next": "dist/bin/next"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=12.22.0"
|
||||
},
|
||||
"optionalDependencies": {
|
||||
"@next/swc-android-arm-eabi": "12.3.4",
|
||||
"@next/swc-android-arm64": "12.3.4",
|
||||
"@next/swc-darwin-arm64": "12.3.4",
|
||||
"@next/swc-darwin-x64": "12.3.4",
|
"@next/swc-freebsd-x64": "12.3.4",
"@next/swc-linux-arm-gnueabihf": "12.3.4",
"@next/swc-linux-arm64-gnu": "12.3.4",
"@next/swc-linux-arm64-musl": "12.3.4",
"@next/swc-linux-x64-gnu": "12.3.4",
"@next/swc-linux-x64-musl": "12.3.4",
"@next/swc-win32-arm64-msvc": "12.3.4",
"@next/swc-win32-ia32-msvc": "12.3.4",
"@next/swc-win32-x64-msvc": "12.3.4"
},
"peerDependencies": {
"fibers": ">= 3.1.0",
"node-sass": "^6.0.0 || ^7.0.0",
"react": "^17.0.2 || ^18.0.0-0",
"react-dom": "^17.0.2 || ^18.0.0-0",
"sass": "^1.3.0"
},
"peerDependenciesMeta": {
"fibers": {
"optional": true
},
"node-sass": {
"optional": true
},
"sass": {
"optional": true
}
}
},
"apps/nextjs/node_modules/postcss": {
"version": "8.4.14",
"resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.14.tgz",
"integrity": "sha512-E398TUmfAYFPBSdzgeieK2Y1+1cpdxJx8yXbK/m57nRhKSmk1GB2tO4lbLBtlkfPQTDKfe4Xqv1ASWPpayPEig==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/postcss/"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/postcss"
}
],
"dependencies": {
"nanoid": "^3.3.4",
"picocolors": "^1.0.0",
"source-map-js": "^1.0.2"
},
"engines": {
"node": "^10 || ^12 || >=14"
}
},
"apps/remix": {
"name": "my-remix-app",
"extraneous": true,
"dependencies": {
"@remix-run/node": "^2.8.0",
"@remix-run/react": "^2.8.0",
"@remix-run/server-runtime": "^2.8.0",
"@vercel/analytics": "^1.2.2",
"@vercel/remix": "^2.8.0",
"isbot": "^4",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@remix-run/dev": "^2.8.0",
"@types/react": "^18.2.20",
"@types/react-dom": "^18.2.7",
"typescript": "^5.1.6",
"vite": "^5.1.0",
"vite-tsconfig-paths": "^4.2.1"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/@next/env": {
"version": "12.3.4",
"resolved": "https://registry.npmjs.org/@next/env/-/env-12.3.4.tgz",
"integrity": "sha512-H/69Lc5Q02dq3o+dxxy5O/oNxFsZpdL6WREtOOtOM1B/weonIwDXkekr1KV5DPVPr12IHFPrMrcJQ6bgPMfn7A=="
},
"node_modules/@swc/helpers": {
"version": "0.4.11",
"resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.4.11.tgz",
"integrity": "sha512-rEUrBSGIoSFuYxwBYtlUFMlE2CwGhmW+w9355/5oduSw8e5h2+Tj4UrAGNNgP9915++wj5vkQo0UuOBqOAq4nw==",
"dependencies": {
"tslib": "^2.4.0"
}
},
"node_modules/caniuse-lite": {
"version": "1.0.30001594",
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001594.tgz",
"integrity": "sha512-VblSX6nYqyJVs8DKFMldE2IVCJjZ225LW00ydtUWwh5hk9IfkTOffO6r8gJNsH0qqqeAF8KrbMYA2VEwTlGW5g==",
"funding": [
{
"type": "opencollective",
"url": "https://opencollective.com/browserslist"
},
{
"type": "tidelift",
"url": "https://tidelift.com/funding/github/npm/caniuse-lite"
},
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
]
},
"node_modules/js-tokens": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
"integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ=="
},
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
"integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==",
"dependencies": {
"js-tokens": "^3.0.0 || ^4.0.0"
},
"bin": {
"loose-envify": "cli.js"
}
},
"node_modules/nanoid": {
"version": "3.3.7",
"resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz",
"integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/ai"
}
],
"bin": {
"nanoid": "bin/nanoid.cjs"
},
"engines": {
"node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
}
},
"node_modules/nextjs-monorepo-test": {
"resolved": "apps/nextjs",
"link": true
},
"node_modules/picocolors": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz",
"integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ=="
},
"node_modules/react": {
"version": "18.2.0",
"resolved": "https://registry.npmjs.org/react/-/react-18.2.0.tgz",
"integrity": "sha512-/3IjMdb2L9QbBdWiW5e3P2/npwMBaU9mHCSCUzNln0ZCYbcfTsGbTJrU/kGemdH2IWmB2ioZ+zkxtmq6g09fGQ==",
"dependencies": {
"loose-envify": "^1.1.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/react-dom": {
"version": "18.2.0",
"resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.2.0.tgz",
"integrity": "sha512-6IMTriUmvsjHUjNtEDudZfuDQUoWXVxKHhlEGSk81n4YFS+r/Kl99wXiwlVXtPBtJenozv2P+hxDsw9eA7Xo6g==",
"dependencies": {
"loose-envify": "^1.1.0",
"scheduler": "^0.23.0"
},
"peerDependencies": {
"react": "^18.2.0"
}
},
"node_modules/scheduler": {
"version": "0.23.0",
"resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.0.tgz",
"integrity": "sha512-CtuThmgHNg7zIZWAXi3AsyIzA3n4xx7aNyjwC2VJldO2LMVDhFK+63xGqq6CsJH4rTAt6/M+N4GhZiDYPx9eUw==",
"dependencies": {
"loose-envify": "^1.1.0"
}
},
"node_modules/source-map-js": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.0.2.tgz",
"integrity": "sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/styled-jsx": {
"version": "5.0.7",
"resolved": "https://registry.npmjs.org/styled-jsx/-/styled-jsx-5.0.7.tgz",
"integrity": "sha512-b3sUzamS086YLRuvnaDigdAewz1/EFYlHpYBP5mZovKEdQQOIIYq8lApylub3HHZ6xFjV051kkGU7cudJmrXEA==",
"engines": {
"node": ">= 12.0.0"
},
"peerDependencies": {
"react": ">= 16.8.0 || 17.x.x || ^18.0.0-0"
},
"peerDependenciesMeta": {
"@babel/core": {
"optional": true
},
"babel-plugin-macros": {
"optional": true
}
}
},
"node_modules/tslib": {
"version": "2.6.2",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz",
"integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q=="
},
"node_modules/use-sync-external-store": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz",
"integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==",
"peerDependencies": {
"react": "^16.8.0 || ^17.0.0 || ^18.0.0"
}
}
}
}
9 packages/cli/test/fixtures/unit/commands/build/monorepo/package.json vendored Normal file
@@ -0,0 +1,9 @@
{
"name": "monorepo",
"private": true,
"version": "0.0.0",
"description": "",
"workspaces": [
"apps/*"
]
}
7 packages/cli/test/fixtures/unit/commands/build/with-flags/.vercel/project.json vendored Normal file
@@ -0,0 +1,7 @@
{
"orgId": ".",
"projectId": ".",
"settings": {
"framework": null
}
}
21 packages/cli/test/fixtures/unit/commands/build/with-flags/build.js vendored Executable file
@@ -0,0 +1,21 @@
const fs = require('fs');
const path = require('path');

async function main() {
const outputDir = path.join(__dirname, '.vercel', 'output');

await fs.promises.mkdir(outputDir).catch((error) => {
if (error.code === 'EEXIST') return;
throw error;
});

await fs.promises.copyFile(path.join(__dirname, 'config.json'), path.join(outputDir, 'config.json'));
await fs.promises.copyFile(path.join(__dirname, 'flags.json'), path.join(outputDir, 'flags.json'));
}

main().then(() => {
process.exit(0);
}).catch((error) => {
console.error(error);
process.exit(1);
});
3 packages/cli/test/fixtures/unit/commands/build/with-flags/config.json vendored Normal file
@@ -0,0 +1,3 @@
{
"version": 3
}
7 packages/cli/test/fixtures/unit/commands/build/with-flags/flags.json vendored Normal file
@@ -0,0 +1,7 @@
{
"definitions": {
"my-next-flag": {
"options": [{ "value": true }, { "value": false }]
}
}
}
5 packages/cli/test/fixtures/unit/commands/build/with-flags/package.json vendored Normal file
@@ -0,0 +1,5 @@
{
"scripts": {
"build": "node build.js"
}
}
4 packages/cli/test/fixtures/unit/commands/deploy/node/.vercel/project.json vendored Normal file
@@ -0,0 +1,4 @@
{
"orgId": "team_dummy",
"projectId": "node"
}
8 packages/cli/test/fixtures/unit/commands/deploy/node/package.json vendored Normal file
@@ -0,0 +1,8 @@
{
"name": "node",
"version": "1.0.0",
"private": true,
"engines": {
"node": ">= 20.x"
}
}
@@ -265,6 +265,23 @@ module.exports = async function prepare(session, binaryPath, tmpFixturesDir) {
},
}),
},
'zero-config-next-js-nested': {
'app/pages/index.js':
'export default () => <div><h1>Now CLI test</h1><p>Zero-config + Next.js</p></div>',
'app/package.json': JSON.stringify({
name: 'zero-config-next-js-test',
scripts: {
dev: 'next',
start: 'next start',
build: 'next build',
},
dependencies: {
next: 'latest',
react: 'latest',
'react-dom': 'latest',
},
}),
},
'lambda-with-128-memory': {
'api/memory.js': `
module.exports = (req, res) => {
32 packages/cli/test/integration-1.test.ts vendored
@@ -19,6 +19,7 @@ import {
import formatOutput from './helpers/format-output';
import type http from 'http';
import type { CLIProcess } from './helpers/types';

const TEST_TIMEOUT = 3 * 60 * 1000;
jest.setTimeout(TEST_TIMEOUT);

@@ -584,6 +585,37 @@ test('deploy fails using --local-config flag with non-existent path', async () =
expect(stderr).toMatch(/does-not-exist\.json/);
});

test('deploy using --local-config flag above target', async () => {
const root = await setupE2EFixture('zero-config-next-js-nested');
const projectName = `project-link-dev-${
Math.random().toString(36).split('.')[1]
}`;

const vc = execCli(binaryPath, ['deploy', `--name=${projectName}`], {
cwd: root,
});

await waitForPrompt(vc, /Set up and deploy [^?]+\?/);
vc.stdin?.write('yes\n');

await waitForPrompt(vc, 'Which scope do you want to deploy to?');
vc.stdin?.write('\n');

await waitForPrompt(vc, 'Link to existing project?');
vc.stdin?.write('no\n');

await waitForPrompt(vc, `What’s your project’s name? (${projectName})`);
vc.stdin?.write(`\n`);

await waitForPrompt(vc, 'In which directory is your code located?');
vc.stdin?.write('app\n');

// This means the framework detection worked!
await waitForPrompt(vc, 'Auto-detected Project Settings (Next.js)');

vc.kill();
});

test('deploy using --local-config flag above target', async () => {
const root = await setupE2EFixture('local-config-above-target');
const target = path.join(root, 'dir');
39 packages/cli/test/integration-2.test.ts vendored
@@ -1006,6 +1006,45 @@ test('[vc link --yes] should not show prompts and autolink', async () => {
).toBe(true);
});

test('[vc link] should detect frameworks in project rootDirectory', async () => {
const dir = await setupE2EFixture('zero-config-next-js-nested');
const projectRootDir = 'app';

const projectName = `project-link-dev-${
Math.random().toString(36).split('.')[1]
}`;

// remove previously linked project if it exists
await remove(path.join(dir, '.vercel'));

const vc = execCli(binaryPath, ['link', `--project=${projectName}`], {
cwd: dir,
env: {
FORCE_TTY: '1',
},
});

await waitForPrompt(vc, /Set up [^?]+\?/);
vc.stdin?.write('yes\n');

await waitForPrompt(vc, 'Which scope should contain your project?');
vc.stdin?.write('\n');

await waitForPrompt(vc, 'Link to existing project?');
vc.stdin?.write('no\n');

await waitForPrompt(vc, 'What’s your project’s name?');
vc.stdin?.write(`${projectName}\n`);

await waitForPrompt(vc, 'In which directory is your code located?');
vc.stdin?.write(`${projectRootDir}\n`);

// This means the framework detection worked!
await waitForPrompt(vc, 'Auto-detected Project Settings (Next.js)');

vc.kill();
});

test('[vc link] should not duplicate paths in .gitignore', async () => {
const dir = await setupE2EFixture('project-link-gitignore');
packages/cli/test/integration-3.test.ts
vendored
2
packages/cli/test/integration-3.test.ts
vendored
@@ -1175,7 +1175,7 @@ test('render build errors', async () => {
|
||||
const output = await execCli(binaryPath, [deploymentPath, '--yes']);
|
||||
|
||||
expect(output.exitCode, formatOutput(output)).toBe(1);
|
||||
expect(output.stderr).toMatch(/Command "yarn run build" exited with 1/gm);
|
||||
expect(output.stderr).toMatch(/Command "npm run build" exited with 1/gm);
|
||||
});
|
||||
|
||||
test('invalid deployment, projects and alias names', async () => {
|
||||
|
||||
@@ -785,7 +785,7 @@ describe('build', () => {
expect(files.sort()).toEqual(['index.html', 'package.json']);
});

it('should set `VERCEL_ANALYTICS_ID` environment variable and warn users', async () => {
it('should set `VERCEL_ANALYTICS_ID` environment variable', async () => {
const cwd = fixture('vercel-analytics');
const output = join(cwd, '.vercel/output');
client.cwd = cwd;
@@ -794,9 +794,6 @@ describe('build', () => {

const env = await fs.readJSON(join(output, 'static', 'env.json'));
expect(Object.keys(env).includes('VERCEL_ANALYTICS_ID')).toEqual(true);
await expect(client.stderr).toOutput(
'Vercel Speed Insights auto-injection is deprecated in favor of @vercel/speed-insights package. Learn more: https://vercel.link/upgrate-to-speed-insights-package'
);
});

it('should load environment variables from `.vercel/.env.preview.local`', async () => {
@@ -1250,43 +1247,74 @@ describe('build', () => {
(await fs.readFile(join(output, 'static/index.txt'), 'utf8')).trim()
).toEqual('marketing');
});
});

it('should create symlinks for duplicate references to Lambda / EdgeFunction instances', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on Windows');
return;
}
const cwd = fixture('functions-symlink');
const output = join(cwd, '.vercel/output');
client.cwd = cwd;
const exitCode = await build(client);
expect(exitCode).toEqual(0);
it('should write to flags.json', async () => {
const cwd = fixture('with-flags');
const output = join(cwd, '.vercel', 'output');

// "functions" directory has output Functions
const functions = await fs.readdir(join(output, 'functions'));
expect(functions.sort()).toEqual([
'edge.func',
'edge2.func',
'lambda.func',
'lambda2.func',
]);
expect(
fs.lstatSync(join(output, 'functions/lambda.func')).isDirectory()
).toEqual(true);
expect(
fs.lstatSync(join(output, 'functions/edge.func')).isDirectory()
).toEqual(true);
expect(
fs.lstatSync(join(output, 'functions/lambda2.func')).isSymbolicLink()
).toEqual(true);
expect(
fs.lstatSync(join(output, 'functions/edge2.func')).isSymbolicLink()
).toEqual(true);
expect(fs.readlinkSync(join(output, 'functions/lambda2.func'))).toEqual(
'lambda.func'
);
expect(fs.readlinkSync(join(output, 'functions/edge2.func'))).toEqual(
'edge.func'
);
client.cwd = cwd;
client.setArgv('build', '--yes');

const exitCode = await build(client);
expect(exitCode).toEqual(0);

expect(fs.existsSync(join(output, 'flags.json'))).toBe(true);
expect(fs.readJSONSync(join(output, 'flags.json'))).toEqual({
definitions: {
'my-next-flag': {
options: [{ value: true }, { value: false }],
},
},
});
});

it('should detect framework version in monorepo app', async () => {
const cwd = fixture('monorepo');
const output = join(cwd, '.vercel/output');
client.cwd = cwd;
const exitCode = await build(client);
expect(exitCode).toEqual(0);

const config = await fs.readJSON(join(output, 'config.json'));
expect(typeof config.framework.version).toEqual('string');
});

it('should create symlinks for duplicate references to Lambda / EdgeFunction instances', async () => {
if (process.platform === 'win32') {
console.log('Skipping test on Windows');
return;
}
const cwd = fixture('functions-symlink');
const output = join(cwd, '.vercel/output');
client.cwd = cwd;
const exitCode = await build(client);
expect(exitCode).toEqual(0);

// "functions" directory has output Functions
const functions = await fs.readdir(join(output, 'functions'));
expect(functions.sort()).toEqual([
'edge.func',
'edge2.func',
'lambda.func',
'lambda2.func',
]);
expect(
fs.lstatSync(join(output, 'functions/lambda.func')).isDirectory()
).toEqual(true);
expect(
fs.lstatSync(join(output, 'functions/edge.func')).isDirectory()
).toEqual(true);
expect(
fs.lstatSync(join(output, 'functions/lambda2.func')).isSymbolicLink()
).toEqual(true);
expect(
fs.lstatSync(join(output, 'functions/edge2.func')).isSymbolicLink()
).toEqual(true);
expect(fs.readlinkSync(join(output, 'functions/lambda2.func'))).toEqual(
'lambda.func'
);
expect(fs.readlinkSync(join(output, 'functions/edge2.func'))).toEqual(
'edge.func'
);
});
});
@@ -416,4 +416,52 @@ describe('deploy', () => {
version: 2,
});
});

it('should send `projectSettings.nodeVersion` based on `engines.node` package.json field', async () => {
const user = useUser();
useTeams('team_dummy');
useProject({
...defaultProject,
name: 'node',
id: 'QmbKpqpiUqbcke',
});

let body: any;
client.scenario.post(`/v13/deployments`, (req, res) => {
body = req.body;
res.json({
creator: {
uid: user.id,
username: user.username,
},
id: 'dpl_',
});
});
client.scenario.get(`/v13/deployments/dpl_`, (req, res) => {
res.json({
creator: {
uid: user.id,
username: user.username,
},
id: 'dpl_',
readyState: 'READY',
aliasAssigned: true,
alias: [],
});
});

const repoRoot = setupUnitFixture('commands/deploy/node');
client.cwd = repoRoot;
client.setArgv('deploy');
const exitCode = await deploy(client);
expect(exitCode).toEqual(0);
expect(body).toMatchObject({
source: 'cli',
version: 2,
projectSettings: {
nodeVersion: '20.x',
sourceFilesOutsideRootDirectory: true,
},
});
});
});
@@ -253,12 +253,12 @@ describe('validateConfig', () => {
const error = validateConfig({
functions: {
'api/test.js': {
memory: 3009,
memory: 3010,
},
},
});
expect(error!.message).toEqual(
"Invalid vercel.json - `functions['api/test.js'].memory` should be <= 3008."
"Invalid vercel.json - `functions['api/test.js'].memory` should be <= 3009."
);
expect(error!.link).toEqual(
'https://vercel.com/docs/concepts/projects/project-configuration#functions'
@@ -1,5 +1,26 @@
# @vercel/client

## 13.1.6

### Patch Changes

- Updated dependencies [[`8ea93839c`](https://github.com/vercel/vercel/commit/8ea93839ccc70816f3ece9d7cfdb857aa7a4b015)]:
  - @vercel/build-utils@7.9.0

## 13.1.5

### Patch Changes

- Updated dependencies [[`908e7837d`](https://github.com/vercel/vercel/commit/908e7837d55bc02e708f402c700e00208415e954), [`5e3656ec1`](https://github.com/vercel/vercel/commit/5e3656ec1b3f0561091636582715ba09ddd8cb2d)]:
  - @vercel/build-utils@7.8.0

## 13.1.4

### Patch Changes

- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
  - @vercel/build-utils@7.7.1

## 13.1.3

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "@vercel/client",
"version": "13.1.3",
"version": "13.1.6",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"homepage": "https://vercel.com",
@@ -37,7 +37,7 @@
"typescript": "4.9.5"
},
"dependencies": {
"@vercel/build-utils": "7.7.0",
"@vercel/build-utils": "7.9.0",
"@vercel/error-utils": "2.0.2",
"@vercel/routing-utils": "3.1.0",
"@zeit/fetch": "5.2.0",
|
||||
# @vercel/frameworks
|
||||
|
||||
## 3.0.0
|
||||
|
||||
### Major Changes
|
||||
|
||||
- Make "remix" framework preset supersede "vite" ([#11031](https://github.com/vercel/vercel/pull/11031))
|
||||
|
||||
## 2.0.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,5 +1,15 @@
|
||||
/** @type {import('@ts-jest/dist/types').InitialOptionsTsJest} */
|
||||
module.exports = {
|
||||
preset: 'ts-jest',
|
||||
testEnvironment: 'node',
|
||||
transform: {
|
||||
'^.+\\.tsx?$': 'ts-jest',
|
||||
'^.+\\.[tj]s$': [
|
||||
'ts-jest',
|
||||
{
|
||||
diagnostics: true,
|
||||
isolatedModules: true,
|
||||
tsconfig: 'test/tsconfig.json',
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "2.0.6",
|
||||
"version": "3.0.0",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
|
||||
@@ -202,11 +202,14 @@ export const frameworks = [
|
||||
description: 'A new Remix app — the result of running `npx create-remix`.',
|
||||
website: 'https://remix.run',
|
||||
sort: 6,
|
||||
supersedes: 'hydrogen',
|
||||
supersedes: ['hydrogen', 'vite'],
|
||||
useRuntime: { src: 'package.json', use: '@vercel/remix-builder' },
|
||||
ignoreRuntimes: ['@vercel/node'],
|
||||
detectors: {
|
||||
some: [
|
||||
{
|
||||
matchPackage: '@remix-run/dev',
|
||||
},
|
||||
{
|
||||
path: 'remix.config.js',
|
||||
},
|
||||
@@ -1734,7 +1737,7 @@ export const frameworks = [
|
||||
tagline: 'React framework for headless commerce',
|
||||
description: 'React framework for headless commerce',
|
||||
website: 'https://hydrogen.shopify.dev',
|
||||
supersedes: 'vite',
|
||||
supersedes: ['vite'],
|
||||
useRuntime: { src: 'package.json', use: '@vercel/hydrogen' },
|
||||
envPrefix: 'PUBLIC_',
|
||||
detectors: {
|
||||
|
||||
@@ -220,7 +220,7 @@ export interface Framework {
|
||||
*/
|
||||
defaultVersion?: string;
|
||||
/**
|
||||
* Slug of another framework preset in which this framework supersedes.
|
||||
* Array of slugs for other framework presets which this framework supersedes.
|
||||
*/
|
||||
supersedes?: string;
|
||||
supersedes?: string[];
|
||||
}
|
||||
|
||||
@@ -199,7 +199,7 @@ const Schema = {
|
||||
dependency: { type: 'string' },
|
||||
cachePattern: { type: 'string' },
|
||||
defaultVersion: { type: 'string' },
|
||||
supersedes: { type: 'string' },
|
||||
supersedes: { type: 'array', items: { type: 'string' } },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
@@ -1,5 +1,22 @@
|
||||
# @vercel/fs-detectors
|
||||
|
||||
## 5.2.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))
|
||||
|
||||
## 5.2.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Make "remix" framework preset supersede "vite" ([#11031](https://github.com/vercel/vercel/pull/11031))
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [[`1333071a3`](https://github.com/vercel/vercel/commit/1333071a3a2d324679327bfdd4e872f8fd3521c6)]:
|
||||
- @vercel/frameworks@3.0.0
|
||||
|
||||
## 5.1.6
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/fs-detectors",
|
||||
"version": "5.1.6",
|
||||
"version": "5.2.1",
|
||||
"description": "Vercel filesystem detectors",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.ts",
|
||||
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/error-utils": "2.0.2",
|
||||
"@vercel/frameworks": "2.0.6",
|
||||
"@vercel/frameworks": "3.0.0",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"glob": "8.0.3",
|
||||
"js-yaml": "4.1.0",
|
||||
@@ -37,7 +37,7 @@
|
||||
"@types/minimatch": "3.0.5",
|
||||
"@types/node": "14.18.33",
|
||||
"@types/semver": "7.3.10",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/build-utils": "7.9.0",
|
||||
"jest-junit": "16.0.0",
|
||||
"typescript": "4.9.5"
|
||||
}
|
||||
|
||||
@@ -612,11 +612,11 @@ function validateFunctions({ functions = {} }: Options) {

if (
func.memory !== undefined &&
(func.memory < 128 || func.memory > 3008)
(func.memory < 128 || func.memory > 3009)
) {
return {
code: 'invalid_function_memory',
message: 'Functions must have a memory value between 128 and 3008',
message: 'Functions must have a memory value between 128 and 3009',
};
}
@@ -143,7 +143,9 @@ function removeSupersededFramework(
const framework = matches[index];
if (framework) {
if (framework.supersedes) {
removeSupersededFramework(matches, framework.supersedes);
for (const slug of framework.supersedes) {
removeSupersededFramework(matches, slug);
}
}
matches.splice(index, 1);
}
@@ -154,7 +156,9 @@ export function removeSupersededFrameworks(
) {
for (const match of matches.slice()) {
if (match?.supersedes) {
removeSupersededFramework(matches, match.supersedes);
for (const slug of match.supersedes) {
removeSupersededFramework(matches, slug);
}
}
}
}
@@ -166,12 +166,12 @@ describe('removeSupersededFrameworks()', () => {
const matches = [
{ slug: 'storybook' },
{ slug: 'vite' },
{ slug: 'hydrogen', supersedes: 'vite' },
{ slug: 'hydrogen', supersedes: ['vite'] },
];
removeSupersededFrameworks(matches);
expect(matches).toEqual([
{ slug: 'storybook' },
{ slug: 'hydrogen', supersedes: 'vite' },
{ slug: 'hydrogen', supersedes: ['vite'] },
]);
});

@@ -179,13 +179,13 @@ describe('removeSupersededFrameworks()', () => {
const matches = [
{ slug: 'storybook' },
{ slug: 'vite' },
{ slug: 'hydrogen', supersedes: 'vite' },
{ slug: 'remix', supersedes: 'hydrogen' },
{ slug: 'hydrogen', supersedes: ['vite'] },
{ slug: 'remix', supersedes: ['hydrogen'] },
];
removeSupersededFrameworks(matches);
expect(matches).toEqual([
{ slug: 'storybook' },
{ slug: 'remix', supersedes: 'hydrogen' },
{ slug: 'remix', supersedes: ['hydrogen'] },
]);
});
});
@@ -442,6 +442,20 @@ describe('detectFramework()', () => {

expect(await detectFramework({ fs, frameworkList })).toBe('storybook');
});

it('Should detect Remix + Vite as `remix`', async () => {
const fs = new VirtualFilesystem({
'vite.config.ts': '',
'package.json': JSON.stringify({
dependencies: {
'@remix-run/dev': 'latest',
vite: 'latest',
},
}),
});

expect(await detectFramework({ fs, frameworkList })).toBe('remix');
});
});

describe('detectFrameworks()', () => {
@@ -497,6 +511,23 @@ describe('detectFrameworks()', () => {
expect(slugs).toEqual(['nextjs', 'storybook']);
});

it('Should detect Remix + Vite as `remix`', async () => {
const fs = new VirtualFilesystem({
'vite.config.ts': '',
'package.json': JSON.stringify({
dependencies: {
'@remix-run/dev': 'latest',
vite: 'latest',
},
}),
});

const slugs = (await detectFrameworks({ fs, frameworkList })).map(
f => f.slug
);
expect(slugs).toEqual(['remix']);
});

it('Should detect "hydrogen" template as `hydrogen`', async () => {
const fs = new LocalFileSystemDetector(join(EXAMPLES_DIR, 'hydrogen'));

@@ -1,5 +1,26 @@
# @vercel/gatsby-plugin-vercel-builder

## 2.0.21

### Patch Changes

- Updated dependencies [[`8ea93839c`](https://github.com/vercel/vercel/commit/8ea93839ccc70816f3ece9d7cfdb857aa7a4b015)]:
  - @vercel/build-utils@7.9.0

## 2.0.20

### Patch Changes

- Updated dependencies [[`908e7837d`](https://github.com/vercel/vercel/commit/908e7837d55bc02e708f402c700e00208415e954), [`5e3656ec1`](https://github.com/vercel/vercel/commit/5e3656ec1b3f0561091636582715ba09ddd8cb2d)]:
  - @vercel/build-utils@7.8.0

## 2.0.19

### Patch Changes

- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
  - @vercel/build-utils@7.7.1

## 2.0.18

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "@vercel/gatsby-plugin-vercel-builder",
"version": "2.0.18",
"version": "2.0.21",
"main": "dist/index.js",
"files": [
"dist",
@@ -20,7 +20,7 @@
},
"dependencies": {
"@sinclair/typebox": "0.25.24",
"@vercel/build-utils": "7.7.0",
"@vercel/build-utils": "7.9.0",
"@vercel/routing-utils": "3.1.0",
"esbuild": "0.14.47",
"etag": "1.8.1",
|
||||
{
|
||||
diagnostics: true,
|
||||
isolatedModules: true,
|
||||
tsconfig: 'test/tsconfig.json',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
@@ -29,7 +29,7 @@
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "6.1.5",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/build-utils": "7.9.0",
|
||||
"async-retry": "1.3.3",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
|
||||
@@ -26,7 +26,7 @@
|
||||
"devDependencies": {
|
||||
"@types/jest": "27.5.1",
|
||||
"@types/node": "14.18.33",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/build-utils": "7.9.0",
|
||||
"execa": "3.2.0",
|
||||
"fs-extra": "11.1.0",
|
||||
"jest-junit": "16.0.0"
|
||||
|
||||
@@ -1,5 +1,35 @@
|
||||
# @vercel/next
|
||||
|
||||
## 4.1.5
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Rename variants to flags and remove legacy flags ([#11121](https://github.com/vercel/vercel/pull/11121))
|
||||
|
||||
## 4.1.4
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Enable partial prerendering support for pre-generated pages ([#11183](https://github.com/vercel/vercel/pull/11183))
|
||||
|
||||
## 4.1.3
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Fix manifest with experimental flag ([#11192](https://github.com/vercel/vercel/pull/11192))
|
||||
|
||||
## 4.1.2
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Update experimental bundle flag ([#11187](https://github.com/vercel/vercel/pull/11187))
|
||||
|
||||
- [next] Add flag for experimental grouping ([#11177](https://github.com/vercel/vercel/pull/11177))
|
||||
|
||||
- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))
|
||||
|
||||
- fix: missing experimental field ([#11184](https://github.com/vercel/vercel/pull/11184))
|
||||
|
||||
## 4.1.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -8,6 +8,7 @@ module.exports = {
|
||||
{
|
||||
diagnostics: true,
|
||||
isolatedModules: true,
|
||||
tsconfig: 'test/tsconfig.json',
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/next",
|
||||
"version": "4.1.1",
|
||||
"version": "4.1.5",
|
||||
"license": "Apache-2.0",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/next-js",
|
||||
@@ -23,7 +23,7 @@
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"@vercel/nft": "0.26.3"
|
||||
"@vercel/nft": "0.26.4"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/aws-lambda": "8.10.19",
|
||||
@@ -40,7 +40,7 @@
|
||||
"@types/semver": "6.0.0",
|
||||
"@types/text-table": "0.2.1",
|
||||
"@types/webpack-sources": "3.2.0",
|
||||
"@vercel/build-utils": "7.7.0",
|
||||
"@vercel/build-utils": "7.9.0",
|
||||
"@vercel/routing-utils": "3.1.0",
|
||||
"async-sema": "3.0.1",
|
||||
"buffer-crc32": "0.2.13",
|
||||
|
||||
@@ -511,7 +511,7 @@ export const build: BuildV2 = async ({
entryPath,
outputDirectory
);
const omittedPrerenderRoutes = new Set(
const omittedPrerenderRoutes: ReadonlySet<string> = new Set(
Object.keys(prerenderManifest.omittedRoutes)
);

@@ -1142,6 +1142,10 @@ export const build: BuildV2 = async ({
appPathRoutesManifest,
});

/**
* This is a detection for preview mode that's required for the pages
* router.
*/
const canUsePreviewMode = Object.keys(pages).some(page =>
isApiPage(pages[page].fsPath)
);
@@ -1316,6 +1320,22 @@ export const build: BuildV2 = async ({
}
}

/**
* All of the routes that have `experimentalPPR` enabled.
*/
const experimentalPPRRoutes = new Set<string>();

for (const [route, { experimentalPPR }] of [
...Object.entries(prerenderManifest.staticRoutes),
...Object.entries(prerenderManifest.blockingFallbackRoutes),
...Object.entries(prerenderManifest.fallbackRoutes),
...Object.entries(prerenderManifest.omittedRoutes),
]) {
if (!experimentalPPR) continue;

experimentalPPRRoutes.add(route);
}

if (requiredServerFilesManifest) {
if (!routesManifest) {
throw new Error(
@@ -1371,6 +1391,7 @@ export const build: BuildV2 = async ({
hasIsr404Page,
hasIsr500Page,
variantsManifest,
experimentalPPRRoutes,
});
}

@@ -1883,17 +1904,18 @@ export const build: BuildV2 = async ({
);
}

dynamicRoutes = await getDynamicRoutes(
dynamicRoutes = await getDynamicRoutes({
entryPath,
entryDirectory,
dynamicPages,
false,
isDev: false,
routesManifest,
omittedPrerenderRoutes,
omittedRoutes: omittedPrerenderRoutes,
canUsePreviewMode,
prerenderManifest.bypassToken || '',
isServerMode
).then(arr =>
bypassToken: prerenderManifest.bypassToken || '',
isServerMode,
experimentalPPRRoutes,
}).then(arr =>
localizeDynamicRoutes(
arr,
dynamicPrefix,
@@ -1912,17 +1934,18 @@ export const build: BuildV2 = async ({

// we need to include the prerenderManifest.omittedRoutes here
// for the page to be able to be matched in the lambda for preview mode
const completeDynamicRoutes = await getDynamicRoutes(
const completeDynamicRoutes = await getDynamicRoutes({
entryPath,
entryDirectory,
dynamicPages,
false,
isDev: false,
routesManifest,
undefined,
omittedRoutes: undefined,
canUsePreviewMode,
prerenderManifest.bypassToken || '',
isServerMode
).then(arr =>
bypassToken: prerenderManifest.bypassToken || '',
isServerMode,
experimentalPPRRoutes,
}).then(arr =>
arr.map(route => {
route.src = route.src.replace('^', `^${dynamicPrefix}`);
return route;
@@ -2119,22 +2142,33 @@ export const build: BuildV2 = async ({
appPathRoutesManifest,
isSharedLambdas,
canUsePreviewMode,
omittedPrerenderRoutes,
});

Object.keys(prerenderManifest.staticRoutes).forEach(route =>
prerenderRoute(route, { isBlocking: false, isFallback: false })
await Promise.all(
Object.keys(prerenderManifest.staticRoutes).map(route =>
prerenderRoute(route, {})
)
);
Object.keys(prerenderManifest.fallbackRoutes).forEach(route =>
prerenderRoute(route, { isBlocking: false, isFallback: true })

await Promise.all(
Object.keys(prerenderManifest.fallbackRoutes).map(route =>
prerenderRoute(route, { isFallback: true })
)
);
Object.keys(prerenderManifest.blockingFallbackRoutes).forEach(route =>
prerenderRoute(route, { isBlocking: true, isFallback: false })

await Promise.all(
Object.keys(prerenderManifest.blockingFallbackRoutes).map(route =>
prerenderRoute(route, { isBlocking: true })
)
);

if (static404Page && canUsePreviewMode) {
omittedPrerenderRoutes.forEach(route => {
prerenderRoute(route, { isOmitted: true });
});
await Promise.all(
Array.from(omittedPrerenderRoutes).map(route =>
prerenderRoute(route, { isOmitted: true })
)
);
}

// We still need to use lazyRoutes if the dataRoutes field
@@ -12,7 +12,6 @@ import {
debug,
glob,
Files,
Flag,
BuildResultV2Typical as BuildResult,
NodejsLambda,
} from '@vercel/build-utils';
@@ -47,11 +46,12 @@ import {
UnwrapPromise,
getOperationType,
FunctionsConfigManifestV1,
VariantsManifestLegacy,
VariantsManifest,
RSC_CONTENT_TYPE,
RSC_PREFETCH_SUFFIX,
normalizePrefetches,
CreateLambdaFromPseudoLayersOptions,
getPostponeResumePathname,
} from './utils';
import {
nodeFileTrace,
@@ -142,6 +142,7 @@ export async function serverBuild({
lambdaCompressedByteLimit,
requiredServerFilesManifest,
variantsManifest,
experimentalPPRRoutes,
}: {
appPathRoutesManifest?: Record<string, string>;
dynamicPages: string[];
@@ -151,7 +152,7 @@ export async function serverBuild({
pagesDir: string;
baseDir: string;
canUsePreviewMode: boolean;
omittedPrerenderRoutes: Set<string>;
omittedPrerenderRoutes: ReadonlySet<string>;
localePrefixed404: boolean;
staticPages: { [key: string]: FileFsRef };
lambdaAppPaths: { [key: string]: FileFsRef };
@@ -182,10 +183,15 @@ export async function serverBuild({
imagesManifest?: NextImagesManifest;
prerenderManifest: NextPrerenderedRoutes;
requiredServerFilesManifest: NextRequiredServerFilesManifest;
variantsManifest: VariantsManifestLegacy | null;
variantsManifest: VariantsManifest | null;
experimentalPPRRoutes: ReadonlySet<string>;
}): Promise<BuildResult> {
lambdaPages = Object.assign({}, lambdaPages, lambdaAppPaths);

const experimentalAllowBundling = Boolean(
process.env.NEXT_EXPERIMENTAL_FUNCTION_BUNDLING
);

const lambdas: { [key: string]: Lambda } = {};
const prerenders: { [key: string]: Prerender } = {};
const lambdaPageKeys = Object.keys(lambdaPages);
@@ -266,10 +272,11 @@ export async function serverBuild({
nextVersion,
CORRECT_MIDDLEWARE_ORDER_VERSION
);
const isCorrectManifests = semver.gte(
nextVersion,
CORRECTED_MANIFESTS_VERSION
);
// experimental bundling prevents filtering manifests
// as we don't know what to filter by at this stage
const isCorrectManifests =
!experimentalAllowBundling &&
semver.gte(nextVersion, CORRECTED_MANIFESTS_VERSION);

let hasStatic500 = !!staticPages[path.posix.join(entryDirectory, '500')];

@@ -348,19 +355,7 @@ export async function serverBuild({
internalPages.push('404.js');
}

const experimentalPPRRoutes = new Set<string>();

for (const [route, { experimentalPPR }] of [
...Object.entries(prerenderManifest.staticRoutes),
...Object.entries(prerenderManifest.blockingFallbackRoutes),
...Object.entries(prerenderManifest.fallbackRoutes),
]) {
if (!experimentalPPR) continue;

experimentalPPRRoutes.add(route);
}

const prerenderRoutes = new Set<string>([
const prerenderRoutes: ReadonlySet<string> = new Set<string>([
...(canUsePreviewMode ? omittedPrerenderRoutes : []),
...Object.keys(prerenderManifest.blockingFallbackRoutes),
...Object.keys(prerenderManifest.fallbackRoutes),
@@ -768,7 +763,7 @@ export async function serverBuild({
.filter(Boolean) as string[];

let traceResult: NodeFileTraceResult | undefined;
let parentFilesMap: Map<string, Set<string>> | undefined;
let parentFilesMap: ReadonlyMap<string, Set<string>> | undefined;

if (pathsToTrace.length > 0) {
traceResult = await nodeFileTrace(pathsToTrace, {
@@ -883,6 +878,7 @@ export async function serverBuild({
const pageExtensions = requiredServerFilesManifest.config?.pageExtensions;

const pageLambdaGroups = await getPageLambdaGroups({
experimentalAllowBundling,
entryPath: projectDir,
config,
functionsConfigManifest,
@@ -904,6 +900,7 @@ export async function serverBuild({
}

const appRouterLambdaGroups = await getPageLambdaGroups({
experimentalAllowBundling,
entryPath: projectDir,
config,
functionsConfigManifest,
@@ -922,6 +919,7 @@ export async function serverBuild({
});

const appRouteHandlersLambdaGroups = await getPageLambdaGroups({
experimentalAllowBundling,
entryPath: projectDir,
config,
functionsConfigManifest,
@@ -1163,6 +1161,7 @@ export async function serverBuild({
maxDuration: group.maxDuration,
isStreaming: group.isStreaming,
nextVersion,
experimentalAllowBundling,
};

const lambda = await createLambdaFromPseudoLayers(options);
@@ -1176,7 +1175,7 @@ export async function serverBuild({
// lambda for the page for revalidation.
let revalidate: NodejsLambda | undefined;
if (isPPR) {
if (isPPR && !options.isStreaming) {
if (!options.isStreaming) {
throw new Error("Invariant: PPR lambda isn't streaming");
}

@@ -1188,24 +1187,28 @@ export async function serverBuild({
});
}

for (const page of group.pages) {
const pageNoExt = page.replace(/\.js$/, '');
let isPrerender = prerenderRoutes.has(
path.join('/', pageNoExt === 'index' ? '' : pageNoExt)
);
for (const pageFilename of group.pages) {
// This is the name of the page, where the root is `index`.
const pageName = pageFilename.replace(/\.js$/, '');

// This is the name of the page prefixed with a `/`, where the root is
// `/index`.
const pagePath = path.posix.join('/', pageName);

// This is the routable pathname for the page, where the root is `/`.
const pagePathname = pagePath === '/index' ? '/' : pagePath;

let isPrerender = prerenderRoutes.has(pagePathname);

if (!isPrerender && routesManifest?.i18n) {
isPrerender = routesManifest.i18n.locales.some(locale => {
return prerenderRoutes.has(
path.join('/', locale, pageNoExt === 'index' ? '' : pageNoExt)
path.join('/', locale, pageName === 'index' ? '' : pageName)
);
});
}
let outputName = path.posix.join(entryDirectory, pageNoExt);

if (!group.isAppRouter && !group.isAppRouteHandler) {
outputName = normalizeIndexOutput(outputName, true);
}
let outputName = path.posix.join(entryDirectory, pageName);

// If this is a PPR page, then we should prefix the output name.
if (isPPR) {
@@ -1213,24 +1216,56 @@ export async function serverBuild({
throw new Error("Invariant: PPR lambda isn't set");
}

// Get the get the base path prefixed route, without the index
// normalization.
outputName = path.posix.join(entryDirectory, pageNoExt);
// Assign the revalidate lambda to the output name. That's used to
// perform the initial static shell render.
lambdas[outputName] = revalidate;

const pprOutputName = path.posix.join(
entryDirectory,
'/_next/postponed/resume',
pageNoExt
);
lambdas[pprOutputName] = lambda;
// If this isn't an omitted page, then we should add the link from the
// page to the postpone resume lambda.
if (!omittedPrerenderRoutes.has(pagePathname)) {
const key = getPostponeResumePathname(entryDirectory, pageName);
lambdas[key] = lambda;

// We want to add the `experimentalStreamingLambdaPath` to this
// output.
experimentalStreamingLambdaPaths.set(outputName, key);
} else {
// As this is an omitted page, we should generate the experimental
// partial prerendering resume route for each of these routes that
// support partial prerendering. This is because the routes that
// haven't been omitted will have rewrite rules in place to rewrite
// the original request `/blog/my-slug` to the dynamic path
// `/blog/[slug]?nxtPslug=my-slug`.
for (const [
routePathname,
{ srcRoute, experimentalPPR },
] of Object.entries(prerenderManifest.staticRoutes)) {
// If the srcRoute doesn't match or this doesn't support
// experimental partial prerendering, then we can skip this route.
if (srcRoute !== pagePathname || !experimentalPPR) continue;

// If this route is the same as the page route, then we can skip
// it, because we've already added the lambda to the output.
if (routePathname === pagePathname) continue;

const key = getPostponeResumePathname(
entryDirectory,
routePathname
);
lambdas[key] = lambda;

outputName = path.posix.join(entryDirectory, routePathname);
experimentalStreamingLambdaPaths.set(outputName, key);
}
}

// We want to add the `experimentalStreamingLambdaPath` to this
// output.
experimentalStreamingLambdaPaths.set(outputName, pprOutputName);
continue;
}

if (!group.isAppRouter && !group.isAppRouteHandler) {
outputName = normalizeIndexOutput(outputName, true);
}

// we add locale prefixed outputs for SSR pages,
// this is handled in onPrerenderRoute for SSG pages
if (
@@ -1238,7 +1273,7 @@ export async function serverBuild({
!isPrerender &&
!group.isAppRouter &&
(!isCorrectLocaleAPIRoutes ||
!(pageNoExt === 'api' || pageNoExt.startsWith('api/')))
!(pageName === 'api' || pageName.startsWith('api/')))
) {
for (const locale of i18n.locales) {
lambdas[
@@ -1246,7 +1281,7 @@ export async function serverBuild({
path.posix.join(
entryDirectory,
locale,
pageNoExt === 'index' ? '' : pageNoExt
pageName === 'index' ? '' : pageName
),
true
)
@@ -1279,6 +1314,7 @@ export async function serverBuild({
hasPages404: routesManifest.pages404,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
omittedPrerenderRoutes,
});

await Promise.all(
@@ -1286,11 +1322,13 @@ export async function serverBuild({
prerenderRoute(route, {})
)
);

await Promise.all(
Object.keys(prerenderManifest.fallbackRoutes).map(route =>
prerenderRoute(route, { isFallback: true })
)
);

await Promise.all(
Object.keys(prerenderManifest.blockingFallbackRoutes).map(route =>
prerenderRoute(route, { isBlocking: true })
@@ -1299,9 +1337,9 @@ export async function serverBuild({

if (static404Page && canUsePreviewMode) {
await Promise.all(
[...omittedPrerenderRoutes].map(route => {
return prerenderRoute(route, { isOmitted: true });
})
Array.from(omittedPrerenderRoutes).map(route =>
prerenderRoute(route, { isOmitted: true })
)
);
}

@@ -1310,6 +1348,7 @@ export async function serverBuild({
if (routesManifest?.i18n) {
route = normalizeLocalePath(route, routesManifest.i18n.locales).pathname;
}

delete lambdas[
normalizeIndexOutput(
path.posix.join('./', entryDirectory, route === '/' ? '/index' : route),
@@ -1333,19 +1372,19 @@ export async function serverBuild({
middleware.staticRoutes.length > 0 &&
semver.gte(nextVersion, NEXT_DATA_MIDDLEWARE_RESOLVING_VERSION);

const dynamicRoutes = await getDynamicRoutes(
const dynamicRoutes = await getDynamicRoutes({
entryPath,
entryDirectory,
dynamicPages,
false,
isDev: false,
routesManifest,
omittedPrerenderRoutes,
omittedRoutes: omittedPrerenderRoutes,
canUsePreviewMode,
prerenderManifest.bypassToken || '',
true,
middleware.dynamicRouteMap,
experimental.ppr
).then(arr =>
bypassToken: prerenderManifest.bypassToken || '',
isServerMode: true,
dynamicMiddlewareRouteMap: middleware.dynamicRouteMap,
experimentalPPRRoutes,
}).then(arr =>
localizeDynamicRoutes(
arr,
dynamicPrefix,
@@ -1539,18 +1578,50 @@ export async function serverBuild({
'RSC, Next-Router-State-Tree, Next-Router-Prefetch';
const appNotFoundPath = path.posix.join('.', entryDirectory, '_not-found');

const flags: Flag[] = variantsManifest
? Object.entries(variantsManifest).map(([key, value]) => ({
key,
...value,
metadata: value.metadata ?? {},
}))
: [];

if (experimental.ppr && !rscPrefetchHeader) {
throw new Error("Invariant: cannot use PPR without 'rsc.prefetchHeader'");
}

// If we're using the Experimental Partial Prerendering, we should ensure that
// all the routes that support it (and are listed) have configured lambdas.
// This only applies to routes that do not have fallbacks enabled (these are
// routes that have `dynamicParams = false` defined.
if (experimental.ppr) {
for (const { srcRoute, dataRoute, experimentalPPR } of Object.values(
prerenderManifest.staticRoutes
)) {
// Only apply this to the routes that support experimental PPR and
// that also have their `dataRoute` and `srcRoute` defined.
if (!experimentalPPR || !dataRoute || !srcRoute) continue;

// If the srcRoute is not omitted, then we don't need to do anything. This
// is the indicator that a route should only have it's prerender defined
// and not a lambda.
if (!omittedPrerenderRoutes.has(srcRoute)) continue;

// The lambda paths have their leading `/` stripped.
const srcPathname = srcRoute.substring(1);
const dataPathname = dataRoute.substring(1);

// If we already have an associated lambda for the `.rsc` route, then
// we can skip this.
const dataPathnameExists = dataPathname in lambdas;
if (dataPathnameExists) continue;

// We require that the source route has a lambda associated with it. If
// it doesn't this is an error.
const srcPathnameExists = srcPathname in lambdas;
if (!srcPathnameExists) {
throw new Error(
`Invariant: Expected to have a lambda for the source route: ${srcPathname}`
);
}

// Associate the data pathname with the source pathname's lambda.
lambdas[dataPathname] = lambdas[srcPathname];
}
}

return {
wildcard: wildcardConfig,
images: getImagesConfig(imagesManifest),
@@ -2274,6 +2345,6 @@ export async function serverBuild({
]),
],
framework: { version: nextVersion },
flags,
flags: variantsManifest || undefined,
};
}
@@ -16,6 +16,7 @@ import {
|
||||
EdgeFunction,
|
||||
Images,
|
||||
File,
|
||||
FlagDefinitions,
|
||||
} from '@vercel/build-utils';
|
||||
import { NodeFileTraceReasons } from '@vercel/nft';
|
||||
import type {
|
||||
@@ -304,19 +305,31 @@ export async function getRoutesManifest(
|
||||
return routesManifest;
|
||||
}
|
||||
|
||||
export async function getDynamicRoutes(
|
||||
entryPath: string,
|
||||
entryDirectory: string,
|
||||
dynamicPages: string[],
|
||||
isDev?: boolean,
|
||||
routesManifest?: RoutesManifest,
|
||||
omittedRoutes?: Set<string>,
|
||||
canUsePreviewMode?: boolean,
|
||||
bypassToken?: string,
|
||||
isServerMode?: boolean,
|
||||
dynamicMiddlewareRouteMap?: Map<string, RouteWithSrc>,
|
||||
experimentalPPR?: boolean
|
||||
): Promise<RouteWithSrc[]> {
|
||||
export async function getDynamicRoutes({
|
||||
entryPath,
|
||||
entryDirectory,
|
||||
dynamicPages,
|
||||
isDev,
|
||||
routesManifest,
|
||||
omittedRoutes,
|
||||
canUsePreviewMode,
|
||||
bypassToken,
|
||||
isServerMode,
|
||||
dynamicMiddlewareRouteMap,
|
||||
experimentalPPRRoutes,
|
||||
}: {
|
||||
entryPath: string;
|
||||
entryDirectory: string;
|
||||
dynamicPages: string[];
|
||||
isDev?: boolean;
|
||||
routesManifest?: RoutesManifest;
|
||||
omittedRoutes?: ReadonlySet<string>;
|
||||
canUsePreviewMode?: boolean;
|
||||
bypassToken?: string;
|
||||
isServerMode?: boolean;
|
||||
dynamicMiddlewareRouteMap?: ReadonlyMap<string, RouteWithSrc>;
|
||||
experimentalPPRRoutes: ReadonlySet<string>;
|
||||
}): Promise<RouteWithSrc[]> {
|
||||
if (routesManifest) {
|
||||
switch (routesManifest.version) {
|
||||
case 1:
|
||||
@@ -389,7 +402,7 @@ export async function getDynamicRoutes(
|
||||
];
|
||||
}
|
||||
|
||||
if (experimentalPPR) {
|
||||
if (experimentalPPRRoutes.has(page)) {
|
||||
let dest = route.dest?.replace(/($|\?)/, '.prefetch.rsc$1');
|
||||
|
||||
if (page === '/' || page === '/index') {
|
||||
@@ -442,7 +455,9 @@ export async function getDynamicRoutes(
|
||||
let getRouteRegex: ((pageName: string) => { re: RegExp }) | undefined =
|
||||
undefined;
|
||||
|
||||
let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;
|
||||
let getSortedRoutes:
|
||||
| ((normalizedPages: ReadonlyArray<string>) => string[])
|
||||
| undefined;
|
||||
|
||||
try {
|
||||
const resolved = require_.resolve('next-server/dist/lib/router/utils', {
|
||||
@@ -645,10 +660,10 @@ export function filterStaticPages(
|
||||
}
|
||||
|
||||
export function getFilesMapFromReasons(
|
||||
fileList: Set<string>,
|
||||
fileList: ReadonlySet<string>,
|
||||
reasons: NodeFileTraceReasons,
|
||||
ignoreFn?: (file: string, parent?: string) => boolean
|
||||
) {
|
||||
): ReadonlyMap<string, Set<string>> {
|
||||
// this uses the reasons tree to collect files specific to a
|
||||
// certain parent allowing us to not have to trace each parent
|
||||
// separately
|
||||
@@ -804,6 +819,7 @@ export interface CreateLambdaFromPseudoLayersOptions
|
||||
layers: PseudoLayer[];
|
||||
isStreaming?: boolean;
|
||||
nextVersion?: string;
|
||||
experimentalAllowBundling?: boolean;
|
||||
}
|
||||
|
||||
// measured with 1, 2, 5, 10, and `os.cpus().length || 5`
|
||||
@@ -815,6 +831,7 @@ export async function createLambdaFromPseudoLayers({
|
||||
layers,
|
||||
isStreaming,
|
||||
nextVersion,
|
||||
experimentalAllowBundling,
|
||||
...lambdaOptions
|
||||
}: CreateLambdaFromPseudoLayersOptions) {
|
||||
await createLambdaSema.acquire();
|
||||
@@ -862,6 +879,7 @@ export async function createLambdaFromPseudoLayers({
|
||||
slug: 'nextjs',
|
||||
version: nextVersion,
|
||||
},
|
||||
experimentalAllowBundling,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -914,6 +932,10 @@ export type NextPrerenderedRoutes = {
|
||||
};
|
||||
};
|
||||
|
||||
/**
|
||||
* Routes that have their fallback behavior is disabled. All routes would've
|
||||
* been provided in the top-level `routes` key (`staticRoutes`).
|
||||
*/
|
||||
omittedRoutes: {
[route: string]: {
routeRegex: string;
@@ -1293,8 +1315,6 @@ export async function getPrerenderManifest(
prefetchDataRouteRegex,
};
} else {
// Fallback behavior is disabled, all routes would've been provided
// in the top-level `routes` key (`staticRoutes`).
ret.omittedRoutes[lazyRoute] = {
experimentalBypassFor,
experimentalPPR,
@@ -1361,7 +1381,7 @@ async function getSourceFilePathFromPage({
}: {
workPath: string;
page: string;
pageExtensions?: string[];
pageExtensions?: ReadonlyArray<string>;
}) {
const usesSrcDir = await usesSrcDirectory(workPath);
const extensionsToTry = pageExtensions || ['js', 'jsx', 'ts', 'tsx'];
@@ -1502,13 +1522,14 @@ export async function getPageLambdaGroups({
internalPages,
pageExtensions,
inversedAppPathManifest,
experimentalAllowBundling,
}: {
entryPath: string;
config: Config;
functionsConfigManifest?: FunctionsConfigManifestV1;
pages: string[];
prerenderRoutes: Set<string>;
experimentalPPRRoutes: Set<string> | undefined;
pages: ReadonlyArray<string>;
prerenderRoutes: ReadonlySet<string>;
experimentalPPRRoutes: ReadonlySet<string> | undefined;
pageTraces: {
[page: string]: {
[key: string]: FileFsRef;
@@ -1521,9 +1542,10 @@ export async function getPageLambdaGroups({
initialPseudoLayer: PseudoLayerResult;
initialPseudoLayerUncompressed: number;
lambdaCompressedByteLimit: number;
internalPages: string[];
pageExtensions?: string[];
internalPages: ReadonlyArray<string>;
pageExtensions?: ReadonlyArray<string>;
inversedAppPathManifest?: Record<string, string>;
experimentalAllowBundling?: boolean;
}) {
const groups: Array<LambdaGroup> = [];

@@ -1563,42 +1585,46 @@ export async function getPageLambdaGroups({
opts = { ...vercelConfigOpts, ...opts };
}

let matchingGroup = groups.find(group => {
const matches =
group.maxDuration === opts.maxDuration &&
group.memory === opts.memory &&
group.isPrerenders === isPrerenderRoute &&
group.isExperimentalPPR === isExperimentalPPR;
let matchingGroup = experimentalAllowBundling
? undefined
: groups.find(group => {
const matches =
group.maxDuration === opts.maxDuration &&
group.memory === opts.memory &&
group.isPrerenders === isPrerenderRoute &&
group.isExperimentalPPR === isExperimentalPPR;

if (matches) {
let newTracedFilesSize = group.pseudoLayerBytes;
let newTracedFilesUncompressedSize = group.pseudoLayerUncompressedBytes;
if (matches) {
let newTracedFilesSize = group.pseudoLayerBytes;
let newTracedFilesUncompressedSize =
group.pseudoLayerUncompressedBytes;

for (const newPage of newPages) {
Object.keys(pageTraces[newPage] || {}).map(file => {
if (!group.pseudoLayer[file]) {
const item = tracedPseudoLayer[file] as PseudoFile;
for (const newPage of newPages) {
Object.keys(pageTraces[newPage] || {}).map(file => {
if (!group.pseudoLayer[file]) {
const item = tracedPseudoLayer[file] as PseudoFile;

newTracedFilesSize += item.compBuffer?.byteLength || 0;
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
newTracedFilesSize += item.compBuffer?.byteLength || 0;
newTracedFilesUncompressedSize += item.uncompressedSize || 0;
}
});
newTracedFilesSize +=
compressedPages[newPage].compBuffer.byteLength;
newTracedFilesUncompressedSize +=
compressedPages[newPage].uncompressedSize;
}
});
newTracedFilesSize += compressedPages[newPage].compBuffer.byteLength;
newTracedFilesUncompressedSize +=
compressedPages[newPage].uncompressedSize;
}

const underUncompressedLimit =
newTracedFilesUncompressedSize <
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
const underCompressedLimit =
newTracedFilesSize <
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;
const underUncompressedLimit =
newTracedFilesUncompressedSize <
MAX_UNCOMPRESSED_LAMBDA_SIZE - LAMBDA_RESERVED_UNCOMPRESSED_SIZE;
const underCompressedLimit =
newTracedFilesSize <
lambdaCompressedByteLimit - LAMBDA_RESERVED_COMPRESSED_SIZE;

return underUncompressedLimit && underCompressedLimit;
}
return false;
});
return underUncompressedLimit && underCompressedLimit;
}
return false;
});

if (matchingGroup) {
matchingGroup.pages.push(page);
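// Editor's note (not part of the diff): a minimal sketch of the size check used
// above when deciding whether a page may join an existing Lambda group. The real
// code compares against MAX_UNCOMPRESSED_LAMBDA_SIZE and the LAMBDA_RESERVED_*
// constants defined elsewhere in `@vercel/next`; they are passed as parameters
// here purely for illustration. When `experimentalAllowBundling` is enabled, the
// group lookup above is skipped entirely and every page starts its own group.
function groupHasRoomFor(
  newCompressedBytes: number,
  newUncompressedBytes: number,
  limits: {
    maxUncompressed: number; // stands in for MAX_UNCOMPRESSED_LAMBDA_SIZE
    reservedUncompressed: number; // stands in for LAMBDA_RESERVED_UNCOMPRESSED_SIZE
    compressedByteLimit: number; // stands in for lambdaCompressedByteLimit
    reservedCompressed: number; // stands in for LAMBDA_RESERVED_COMPRESSED_SIZE
  }
): boolean {
  const underUncompressedLimit =
    newUncompressedBytes < limits.maxUncompressed - limits.reservedUncompressed;
  const underCompressedLimit =
    newCompressedBytes < limits.compressedByteLimit - limits.reservedCompressed;
  return underUncompressedLimit && underCompressedLimit;
}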
@@ -1906,12 +1932,13 @@ type OnPrerenderRouteArgs = {
isServerMode: boolean;
canUsePreviewMode: boolean;
lambdas: { [key: string]: Lambda };
experimentalStreamingLambdaPaths: Map<string, string> | undefined;
experimentalStreamingLambdaPaths: ReadonlyMap<string, string> | undefined;
prerenders: { [key: string]: Prerender | File };
pageLambdaMap: { [key: string]: string };
routesManifest?: RoutesManifest;
isCorrectNotFoundRoutes?: boolean;
isEmptyAllowQueryForPrendered?: boolean;
omittedPrerenderRoutes: ReadonlySet<string>;
};
let prerenderGroup = 1;

@@ -1948,6 +1975,7 @@ export const onPrerenderRoute =
routesManifest,
isCorrectNotFoundRoutes,
isEmptyAllowQueryForPrendered,
omittedPrerenderRoutes,
} = prerenderRouteArgs;

if (isBlocking && isFallback) {
@@ -2372,25 +2400,31 @@ export const onPrerenderRoute =
sourcePath = srcRoute;
}

// The `experimentalStreamingLambdaPaths` stores the page without the
// leading `/` and with the `/` rewritten to be `index`. We should
// normalize the key so that it matches that key in the map.
let key = srcRoute || routeKey;
if (key === '/') {
key = 'index';
} else {
if (!key.startsWith('/')) {
throw new Error("Invariant: key doesn't start with /");
let experimentalStreamingLambdaPath: string | undefined;
if (experimentalPPR) {
if (!experimentalStreamingLambdaPaths) {
throw new Error(
"Invariant: experimentalStreamingLambdaPaths doesn't exist"
);
}

key = key.substring(1);
// If a source route exists, and it's not listed as an omitted route,
// then use the src route as the basis for the experimental streaming
// lambda path. If the route doesn't have a source route, or the source
// route is omitted, then use the more specific `routeKey` as the basis.
if (srcRoute && !omittedPrerenderRoutes.has(srcRoute)) {
experimentalStreamingLambdaPath =
experimentalStreamingLambdaPaths.get(
pathnameToOutputName(entryDirectory, srcRoute)
);
} else {
experimentalStreamingLambdaPath =
experimentalStreamingLambdaPaths.get(
pathnameToOutputName(entryDirectory, routeKey)
);
}
}

key = path.posix.join(entryDirectory, key);

const experimentalStreamingLambdaPath =
experimentalStreamingLambdaPaths?.get(key);

prerenders[outputPathPage] = new Prerender({
expiration: initialRevalidate,
lambda,
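// Editor's note (not part of the diff): the basis-selection rule above, factored
// into a small helper for illustration only; the names here are hypothetical.
function streamingLambdaKeyBasis(
  routeKey: string,
  srcRoute: string | null,
  omittedPrerenderRoutes: ReadonlySet<string>
): string {
  // Prefer the dynamic source route unless it was omitted (fallback disabled);
  // otherwise fall back to the more specific, concrete route key.
  return srcRoute && !omittedPrerenderRoutes.has(srcRoute) ? srcRoute : routeKey;
}
// e.g. streamingLambdaKeyBasis('/blog/my-post', '/blog/[slug]', new Set()) === '/blog/[slug]'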
@@ -2593,6 +2627,10 @@ export async function getStaticFiles(
};
}

/**
 * Strips the trailing `/index` from the output name (when it's not the root
 * path) if server mode is enabled.
 */
export function normalizeIndexOutput(
outputName: string,
isServerMode: boolean
@@ -2613,6 +2651,19 @@ export function getNextServerPath(nextVersion: string) {
: 'next/dist/next-server/server';
}

export function pathnameToOutputName(entryDirectory: string, pathname: string) {
if (pathname === '/') pathname = '/index';
return path.posix.join(entryDirectory, pathname);
}

export function getPostponeResumePathname(
entryDirectory: string,
pathname: string
): string {
if (pathname === '/') pathname = '/index';
return path.posix.join(entryDirectory, '_next/postponed/resume', pathname);
}

// update to leverage
export function updateRouteSrc(
route: Route,
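// Editor's note (not part of the diff): example inputs/outputs for the two
// helpers added above, assuming they are in scope and an entryDirectory of '.'.
pathnameToOutputName('.', '/'); // => 'index'
pathnameToOutputName('.', '/blog/[slug]'); // => 'blog/[slug]'
getPostponeResumePathname('.', '/'); // => '_next/postponed/resume/index'
getPostponeResumePathname('.', '/blog/[slug]'); // => '_next/postponed/resume/blog/[slug]'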
@@ -2821,7 +2872,7 @@ export async function getMiddlewareBundle({
appPathRoutesManifest: Record<string, string>;
}): Promise<{
staticRoutes: Route[];
dynamicRouteMap: Map<string, RouteWithSrc>;
dynamicRouteMap: ReadonlyMap<string, RouteWithSrc>;
edgeFunctions: Record<string, EdgeFunction>;
}> {
const middlewareManifest = await getMiddlewareManifest(
@@ -3257,19 +3308,14 @@ export function isApiPage(page: string | undefined) {
.match(/(serverless|server)\/pages\/api(\/|\.js$)/);
}

/** @deprecated */
export type VariantsManifestLegacy = Record<
string,
{
defaultValue?: unknown;
metadata?: Record<string, unknown>;
}
>;
export type VariantsManifest = {
definitions: FlagDefinitions;
};

export async function getVariantsManifest(
entryPath: string,
outputDirectory: string
): Promise<null | VariantsManifestLegacy> {
): Promise<null | VariantsManifest> {
const pathVariantsManifest = path.join(
entryPath,
outputDirectory,
@@ -3283,7 +3329,7 @@ export async function getVariantsManifest(

if (!hasVariantsManifest) return null;

const variantsManifest: VariantsManifestLegacy = await fs.readJSON(
const variantsManifest: VariantsManifest = await fs.readJSON(
pathVariantsManifest
);

@@ -3305,7 +3351,7 @@ export async function getServerlessPages(params: {
glob('**/route.js', appDir),
glob('**/_not-found.js', appDir),
]).then(items => Object.assign(...items))
: Promise.resolve({}),
: Promise.resolve({} as Record<string, FileFsRef>),
getMiddlewareManifest(params.entryPath, params.outputDirectory),
]);

18 packages/next/test/fixtures/00-app-dir-ppr-full/app/no-fallback/[slug]/page.jsx vendored Normal file
@@ -0,0 +1,18 @@
import React, { Suspense } from 'react'
import { Dynamic } from '../../../components/dynamic'

export const dynamicParams = false;

const slugs = ['a', 'b', 'c'];

export function generateStaticParams() {
return slugs.map((slug) => ({ slug }));
}

export default function NoFallbackPage({ params: { slug } }) {
return (
<Suspense fallback={<Dynamic pathname={`/no-fallback/${slug}`} fallback />}>
<Dynamic pathname={`/no-fallback/${slug}`} />
</Suspense>
)
}
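// Editor's note (not part of the diff): because `dynamicParams` is `false`,
// only the params returned by `generateStaticParams()` above are valid for this
// route, so the prerendered paths are exactly:
const prerenderedPaths = ['a', 'b', 'c'].map(slug => `/no-fallback/${slug}`);
// => ['/no-fallback/a', '/no-fallback/b', '/no-fallback/c']
// Anything else (e.g. `/no-fallback/non-existent`) is expected to 404, which is
// what the test changes below assert.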
@@ -19,11 +19,23 @@ const pages = [
{ pathname: '/no-suspense/nested/a', dynamic: true },
{ pathname: '/no-suspense/nested/b', dynamic: true },
{ pathname: '/no-suspense/nested/c', dynamic: true },
{ pathname: '/no-fallback/a', dynamic: true },
{ pathname: '/no-fallback/b', dynamic: true },
{ pathname: '/no-fallback/c', dynamic: true },
// TODO: uncomment when we've fixed the 404 case for force-dynamic pages
// { pathname: '/dynamic/force-dynamic', dynamic: 'force-dynamic' },
{ pathname: '/dynamic/force-static', dynamic: 'force-static' },
];

const cases = {
404: [
// For routes that do not support fallback (they had `dynamicParams` set to
// `false`), we shouldn't see any fallback behavior for routes not defined
// in `generateStaticParams`.
{ pathname: '/no-fallback/non-existent' },
],
};

const ctx = {};

describe(`${__dirname.split(path.sep).pop()}`, () => {
@@ -49,6 +61,14 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
expect(html).toContain('</html>');
}
);

it.each(cases[404])(
'should return 404 for $pathname',
async ({ pathname }) => {
const res = await fetch(`${ctx.deploymentUrl}${pathname}`);
expect(res.status).toEqual(404);
}
);
});

describe('prefetch RSC payloads should return', () => {
@@ -88,6 +108,16 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
}
}
);

it.each(cases[404])(
'should return 404 for $pathname',
async ({ pathname }) => {
const res = await fetch(`${ctx.deploymentUrl}${pathname}`, {
headers: { RSC: 1, 'Next-Router-Prefetch': '1' },
});
expect(res.status).toEqual(404);
}
);
});

describe('dynamic RSC payloads should return', () => {
@@ -122,5 +152,15 @@ describe(`${__dirname.split(path.sep).pop()}`, () => {
expect(text).not.toContain(expected);
}
});

it.each(cases[404])(
'should return 404 for $pathname',
async ({ pathname }) => {
const res = await fetch(`${ctx.deploymentUrl}${pathname}`, {
headers: { RSC: 1 },
});
expect(res.status).toEqual(404);
}
);
});
});

@@ -1,8 +1,8 @@
{
"dependencies": {
"next": "canary",
"react": "experimental",
"react-dom": "experimental"
"react": "18.2.0",
"react-dom": "18.2.0"
},
"ignoreNextjsUpdates": true
}

@@ -1,5 +1,32 @@
# @vercel/node

## 3.0.23

### Patch Changes

- Updated dependencies [[`8ea93839c`](https://github.com/vercel/vercel/commit/8ea93839ccc70816f3ece9d7cfdb857aa7a4b015)]:
- @vercel/build-utils@7.9.0

## 3.0.22

### Patch Changes

- Updated dependencies [[`908e7837d`](https://github.com/vercel/vercel/commit/908e7837d55bc02e708f402c700e00208415e954), [`5e3656ec1`](https://github.com/vercel/vercel/commit/5e3656ec1b3f0561091636582715ba09ddd8cb2d)]:
- @vercel/build-utils@7.8.0

## 3.0.21

### Patch Changes

- Updated dependencies [[`37b193c84`](https://github.com/vercel/vercel/commit/37b193c845d8b63d93bb0017fbc1a6a35306ef1f)]:
- @vercel/build-utils@7.7.1

## 3.0.20

### Patch Changes

- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))

## 3.0.19

### Patch Changes

@@ -8,6 +8,7 @@ module.exports = {
{
diagnostics: true,
isolatedModules: true,
tsconfig: 'test/tsconfig.json',
},
],
},

@@ -1,6 +1,6 @@
{
"name": "@vercel/node",
"version": "3.0.19",
"version": "3.0.23",
"license": "Apache-2.0",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -24,9 +24,9 @@
"@edge-runtime/primitives": "4.1.0",
"@edge-runtime/vm": "3.2.0",
"@types/node": "14.18.33",
"@vercel/build-utils": "7.7.0",
"@vercel/build-utils": "7.9.0",
"@vercel/error-utils": "2.0.2",
"@vercel/nft": "0.26.3",
"@vercel/nft": "0.26.4",
"@vercel/static-config": "3.0.0",
"async-listen": "3.0.0",
"cjs-module-lexer": "1.2.3",

@@ -26,7 +26,7 @@
"@types/jest": "27.4.1",
"@types/node": "14.18.33",
"@types/which": "3.0.0",
"@vercel/build-utils": "7.7.0",
"@vercel/build-utils": "7.9.0",
"execa": "^1.0.0",
"fs-extra": "11.1.1",
"jest-junit": "16.0.0",

@@ -1,5 +1,11 @@
# @vercel/redwood

## 2.0.8

### Patch Changes

- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))

## 2.0.7

### Patch Changes

@@ -1,6 +1,6 @@
{
"name": "@vercel/redwood",
"version": "2.0.7",
"version": "2.0.8",
"main": "./dist/index.js",
"license": "Apache-2.0",
"homepage": "https://vercel.com/docs",
@@ -20,7 +20,7 @@
"type-check": "tsc --noEmit"
},
"dependencies": {
"@vercel/nft": "0.26.3",
"@vercel/nft": "0.26.4",
"@vercel/routing-utils": "3.1.0",
"semver": "6.3.1"
},
@@ -28,7 +28,7 @@
"@types/aws-lambda": "8.10.19",
"@types/node": "14.18.33",
"@types/semver": "6.0.0",
"@vercel/build-utils": "7.7.0",
"@vercel/build-utils": "7.9.0",
"execa": "3.2.0",
"fs-extra": "11.1.0",
"jest-junit": "16.0.0"

@@ -1,5 +1,51 @@
# @vercel/remix-builder

## 2.1.3

### Patch Changes

- Improve heuristics for detecting Remix + Vite ([#11256](https://github.com/vercel/vercel/pull/11256))

## 2.1.2

### Patch Changes

- Update `@remix-run/dev` fork to v2.8.1 ([#11241](https://github.com/vercel/vercel/pull/11241))

## 2.1.1

### Patch Changes

- [build-utils] increase max memory limit ([#11209](https://github.com/vercel/vercel/pull/11209))

- Remove usage of `ensureResolvable()` in Vite builds ([#11213](https://github.com/vercel/vercel/pull/11213))

- Update `@remix-run/dev` fork to v2.8.0 ([#11206](https://github.com/vercel/vercel/pull/11206))

- Ensure the symlink directory exists in `ensureSymlink()` ([#11205](https://github.com/vercel/vercel/pull/11205))

## 2.1.0

### Minor Changes

- Remix Vite plugin support ([#11031](https://github.com/vercel/vercel/pull/11031))

## 2.0.20

### Patch Changes

- Don't install Remix fork when not using split configuration ([#11152](https://github.com/vercel/vercel/pull/11152))

- Add `serverBundles` post-build sanity check and fallback ([#11153](https://github.com/vercel/vercel/pull/11153))

- bump `@vercel/nft@0.26.4` ([#11155](https://github.com/vercel/vercel/pull/11155))

- Update `@remix-run/dev` fork to v2.6.0 ([#11162](https://github.com/vercel/vercel/pull/11162))

- Update `@remix-run/dev` fork to v2.7.0 ([#11180](https://github.com/vercel/vercel/pull/11180))

- Update `@remix-run/dev` fork to v2.7.2 ([#11186](https://github.com/vercel/vercel/pull/11186))

## 2.0.19

### Patch Changes

@@ -1,9 +1,10 @@
{
"name": "@vercel/remix-builder",
"version": "2.0.19",
"version": "2.1.3",
"license": "Apache-2.0",
"main": "./dist/index.js",
"homepage": "https://vercel.com/docs",
"sideEffects": false,
"repository": {
"type": "git",
"url": "https://github.com/vercel/vercel.git",
@@ -13,7 +14,7 @@
"build": "node ../../utils/build-builder.mjs",
"test": "jest --reporters=default --reporters=jest-junit --env node --verbose --bail --runInBand",
"test-unit": "pnpm test test/unit.*test.*",
"test-e2e": "pnpm test test/integration.test.ts",
"test-e2e": "pnpm test test/integration-*.test.ts",
"type-check": "tsc --noEmit"
},
"files": [
@@ -21,16 +22,17 @@
"defaults"
],
"dependencies": {
"@vercel/nft": "0.26.3",
"@vercel/error-utils": "2.0.2",
"@vercel/nft": "0.26.4",
"@vercel/static-config": "3.0.0",
"ts-morph": "12.0.0"
},
"devDependencies": {
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.5.1",
"@remix-run/dev": "npm:@vercel/remix-run-dev@2.8.1",
"@types/jest": "27.5.1",
"@types/node": "14.18.33",
"@types/semver": "7.3.13",
"@vercel/build-utils": "7.7.0",
"@vercel/build-utils": "7.9.0",
"jest-junit": "16.0.0",
"path-to-regexp": "6.2.1",
"semver": "7.5.2"

814 packages/remix/src/build-legacy.ts Normal file
@@ -0,0 +1,814 @@
|
||||
import { Project } from 'ts-morph';
|
||||
import { readFileSync, promises as fs, existsSync } from 'fs';
|
||||
import { basename, dirname, extname, join, posix, relative, sep } from 'path';
|
||||
import {
|
||||
debug,
|
||||
download,
|
||||
execCommand,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
getEnvForPackageManager,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
glob,
|
||||
EdgeFunction,
|
||||
NodejsLambda,
|
||||
rename,
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
scanParentDirs,
|
||||
} from '@vercel/build-utils';
|
||||
import { getConfig } from '@vercel/static-config';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import type {
|
||||
BuildV2,
|
||||
Files,
|
||||
NodeVersion,
|
||||
PackageJson,
|
||||
BuildResultV2Typical,
|
||||
} from '@vercel/build-utils';
|
||||
import type { ConfigRoute } from '@remix-run/dev/dist/config/routes';
|
||||
import type { BaseFunctionConfig } from '@vercel/static-config';
|
||||
import {
|
||||
calculateRouteConfigHash,
|
||||
findConfig,
|
||||
getPathFromRoute,
|
||||
getRegExpFromPath,
|
||||
getResolvedRouteConfig,
|
||||
isLayoutRoute,
|
||||
ResolvedRouteConfig,
|
||||
ResolvedNodeRouteConfig,
|
||||
ResolvedEdgeRouteConfig,
|
||||
findEntry,
|
||||
chdirAndReadConfig,
|
||||
resolveSemverMinMax,
|
||||
ensureResolvable,
|
||||
isESM,
|
||||
} from './utils';
|
||||
import { patchHydrogenServer } from './hydrogen';
|
||||
|
||||
interface ServerBundle {
|
||||
serverBuildPath: string;
|
||||
routes: string[];
|
||||
}
|
||||
|
||||
const remixBuilderPkg = JSON.parse(
|
||||
readFileSync(join(__dirname, '../package.json'), 'utf8')
|
||||
);
|
||||
const remixRunDevForkVersion =
|
||||
remixBuilderPkg.devDependencies['@remix-run/dev'];
|
||||
|
||||
const DEFAULTS_PATH = join(__dirname, '../defaults');
|
||||
|
||||
const edgeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-edge.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
const nodeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-node.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
// Minimum supported version of the `@vercel/remix` package
|
||||
const VERCEL_REMIX_MIN_VERSION = '1.10.0';
|
||||
|
||||
// Minimum supported version of the `@vercel/remix-run-dev` forked compiler
|
||||
const REMIX_RUN_DEV_MIN_VERSION = '1.15.0';
|
||||
|
||||
// Maximum version of `@vercel/remix-run-dev` fork
|
||||
// (and also `@vercel/remix` since they get published at the same time)
|
||||
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice(
|
||||
remixRunDevForkVersion.lastIndexOf('@') + 1
|
||||
);
|
||||
|
||||
export const build: BuildV2 = async ({
|
||||
entrypoint,
|
||||
files,
|
||||
workPath,
|
||||
repoRootPath,
|
||||
config,
|
||||
meta = {},
|
||||
}) => {
|
||||
const { installCommand, buildCommand } = config;
|
||||
|
||||
await download(files, workPath, meta);
|
||||
|
||||
const mountpoint = dirname(entrypoint);
|
||||
const entrypointFsDirname = join(workPath, mountpoint);
|
||||
|
||||
// Run "Install Command"
|
||||
const nodeVersion = await getNodeVersion(
|
||||
entrypointFsDirname,
|
||||
undefined,
|
||||
config,
|
||||
meta
|
||||
);
|
||||
|
||||
const { cliType, packageJsonPath, lockfileVersion, lockfilePath } =
|
||||
await scanParentDirs(entrypointFsDirname);
|
||||
|
||||
if (!packageJsonPath) {
|
||||
throw new Error('Failed to locate `package.json` file in your project');
|
||||
}
|
||||
|
||||
const [lockfileRaw, pkgRaw] = await Promise.all([
|
||||
lockfilePath ? fs.readFile(lockfilePath) : null,
|
||||
fs.readFile(packageJsonPath, 'utf8'),
|
||||
]);
|
||||
const pkg = JSON.parse(pkgRaw);
|
||||
|
||||
const spawnOpts = getSpawnOptions(meta, nodeVersion);
|
||||
if (!spawnOpts.env) {
|
||||
spawnOpts.env = {};
|
||||
}
|
||||
|
||||
spawnOpts.env = getEnvForPackageManager({
|
||||
cliType,
|
||||
lockfileVersion,
|
||||
nodeVersion,
|
||||
env: spawnOpts.env,
|
||||
});
|
||||
|
||||
if (typeof installCommand === 'string') {
|
||||
if (installCommand.trim()) {
|
||||
console.log(`Running "install" command: \`${installCommand}\`...`);
|
||||
await execCommand(installCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
console.log(`Skipping "install" command...`);
|
||||
}
|
||||
} else {
|
||||
await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
|
||||
}
|
||||
|
||||
const isHydrogen2 = Boolean(
|
||||
pkg.dependencies?.['@shopify/remix-oxygen'] ||
|
||||
pkg.devDependencies?.['@shopify/remix-oxygen']
|
||||
);
|
||||
|
||||
// Determine the version of Remix based on the `@remix-run/dev`
|
||||
// package version.
|
||||
const remixRunDevPath = await ensureResolvable(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
'@remix-run/dev'
|
||||
);
|
||||
const remixRunDevPkg = JSON.parse(
|
||||
readFileSync(join(remixRunDevPath, 'package.json'), 'utf8')
|
||||
);
|
||||
const remixVersion = remixRunDevPkg.version;
|
||||
|
||||
const remixConfig = await chdirAndReadConfig(
|
||||
remixRunDevPath,
|
||||
entrypointFsDirname,
|
||||
packageJsonPath
|
||||
);
|
||||
const { serverEntryPoint, appDirectory } = remixConfig;
|
||||
const remixRoutes = Object.values(remixConfig.routes);
|
||||
|
||||
let depsModified = false;
|
||||
|
||||
const remixRunDevPkgVersion: string | undefined =
|
||||
pkg.dependencies?.['@remix-run/dev'] ||
|
||||
pkg.devDependencies?.['@remix-run/dev'];
|
||||
|
||||
const serverBundlesMap = new Map<string, ConfigRoute[]>();
|
||||
const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();
|
||||
|
||||
// Read the `export const config` (if any) for each route
|
||||
const project = new Project();
|
||||
const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
|
||||
for (const route of remixRoutes) {
|
||||
const routePath = join(remixConfig.appDirectory, route.file);
|
||||
let staticConfig = getConfig(project, routePath);
|
||||
if (staticConfig && isHydrogen2) {
|
||||
console.log(
|
||||
'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
|
||||
);
|
||||
staticConfig = null;
|
||||
}
|
||||
staticConfigsMap.set(route, staticConfig);
|
||||
}
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
const config = getResolvedRouteConfig(
|
||||
route,
|
||||
remixConfig.routes,
|
||||
staticConfigsMap,
|
||||
isHydrogen2
|
||||
);
|
||||
resolvedConfigsMap.set(route, config);
|
||||
}
|
||||
|
||||
// Figure out which routes belong to which server bundles
|
||||
// based on having common static config properties
|
||||
for (const route of remixRoutes) {
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const config = resolvedConfigsMap.get(route);
|
||||
if (!config) {
|
||||
throw new Error(`Expected resolved config for "${route.id}"`);
|
||||
}
|
||||
const hash = calculateRouteConfigHash(config);
|
||||
|
||||
let routesForHash = serverBundlesMap.get(hash);
|
||||
if (!Array.isArray(routesForHash)) {
|
||||
routesForHash = [];
|
||||
serverBundlesMap.set(hash, routesForHash);
|
||||
}
|
||||
|
||||
routesForHash.push(route);
|
||||
}
|
||||
|
||||
let serverBundles: ServerBundle[] = Array.from(
|
||||
serverBundlesMap.entries()
|
||||
).map(([hash, routes]) => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
|
||||
return {
|
||||
serverBuildPath: isHydrogen2
|
||||
? relative(entrypointFsDirname, remixConfig.serverBuildPath)
|
||||
: `${relative(
|
||||
entrypointFsDirname,
|
||||
dirname(remixConfig.serverBuildPath)
|
||||
)}/build-${runtime}-${hash}.js`,
|
||||
routes: routes.map(r => r.id),
|
||||
};
|
||||
});
|
||||
|
||||
// If the project is *not* relying on split configurations, then set
|
||||
// the `serverBuildPath` to the default Remix path, since the forked
|
||||
// Remix compiler will not be used
|
||||
if (!isHydrogen2 && serverBundles.length === 1) {
|
||||
// `serverBuildTarget` and `serverBuildPath` are undefined with
|
||||
// our remix config modifications, so use the default build path
|
||||
serverBundles[0].serverBuildPath = 'build/index.js';
|
||||
}
|
||||
|
||||
// If the project is relying on split configurations, then override
|
||||
// the official `@remix-run/dev` package with the Vercel fork,
|
||||
// which supports the `serverBundles` config
|
||||
if (
|
||||
serverBundles.length > 1 &&
|
||||
!isHydrogen2 &&
|
||||
remixRunDevPkg.name !== '@vercel/remix-run-dev' &&
|
||||
!remixRunDevPkgVersion?.startsWith('https:')
|
||||
) {
|
||||
const remixDevForkVersion = resolveSemverMinMax(
|
||||
REMIX_RUN_DEV_MIN_VERSION,
|
||||
REMIX_RUN_DEV_MAX_VERSION,
|
||||
remixVersion
|
||||
);
|
||||
// Remove `@remix-run/dev`, add `@vercel/remix-run-dev`
|
||||
if (pkg.devDependencies['@remix-run/dev']) {
|
||||
delete pkg.devDependencies['@remix-run/dev'];
|
||||
pkg.devDependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
|
||||
} else {
|
||||
delete pkg.dependencies['@remix-run/dev'];
|
||||
pkg.dependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
|
||||
}
|
||||
depsModified = true;
|
||||
}
|
||||
|
||||
// `app/entry.server.tsx` and `app/entry.client.tsx` are optional in Remix,
|
||||
// so if either of those files are missing then add our own versions.
|
||||
const userEntryServerFile = findEntry(appDirectory, 'entry.server');
|
||||
if (!userEntryServerFile) {
|
||||
await fs.copyFile(
|
||||
join(DEFAULTS_PATH, 'entry.server.jsx'),
|
||||
join(appDirectory, 'entry.server.jsx')
|
||||
);
|
||||
if (!pkg.dependencies['@vercel/remix']) {
|
||||
// Dependency version resolution logic
|
||||
// 1. Users app is on 1.9.0 -> we install the 1.10.0 (minimum) version of `@vercel/remix`.
|
||||
// 2. Users app is on 1.11.0 (a version greater than 1.10.0 and less than the known max
|
||||
// published version) -> we install the (matching) 1.11.0 version of `@vercel/remix`.
|
||||
// 3. Users app is on something greater than our latest version of the fork -> we install
|
||||
// the latest known published version of `@vercel/remix`.
|
||||
const vercelRemixVersion = resolveSemverMinMax(
|
||||
VERCEL_REMIX_MIN_VERSION,
|
||||
REMIX_RUN_DEV_MAX_VERSION,
|
||||
remixVersion
|
||||
);
|
||||
pkg.dependencies['@vercel/remix'] = vercelRemixVersion;
|
||||
depsModified = true;
|
||||
}
|
||||
}
|
||||
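// Editor's note (not part of the diff): a minimal sketch of the clamping rule
// described in the "Dependency version resolution logic" comment above, assuming
// plain versions rather than semver ranges. The real `resolveSemverMinMax()`
// lives in `packages/remix/src/utils.ts`; this illustrates the intent only.
import { coerce, gt, lt } from 'semver';

function clampVersion(min: string, max: string, version: string): string {
  const v = coerce(version)?.version ?? min; // fall back to the floor if unparsable
  if (lt(v, min)) return min; // case 1: older than the supported floor
  if (gt(v, max)) return max; // case 3: newer than the latest published fork
  return v; // case 2: within range, install the matching version
}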
|
||||
if (depsModified) {
|
||||
await fs.writeFile(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');
|
||||
|
||||
// Bypass `--frozen-lockfile` enforcement by removing
|
||||
// env vars that are considered to be CI
|
||||
const nonCiEnv = { ...spawnOpts.env };
|
||||
delete nonCiEnv.CI;
|
||||
delete nonCiEnv.VERCEL;
|
||||
delete nonCiEnv.NOW_BUILDER;
|
||||
|
||||
// Purposefully not passing `meta` here to avoid
|
||||
// the optimization that prevents `npm install`
|
||||
// from running a second time
|
||||
await runNpmInstall(
|
||||
entrypointFsDirname,
|
||||
[],
|
||||
{
|
||||
...spawnOpts,
|
||||
env: nonCiEnv,
|
||||
},
|
||||
undefined,
|
||||
nodeVersion
|
||||
);
|
||||
}
|
||||
|
||||
const userEntryClientFile = findEntry(
|
||||
remixConfig.appDirectory,
|
||||
'entry.client'
|
||||
);
|
||||
if (!userEntryClientFile) {
|
||||
await fs.copyFile(
|
||||
join(DEFAULTS_PATH, 'entry.client.react.jsx'),
|
||||
join(appDirectory, 'entry.client.jsx')
|
||||
);
|
||||
}
|
||||
|
||||
let remixConfigWrapped = false;
|
||||
let serverEntryPointAbs: string | undefined;
|
||||
let originalServerEntryPoint: string | undefined;
|
||||
const remixConfigPath = findConfig(entrypointFsDirname, 'remix.config');
|
||||
const renamedRemixConfigPath = remixConfigPath
|
||||
? `${remixConfigPath}.original${extname(remixConfigPath)}`
|
||||
: undefined;
|
||||
|
||||
try {
|
||||
// We need to patch the `remix.config.js` file to force some values necessary
|
||||
// for a build that works on either Node.js or the Edge runtime
|
||||
if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
|
||||
await fs.rename(remixConfigPath, renamedRemixConfigPath);
|
||||
|
||||
let patchedConfig: string;
|
||||
// Figure out if the `remix.config` file is using ESM syntax
|
||||
if (isESM(renamedRemixConfigPath)) {
|
||||
patchedConfig = `import config from './${basename(
|
||||
renamedRemixConfigPath
|
||||
)}';
|
||||
config.serverBuildTarget = undefined;
|
||||
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
|
||||
config.serverPlatform = 'node';
|
||||
config.serverBuildPath = undefined;
|
||||
config.serverBundles = ${JSON.stringify(serverBundles)};
|
||||
export default config;`;
|
||||
} else {
|
||||
patchedConfig = `const config = require('./${basename(
|
||||
renamedRemixConfigPath
|
||||
)}');
|
||||
config.serverBuildTarget = undefined;
|
||||
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
|
||||
config.serverPlatform = 'node';
|
||||
config.serverBuildPath = undefined;
|
||||
config.serverBundles = ${JSON.stringify(serverBundles)};
|
||||
module.exports = config;`;
|
||||
}
|
||||
await fs.writeFile(remixConfigPath, patchedConfig);
|
||||
remixConfigWrapped = true;
|
||||
}
|
||||
|
||||
// For Hydrogen v2, patch the `server.ts` file to be Vercel-compatible
|
||||
if (isHydrogen2) {
|
||||
if (remixConfig.serverEntryPoint) {
|
||||
serverEntryPointAbs = join(
|
||||
entrypointFsDirname,
|
||||
remixConfig.serverEntryPoint
|
||||
);
|
||||
originalServerEntryPoint = await fs.readFile(
|
||||
serverEntryPointAbs,
|
||||
'utf8'
|
||||
);
|
||||
const patchedServerEntryPoint = patchHydrogenServer(
|
||||
project,
|
||||
serverEntryPointAbs
|
||||
);
|
||||
if (patchedServerEntryPoint) {
|
||||
debug(
|
||||
`Patched Hydrogen server file: ${remixConfig.serverEntryPoint}`
|
||||
);
|
||||
await fs.writeFile(serverEntryPointAbs, patchedServerEntryPoint);
|
||||
}
|
||||
} else {
|
||||
console.log('WARN: No "server" field found in Remix config');
|
||||
}
|
||||
}
|
||||
|
||||
// Make `remix build` output production mode
|
||||
spawnOpts.env.NODE_ENV = 'production';
|
||||
|
||||
// Run "Build Command"
|
||||
if (buildCommand) {
|
||||
debug(`Executing build command "${buildCommand}"`);
|
||||
await execCommand(buildCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
if (hasScript('vercel-build', pkg)) {
|
||||
debug(`Executing "yarn vercel-build"`);
|
||||
await runPackageJsonScript(
|
||||
entrypointFsDirname,
|
||||
'vercel-build',
|
||||
spawnOpts
|
||||
);
|
||||
} else if (hasScript('build', pkg)) {
|
||||
debug(`Executing "yarn build"`);
|
||||
await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
|
||||
} else {
|
||||
await execCommand('remix build', {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
}
|
||||
}
|
||||
} finally {
|
||||
const cleanupOps: Promise<void>[] = [];
|
||||
// Clean up our patched `remix.config.js` to be polite
|
||||
if (remixConfigWrapped && remixConfigPath && renamedRemixConfigPath) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.rename(renamedRemixConfigPath, remixConfigPath)
|
||||
.then(() => debug(`Restored original "${remixConfigPath}" file`))
|
||||
);
|
||||
}
|
||||
// Restore original server entrypoint if it was modified (for Hydrogen v2)
|
||||
if (serverEntryPointAbs && originalServerEntryPoint) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(serverEntryPointAbs, originalServerEntryPoint)
|
||||
.then(() => debug(`Restored original "${serverEntryPointAbs}" file`))
|
||||
);
|
||||
}
|
||||
// Restore original `package.json` file and lockfile
|
||||
if (depsModified) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(packageJsonPath, pkgRaw)
|
||||
.then(() => debug(`Restored original "${packageJsonPath}" file`))
|
||||
);
|
||||
if (lockfilePath && lockfileRaw) {
|
||||
cleanupOps.push(
|
||||
fs
|
||||
.writeFile(lockfilePath, lockfileRaw)
|
||||
.then(() => debug(`Restored original "${lockfilePath}" file`))
|
||||
);
|
||||
}
|
||||
}
|
||||
await Promise.all(cleanupOps);
|
||||
}
|
||||
|
||||
// This needs to happen before we run NFT to create the Node/Edge functions
|
||||
await Promise.all([
|
||||
ensureResolvable(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
'@remix-run/server-runtime'
|
||||
),
|
||||
!isHydrogen2
|
||||
? ensureResolvable(entrypointFsDirname, repoRootPath, '@remix-run/node')
|
||||
: null,
|
||||
]);
|
||||
|
||||
const staticDir = join(entrypointFsDirname, 'public');
|
||||
|
||||
// Do a sanity check to ensure that the server bundles `serverBuildPath` was actually created.
|
||||
// If it was not, then that usually means the Vercel forked Remix compiler was not used and
|
||||
// thus only a singular server bundle was produced.
|
||||
const serverBundlesRespected = existsSync(
|
||||
join(entrypointFsDirname, serverBundles[0].serverBuildPath)
|
||||
);
|
||||
if (!serverBundlesRespected) {
|
||||
console.warn(
|
||||
'WARN: `serverBundles` configuration failed. Falling back to a singular server bundle.'
|
||||
);
|
||||
serverBundles = [
|
||||
{
|
||||
serverBuildPath: 'build/index.js',
|
||||
routes: serverBundles.flatMap(b => b.routes),
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
const [staticFiles, buildAssets, ...functions] = await Promise.all([
|
||||
glob('**', staticDir),
|
||||
glob('**', remixConfig.assetsBuildDirectory),
|
||||
...serverBundles.map(bundle => {
|
||||
const firstRoute = remixConfig.routes[bundle.routes[0]];
|
||||
const config = resolvedConfigsMap.get(firstRoute) ?? {
|
||||
runtime: 'nodejs',
|
||||
};
|
||||
const serverBuildPath = join(entrypointFsDirname, bundle.serverBuildPath);
|
||||
|
||||
if (config.runtime === 'edge') {
|
||||
return createRenderEdgeFunction(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
serverBuildPath,
|
||||
serverEntryPoint,
|
||||
remixVersion,
|
||||
config
|
||||
);
|
||||
}
|
||||
|
||||
return createRenderNodeFunction(
|
||||
nodeVersion,
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
serverBuildPath,
|
||||
serverEntryPoint,
|
||||
remixVersion,
|
||||
config
|
||||
);
|
||||
}),
|
||||
]);
|
||||
|
||||
const transformedBuildAssets = rename(buildAssets, name => {
|
||||
return posix.join('./', remixConfig.publicPath, name);
|
||||
});
|
||||
|
||||
const output: BuildResultV2Typical['output'] = {
|
||||
...staticFiles,
|
||||
...transformedBuildAssets,
|
||||
};
|
||||
const routes: any[] = [
|
||||
{
|
||||
src: `^/${remixConfig.publicPath.replace(/^\/|\/$/g, '')}/(.*)$`,
|
||||
headers: { 'cache-control': 'public, max-age=31536000, immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
handle: 'filesystem',
|
||||
},
|
||||
];
|
||||
|
||||
for (const route of remixRoutes) {
|
||||
// Layout routes don't get a function / route added
|
||||
if (isLayoutRoute(route.id, remixRoutes)) continue;
|
||||
|
||||
const { path, rePath } = getPathFromRoute(route, remixConfig.routes);
|
||||
|
||||
// If the route is a pathless layout route (at the root level)
|
||||
// and doesn't have any sub-routes, then a function should not be created.
|
||||
if (!path) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const funcIndex = serverBundles.findIndex(bundle => {
|
||||
return bundle.routes.includes(route.id);
|
||||
});
|
||||
const func = functions[funcIndex];
|
||||
|
||||
if (!func) {
|
||||
throw new Error(`Could not determine server bundle for "${route.id}"`);
|
||||
}
|
||||
|
||||
output[path] = func;
|
||||
|
||||
// If this is a dynamic route then add a Vercel route
|
||||
const re = getRegExpFromPath(rePath);
|
||||
if (re) {
|
||||
routes.push({
|
||||
src: re.source,
|
||||
dest: path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Add a 404 path for not found pages to be server-side rendered by Remix.
|
||||
// Use an edge function bundle if one was generated, otherwise use Node.js.
|
||||
if (!output['404']) {
|
||||
const edgeFunctionIndex = Array.from(serverBundlesMap.values()).findIndex(
|
||||
routes => {
|
||||
const runtime = resolvedConfigsMap.get(routes[0])?.runtime;
|
||||
return runtime === 'edge';
|
||||
}
|
||||
);
|
||||
const func =
|
||||
edgeFunctionIndex !== -1 ? functions[edgeFunctionIndex] : functions[0];
|
||||
output['404'] = func;
|
||||
}
|
||||
routes.push({
|
||||
src: '/(.*)',
|
||||
dest: '/404',
|
||||
});
|
||||
|
||||
return { routes, output, framework: { version: remixVersion } };
|
||||
};
|
||||
|
||||
function hasScript(scriptName: string, pkg: PackageJson | null) {
|
||||
const scripts = (pkg && pkg.scripts) || {};
|
||||
return typeof scripts[scriptName] === 'string';
|
||||
}
|
||||
|
||||
async function createRenderNodeFunction(
|
||||
nodeVersion: NodeVersion,
|
||||
entrypointDir: string,
|
||||
rootDir: string,
|
||||
serverBuildPath: string,
|
||||
serverEntryPoint: string | undefined,
|
||||
remixVersion: string,
|
||||
config: ResolvedNodeRouteConfig
|
||||
): Promise<NodejsLambda> {
|
||||
const files: Files = {};
|
||||
|
||||
let handler = relative(rootDir, serverBuildPath);
|
||||
let handlerPath = join(rootDir, handler);
|
||||
if (!serverEntryPoint) {
|
||||
const baseServerBuildPath = basename(serverBuildPath, '.js');
|
||||
handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
|
||||
handlerPath = join(rootDir, handler);
|
||||
|
||||
// Copy the `server-node.mjs` file into the "build" directory
|
||||
const nodeServerSrc = await nodeServerSrcPromise;
|
||||
await writeEntrypointFile(
|
||||
handlerPath,
|
||||
nodeServerSrc.replace(
|
||||
'@remix-run/dev/server-build',
|
||||
`./${baseServerBuildPath}.js`
|
||||
),
|
||||
rootDir
|
||||
);
|
||||
}
|
||||
|
||||
// Trace the handler with `@vercel/nft`
|
||||
const trace = await nodeFileTrace([handlerPath], {
|
||||
base: rootDir,
|
||||
processCwd: entrypointDir,
|
||||
});
|
||||
|
||||
for (const warning of trace.warnings) {
|
||||
debug(`Warning from trace: ${warning.message}`);
|
||||
}
|
||||
|
||||
for (const file of trace.fileList) {
|
||||
files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
|
||||
}
|
||||
|
||||
const fn = new NodejsLambda({
|
||||
files,
|
||||
handler,
|
||||
runtime: nodeVersion.runtime,
|
||||
shouldAddHelpers: false,
|
||||
shouldAddSourcemapSupport: false,
|
||||
operationType: 'SSR',
|
||||
supportsResponseStreaming: true,
|
||||
regions: config.regions,
|
||||
memory: config.memory,
|
||||
maxDuration: config.maxDuration,
|
||||
framework: {
|
||||
slug: 'remix',
|
||||
version: remixVersion,
|
||||
},
|
||||
});
|
||||
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function createRenderEdgeFunction(
|
||||
entrypointDir: string,
|
||||
rootDir: string,
|
||||
serverBuildPath: string,
|
||||
serverEntryPoint: string | undefined,
|
||||
remixVersion: string,
|
||||
config: ResolvedEdgeRouteConfig
|
||||
): Promise<EdgeFunction> {
|
||||
const files: Files = {};
|
||||
|
||||
let handler = relative(rootDir, serverBuildPath);
|
||||
let handlerPath = join(rootDir, handler);
|
||||
if (!serverEntryPoint) {
|
||||
const baseServerBuildPath = basename(serverBuildPath, '.js');
|
||||
handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
|
||||
handlerPath = join(rootDir, handler);
|
||||
|
||||
// Copy the `server-edge.mjs` file into the "build" directory
|
||||
const edgeServerSrc = await edgeServerSrcPromise;
|
||||
await writeEntrypointFile(
|
||||
handlerPath,
|
||||
edgeServerSrc.replace(
|
||||
'@remix-run/dev/server-build',
|
||||
`./${baseServerBuildPath}.js`
|
||||
),
|
||||
rootDir
|
||||
);
|
||||
}
|
||||
|
||||
let remixRunVercelPkgJson: string | undefined;
|
||||
|
||||
// Trace the handler with `@vercel/nft`
|
||||
const trace = await nodeFileTrace([handlerPath], {
|
||||
base: rootDir,
|
||||
processCwd: entrypointDir,
|
||||
conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
|
||||
async readFile(fsPath) {
|
||||
let source: Buffer | string;
|
||||
try {
|
||||
source = await fs.readFile(fsPath);
|
||||
} catch (err: any) {
|
||||
if (err.code === 'ENOENT' || err.code === 'EISDIR') {
|
||||
return null;
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
if (basename(fsPath) === 'package.json') {
|
||||
// For Edge Functions, patch "main" field to prefer "browser" or "module"
|
||||
const pkgJson = JSON.parse(source.toString());
|
||||
|
||||
// When `@remix-run/vercel` is detected, we need to modify the `package.json`
|
||||
// to include the "browser" field so that the proper Edge entrypoint file
|
||||
// is used. This is a temporary stop gap until this PR is merged:
|
||||
// https://github.com/remix-run/remix/pull/5537
|
||||
if (pkgJson.name === '@remix-run/vercel') {
|
||||
pkgJson.browser = 'dist/edge.js';
|
||||
pkgJson.dependencies['@remix-run/server-runtime'] =
|
||||
pkgJson.dependencies['@remix-run/node'];
|
||||
|
||||
if (!remixRunVercelPkgJson) {
|
||||
remixRunVercelPkgJson = JSON.stringify(pkgJson, null, 2) + '\n';
|
||||
|
||||
// Copy in the edge entrypoint so that NFT can properly resolve it
|
||||
const vercelEdgeEntrypointPath = join(
|
||||
DEFAULTS_PATH,
|
||||
'vercel-edge-entrypoint.js'
|
||||
);
|
||||
const vercelEdgeEntrypointDest = join(
|
||||
dirname(fsPath),
|
||||
'dist/edge.js'
|
||||
);
|
||||
await fs.copyFile(
|
||||
vercelEdgeEntrypointPath,
|
||||
vercelEdgeEntrypointDest
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for (const prop of ['browser', 'module']) {
|
||||
const val = pkgJson[prop];
|
||||
if (typeof val === 'string') {
|
||||
pkgJson.main = val;
|
||||
|
||||
// Return the modified `package.json` to nft
|
||||
source = JSON.stringify(pkgJson);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
return source;
|
||||
},
|
||||
});
|
||||
|
||||
for (const warning of trace.warnings) {
|
||||
debug(`Warning from trace: ${warning.message}`);
|
||||
}
|
||||
|
||||
for (const file of trace.fileList) {
|
||||
if (
|
||||
remixRunVercelPkgJson &&
|
||||
file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
|
||||
) {
|
||||
// Use the modified `@remix-run/vercel` package.json which contains "browser" field
|
||||
files[file] = new FileBlob({ data: remixRunVercelPkgJson });
|
||||
} else {
|
||||
files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
|
||||
}
|
||||
}
|
||||
|
||||
const fn = new EdgeFunction({
|
||||
files,
|
||||
deploymentTarget: 'v8-worker',
|
||||
entrypoint: handler,
|
||||
regions: config.regions,
|
||||
framework: {
|
||||
slug: 'remix',
|
||||
version: remixVersion,
|
||||
},
|
||||
});
|
||||
|
||||
return fn;
|
||||
}
|
||||
|
||||
async function writeEntrypointFile(
|
||||
path: string,
|
||||
data: string,
|
||||
rootDir: string
|
||||
) {
|
||||
try {
|
||||
await fs.writeFile(path, data);
|
||||
} catch (err: any) {
|
||||
if (err.code === 'ENOENT') {
|
||||
throw new Error(
|
||||
`The "${relative(
|
||||
rootDir,
|
||||
dirname(path)
|
||||
)}" directory does not exist. Please contact support at https://vercel.com/help.`
|
||||
);
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
452 packages/remix/src/build-vite.ts Normal file
@@ -0,0 +1,452 @@
|
||||
import { readFileSync, promises as fs, statSync, existsSync } from 'fs';
|
||||
import { basename, dirname, join, relative, sep } from 'path';
|
||||
import { isErrnoException } from '@vercel/error-utils';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import {
|
||||
BuildResultV2Typical,
|
||||
debug,
|
||||
execCommand,
|
||||
getEnvForPackageManager,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
glob,
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
scanParentDirs,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
EdgeFunction,
|
||||
NodejsLambda,
|
||||
} from '@vercel/build-utils';
|
||||
import {
|
||||
getPathFromRoute,
|
||||
getRegExpFromPath,
|
||||
getRemixVersion,
|
||||
hasScript,
|
||||
logNftWarnings,
|
||||
} from './utils';
|
||||
import type { BuildV2, Files, NodeVersion } from '@vercel/build-utils';
|
||||
|
||||
const DEFAULTS_PATH = join(__dirname, '../defaults');
|
||||
|
||||
const edgeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-edge.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
const nodeServerSrcPromise = fs.readFile(
|
||||
join(DEFAULTS_PATH, 'server-node.mjs'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
interface RemixBuildResult {
|
||||
buildManifest: {
|
||||
serverBundles?: Record<
|
||||
string,
|
||||
{ id: string; file: string; config: Record<string, unknown> }
|
||||
>;
|
||||
routeIdToServerBundleId?: Record<string, string>;
|
||||
routes: Record<
|
||||
string,
|
||||
{
|
||||
id: string;
|
||||
file: string;
|
||||
path?: string;
|
||||
index?: boolean;
|
||||
parentId?: string;
|
||||
config: Record<string, unknown>;
|
||||
}
|
||||
>;
|
||||
};
|
||||
remixConfig: {
|
||||
buildDirectory: string;
|
||||
};
|
||||
viteConfig?: {
|
||||
build?: {
|
||||
assetsDir: string;
|
||||
};
|
||||
};
|
||||
}
|
||||
|
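// Editor's note (not part of the diff): a minimal value matching the
// `RemixBuildResult` interface above. It mirrors the fallback object this file
// constructs when `.vercel/remix-build-result.json` is missing; a manifest
// written by the `vercelPreset()` Preset may contain more entries and fields.
const exampleBuildResult: RemixBuildResult = {
  buildManifest: {
    routes: {
      root: { id: 'root', file: 'app/root.tsx', path: '', config: {} },
      'routes/_index': {
        id: 'routes/_index',
        file: 'app/routes/_index.tsx',
        index: true,
        parentId: 'root',
        config: {},
      },
    },
    routeIdToServerBundleId: { 'routes/_index': '' },
    serverBundles: {
      '': { id: '', file: 'build/server/index.js', config: {} },
    },
  },
  remixConfig: { buildDirectory: 'build' },
};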
||||
export const build: BuildV2 = async ({
|
||||
entrypoint,
|
||||
workPath,
|
||||
repoRootPath,
|
||||
config,
|
||||
meta = {},
|
||||
}) => {
|
||||
const { installCommand, buildCommand } = config;
|
||||
const mountpoint = dirname(entrypoint);
|
||||
const entrypointFsDirname = join(workPath, mountpoint);
|
||||
|
||||
// Run "Install Command"
|
||||
const nodeVersion = await getNodeVersion(
|
||||
entrypointFsDirname,
|
||||
undefined,
|
||||
config,
|
||||
meta
|
||||
);
|
||||
|
||||
const { cliType, lockfileVersion, packageJson } = await scanParentDirs(
|
||||
entrypointFsDirname,
|
||||
true
|
||||
);
|
||||
|
||||
const spawnOpts = getSpawnOptions(meta, nodeVersion);
|
||||
if (!spawnOpts.env) {
|
||||
spawnOpts.env = {};
|
||||
}
|
||||
|
||||
spawnOpts.env = getEnvForPackageManager({
|
||||
cliType,
|
||||
lockfileVersion,
|
||||
nodeVersion,
|
||||
env: spawnOpts.env,
|
||||
});
|
||||
|
||||
if (typeof installCommand === 'string') {
|
||||
if (installCommand.trim()) {
|
||||
console.log(`Running "install" command: \`${installCommand}\`...`);
|
||||
await execCommand(installCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
console.log(`Skipping "install" command...`);
|
||||
}
|
||||
} else {
|
||||
await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
|
||||
}
|
||||
|
||||
// Determine the version of Remix based on the `@remix-run/dev`
|
||||
// package version.
|
||||
const remixVersion = await getRemixVersion(entrypointFsDirname, repoRootPath);
|
||||
|
||||
// Run "Build Command"
|
||||
if (buildCommand) {
|
||||
debug(`Executing build command "${buildCommand}"`);
|
||||
await execCommand(buildCommand, {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
} else {
|
||||
if (hasScript('vercel-build', packageJson)) {
|
||||
debug(`Executing "vercel-build" script`);
|
||||
await runPackageJsonScript(
|
||||
entrypointFsDirname,
|
||||
'vercel-build',
|
||||
spawnOpts
|
||||
);
|
||||
} else if (hasScript('build', packageJson)) {
|
||||
debug(`Executing "build" script`);
|
||||
await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
|
||||
} else {
|
||||
await execCommand('remix build', {
|
||||
...spawnOpts,
|
||||
cwd: entrypointFsDirname,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const remixBuildResultPath = join(
|
||||
entrypointFsDirname,
|
||||
'.vercel/remix-build-result.json'
|
||||
);
|
||||
let remixBuildResult: RemixBuildResult | undefined;
|
||||
try {
|
||||
const remixBuildResultContents = readFileSync(remixBuildResultPath, 'utf8');
|
||||
remixBuildResult = JSON.parse(remixBuildResultContents);
|
||||
} catch (err: unknown) {
|
||||
if (!isErrnoException(err) || err.code !== 'ENOENT') {
|
||||
throw err;
|
||||
}
|
||||
// The project has not configured the `vercelPreset()`
|
||||
// Preset in the "vite.config" file. Attempt to check
|
||||
// for the default build output directory.
|
||||
const buildDirectory = join(entrypointFsDirname, 'build');
|
||||
if (statSync(buildDirectory).isDirectory()) {
|
||||
console.warn('WARN: The `vercelPreset()` Preset was not detected.');
|
||||
remixBuildResult = {
|
||||
buildManifest: {
|
||||
routes: {
|
||||
root: {
|
||||
path: '',
|
||||
id: 'root',
|
||||
file: 'app/root.tsx',
|
||||
config: {},
|
||||
},
|
||||
'routes/_index': {
|
||||
file: 'app/routes/_index.tsx',
|
||||
id: 'routes/_index',
|
||||
index: true,
|
||||
parentId: 'root',
|
||||
config: {},
|
||||
},
|
||||
},
|
||||
},
|
||||
remixConfig: {
|
||||
buildDirectory,
|
||||
},
|
||||
};
|
||||
// Detect if a server build exists (won't be the case when `ssr: false`)
|
||||
const serverPath = 'build/server/index.js';
|
||||
if (existsSync(join(entrypointFsDirname, serverPath))) {
|
||||
remixBuildResult.buildManifest.routeIdToServerBundleId = {
|
||||
'routes/_index': '',
|
||||
};
|
||||
remixBuildResult.buildManifest.serverBundles = {
|
||||
'': {
|
||||
id: '',
|
||||
file: serverPath,
|
||||
config: {},
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!remixBuildResult) {
|
||||
throw new Error(
|
||||
'Could not determine build output directory. Please configure the `vercelPreset()` Preset from the `@vercel/remix` npm package'
|
||||
);
|
||||
}
|
||||
|
||||
const { buildManifest, remixConfig, viteConfig } = remixBuildResult;
|
||||
|
||||
const staticDir = join(remixConfig.buildDirectory, 'client');
|
||||
const serverBundles = Object.values(buildManifest.serverBundles ?? {});
|
||||
|
||||
const [staticFiles, ...functions] = await Promise.all([
|
||||
glob('**', staticDir),
|
||||
...serverBundles.map(bundle => {
|
||||
if (bundle.config.runtime === 'edge') {
|
||||
return createRenderEdgeFunction(
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
join(entrypointFsDirname, bundle.file),
|
||||
undefined,
|
||||
remixVersion,
|
||||
bundle.config
|
||||
);
|
||||
}
|
||||
|
||||
return createRenderNodeFunction(
|
||||
nodeVersion,
|
||||
entrypointFsDirname,
|
||||
repoRootPath,
|
||||
join(entrypointFsDirname, bundle.file),
|
||||
undefined,
|
||||
remixVersion,
|
||||
bundle.config
|
||||
);
|
||||
}),
|
||||
]);
|
||||
|
||||
const functionsMap = new Map<string, EdgeFunction | NodejsLambda>();
|
||||
for (let i = 0; i < serverBundles.length; i++) {
|
||||
functionsMap.set(serverBundles[i].id, functions[i]);
|
||||
}
|
||||
|
||||
const output: BuildResultV2Typical['output'] = staticFiles;
|
||||
const assetsDir = viteConfig?.build?.assetsDir || 'assets';
|
||||
const routes: any[] = [
|
||||
{
|
||||
src: `^/${assetsDir}/(.*)$`,
|
||||
headers: { 'cache-control': 'public, max-age=31536000, immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
handle: 'filesystem',
|
||||
},
|
||||
];
|
||||
|
||||
for (const [id, functionId] of Object.entries(
|
||||
buildManifest.routeIdToServerBundleId ?? {}
|
||||
)) {
|
||||
const route = buildManifest.routes[id];
|
||||
const { path, rePath } = getPathFromRoute(route, buildManifest.routes);
|
||||
|
||||
// If the route is a pathless layout route (at the root level)
|
||||
// and doesn't have any sub-routes, then a function should not be created.
|
||||
if (!path) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const func = functionsMap.get(functionId);
|
||||
if (!func) {
|
||||
throw new Error(`Could not determine server bundle for "${id}"`);
|
||||
}
|
||||
|
||||
output[path] = func;
|
||||
|
||||
// If this is a dynamic route then add a Vercel route
|
||||
const re = getRegExpFromPath(rePath);
|
||||
if (re) {
|
||||
routes.push({
|
||||
src: re.source,
|
||||
dest: path,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// For the 404 case, invoke the Function (or serve the static file
|
||||
// for `ssr: false` mode) at the `/` path. Remix will serve its 404 route.
|
||||
routes.push({
|
||||
src: '/(.*)',
|
||||
dest: '/',
|
||||
});
|
||||
|
||||
return { routes, output, framework: { version: remixVersion } };
|
||||
};

async function createRenderNodeFunction(
  nodeVersion: NodeVersion,
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: /*TODO: ResolvedNodeRouteConfig*/ any
): Promise<NodejsLambda> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-node.mjs` file into the "build" directory
    const nodeServerSrc = await nodeServerSrcPromise;
    await fs.writeFile(
      handlerPath,
      nodeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      )
    );
  }

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
  });

  logNftWarnings(trace.warnings, '@remix-run/node');

  for (const file of trace.fileList) {
    files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
  }

  const fn = new NodejsLambda({
    files,
    handler,
    runtime: nodeVersion.runtime,
    shouldAddHelpers: false,
    shouldAddSourcemapSupport: false,
    operationType: 'SSR',
    supportsResponseStreaming: true,
    regions: config.regions,
    memory: config.memory,
    maxDuration: config.maxDuration,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });

  return fn;
}
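
// Editor's note: a minimal sketch (not part of the original diff) of the
// `replace()` call above. Assuming the `server-node.mjs` template imports the
// virtual server build with a line like the one below (an assumption; the
// template itself is not shown in this diff), the generated handler ends up
// importing the concrete bundle file instead:
const exampleTemplateLine = "import * as build from '@remix-run/dev/server-build';";
const exampleRewritten = exampleTemplateLine.replace(
  '@remix-run/dev/server-build',
  './index.js' // hypothetical bundle file name
);
// exampleRewritten === "import * as build from './index.js';"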

async function createRenderEdgeFunction(
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: /* TODO: ResolvedEdgeRouteConfig*/ any
): Promise<EdgeFunction> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-edge.mjs` file into the "build" directory
    const edgeServerSrc = await edgeServerSrcPromise;
    await fs.writeFile(
      handlerPath,
      edgeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      )
    );
  }

  let remixRunVercelPkgJson: string | undefined;

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
    conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
    async readFile(fsPath) {
      let source: Buffer | string;
      try {
        source = await fs.readFile(fsPath);
      } catch (err: any) {
        if (err.code === 'ENOENT' || err.code === 'EISDIR') {
          return null;
        }
        throw err;
      }
      if (basename(fsPath) === 'package.json') {
        // For Edge Functions, patch "main" field to prefer "browser" or "module"
        const pkgJson = JSON.parse(source.toString());

        for (const prop of ['browser', 'module']) {
          const val = pkgJson[prop];
          if (typeof val === 'string') {
            pkgJson.main = val;

            // Return the modified `package.json` to nft
            source = JSON.stringify(pkgJson);
            break;
          }
        }
      }
      return source;
    },
  });

  logNftWarnings(trace.warnings, '@remix-run/server-runtime');

  for (const file of trace.fileList) {
    if (
      remixRunVercelPkgJson &&
      file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
    ) {
      // Use the modified `@remix-run/vercel` package.json which contains "browser" field
      files[file] = new FileBlob({ data: remixRunVercelPkgJson });
    } else {
      files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
    }
  }

  const fn = new EdgeFunction({
    files,
    deploymentTarget: 'v8-worker',
    entrypoint: handler,
    regions: config.regions,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });

  return fn;
}
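
// Editor's note: a minimal sketch (not part of the original diff) of the
// package.json patching performed by the `readFile` hook above. For a
// hypothetical dependency that ships separate Node.js and browser builds:
const examplePkgBefore = {
  name: 'some-dep', // hypothetical package
  main: 'dist/node.js',
  browser: 'dist/browser.js',
};
const examplePkgAfter = { ...examplePkgBefore, main: examplePkgBefore.browser };
// nft now traces 'dist/browser.js' instead of 'dist/node.js', so the Edge
// Function bundle pulls in the Edge-compatible entrypoint.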

@@ -1,786 +1,8 @@
import { Project } from 'ts-morph';
import { readFileSync, promises as fs } from 'fs';
import { basename, dirname, extname, join, posix, relative, sep } from 'path';
import {
  debug,
  download,
  execCommand,
  FileBlob,
  FileFsRef,
  getEnvForPackageManager,
  getNodeVersion,
  getSpawnOptions,
  glob,
  EdgeFunction,
  NodejsLambda,
  rename,
  runNpmInstall,
  runPackageJsonScript,
  scanParentDirs,
} from '@vercel/build-utils';
import { getConfig } from '@vercel/static-config';
import { nodeFileTrace } from '@vercel/nft';
import type {
  BuildV2,
  Files,
  NodeVersion,
  PackageJson,
  BuildResultV2Typical,
} from '@vercel/build-utils';
import type { ConfigRoute } from '@remix-run/dev/dist/config/routes';
import type { BaseFunctionConfig } from '@vercel/static-config';
import {
  calculateRouteConfigHash,
  findConfig,
  getPathFromRoute,
  getRegExpFromPath,
  getResolvedRouteConfig,
  isLayoutRoute,
  ResolvedRouteConfig,
  ResolvedNodeRouteConfig,
  ResolvedEdgeRouteConfig,
  findEntry,
  chdirAndReadConfig,
  resolveSemverMinMax,
  ensureResolvable,
  isESM,
} from './utils';
import { patchHydrogenServer } from './hydrogen';
import { build as buildVite } from './build-vite';
import { build as buildLegacy } from './build-legacy';
import { isVite } from './utils';
import type { BuildV2 } from '@vercel/build-utils';

interface ServerBundle {
  serverBuildPath: string;
  routes: string[];
}

const remixBuilderPkg = JSON.parse(
  readFileSync(join(__dirname, '../package.json'), 'utf8')
);
const remixRunDevForkVersion =
  remixBuilderPkg.devDependencies['@remix-run/dev'];

const DEFAULTS_PATH = join(__dirname, '../defaults');

const edgeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-edge.mjs'),
  'utf-8'
);
const nodeServerSrcPromise = fs.readFile(
  join(DEFAULTS_PATH, 'server-node.mjs'),
  'utf-8'
);

// Minimum supported version of the `@vercel/remix` package
const VERCEL_REMIX_MIN_VERSION = '1.10.0';

// Minimum supported version of the `@vercel/remix-run-dev` forked compiler
const REMIX_RUN_DEV_MIN_VERSION = '1.15.0';

// Maximum version of `@vercel/remix-run-dev` fork
// (and also `@vercel/remix` since they get published at the same time)
const REMIX_RUN_DEV_MAX_VERSION = remixRunDevForkVersion.slice(
  remixRunDevForkVersion.lastIndexOf('@') + 1
);

export const build: BuildV2 = async ({
  entrypoint,
  files,
  workPath,
  repoRootPath,
  config,
  meta = {},
}) => {
  const { installCommand, buildCommand } = config;

  await download(files, workPath, meta);

  const mountpoint = dirname(entrypoint);
  const entrypointFsDirname = join(workPath, mountpoint);

  // Run "Install Command"
  const nodeVersion = await getNodeVersion(
    entrypointFsDirname,
    undefined,
    config,
    meta
  );

  const { cliType, packageJsonPath, lockfileVersion, lockfilePath } =
    await scanParentDirs(entrypointFsDirname);

  if (!packageJsonPath) {
    throw new Error('Failed to locate `package.json` file in your project');
  }

  const [lockfileRaw, pkgRaw] = await Promise.all([
    lockfilePath ? fs.readFile(lockfilePath) : null,
    fs.readFile(packageJsonPath, 'utf8'),
  ]);
  const pkg = JSON.parse(pkgRaw);

  const spawnOpts = getSpawnOptions(meta, nodeVersion);
  if (!spawnOpts.env) {
    spawnOpts.env = {};
  }

  spawnOpts.env = getEnvForPackageManager({
    cliType,
    lockfileVersion,
    nodeVersion,
    env: spawnOpts.env,
  });

  if (typeof installCommand === 'string') {
    if (installCommand.trim()) {
      console.log(`Running "install" command: \`${installCommand}\`...`);
      await execCommand(installCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    } else {
      console.log(`Skipping "install" command...`);
    }
  } else {
    await runNpmInstall(entrypointFsDirname, [], spawnOpts, meta, nodeVersion);
  }

  const isHydrogen2 = Boolean(
    pkg.dependencies?.['@shopify/remix-oxygen'] ||
      pkg.devDependencies?.['@shopify/remix-oxygen']
  );

  // Determine the version of Remix based on the `@remix-run/dev`
  // package version.
  const remixRunDevPath = await ensureResolvable(
    entrypointFsDirname,
    repoRootPath,
    '@remix-run/dev'
  );
  const remixRunDevPkg = JSON.parse(
    readFileSync(join(remixRunDevPath, 'package.json'), 'utf8')
  );
  const remixVersion = remixRunDevPkg.version;

  const remixConfig = await chdirAndReadConfig(
    remixRunDevPath,
    entrypointFsDirname,
    packageJsonPath
  );
  const { serverEntryPoint, appDirectory } = remixConfig;
  const remixRoutes = Object.values(remixConfig.routes);

  let depsModified = false;

  const remixRunDevPkgVersion: string | undefined =
    pkg.dependencies?.['@remix-run/dev'] ||
    pkg.devDependencies?.['@remix-run/dev'];

  // Override the official `@remix-run/dev` package with the
  // Vercel fork, which supports the `serverBundles` config
  if (
    !isHydrogen2 &&
    remixRunDevPkg.name !== '@vercel/remix-run-dev' &&
    !remixRunDevPkgVersion?.startsWith('https:')
  ) {
    const remixDevForkVersion = resolveSemverMinMax(
      REMIX_RUN_DEV_MIN_VERSION,
      REMIX_RUN_DEV_MAX_VERSION,
      remixVersion
    );
    // Remove `@remix-run/dev`, add `@vercel/remix-run-dev`
    if (pkg.devDependencies['@remix-run/dev']) {
      delete pkg.devDependencies['@remix-run/dev'];
      pkg.devDependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
    } else {
      delete pkg.dependencies['@remix-run/dev'];
      pkg.dependencies['@vercel/remix-run-dev'] = remixDevForkVersion;
    }
    depsModified = true;
  }

  // `app/entry.server.tsx` and `app/entry.client.tsx` are optional in Remix,
  // so if either of those files is missing then add our own versions.
  const userEntryServerFile = findEntry(appDirectory, 'entry.server');
  if (!userEntryServerFile) {
    await fs.copyFile(
      join(DEFAULTS_PATH, 'entry.server.jsx'),
      join(appDirectory, 'entry.server.jsx')
    );
    if (!pkg.dependencies['@vercel/remix']) {
      // Dependency version resolution logic:
      // 1. User's app is on 1.9.0 -> we install the 1.10.0 (minimum) version of `@vercel/remix`.
      // 2. User's app is on 1.11.0 (a version greater than 1.10.0 and less than the known max
      //    published version) -> we install the (matching) 1.11.0 version of `@vercel/remix`.
      // 3. User's app is on something greater than our latest version of the fork -> we install
      //    the latest known published version of `@vercel/remix`.
      const vercelRemixVersion = resolveSemverMinMax(
        VERCEL_REMIX_MIN_VERSION,
        REMIX_RUN_DEV_MAX_VERSION,
        remixVersion
      );
      pkg.dependencies['@vercel/remix'] = vercelRemixVersion;
      depsModified = true;
    }
  }

  if (depsModified) {
    await fs.writeFile(packageJsonPath, JSON.stringify(pkg, null, 2) + '\n');

    // Bypass `--frozen-lockfile` enforcement by removing
    // env vars that are considered to be CI
    const nonCiEnv = { ...spawnOpts.env };
    delete nonCiEnv.CI;
    delete nonCiEnv.VERCEL;
    delete nonCiEnv.NOW_BUILDER;

    // Purposefully not passing `meta` here to avoid
    // the optimization that prevents `npm install`
    // from running a second time
    await runNpmInstall(
      entrypointFsDirname,
      [],
      {
        ...spawnOpts,
        env: nonCiEnv,
      },
      undefined,
      nodeVersion
    );
  }

  const userEntryClientFile = findEntry(
    remixConfig.appDirectory,
    'entry.client'
  );
  if (!userEntryClientFile) {
    await fs.copyFile(
      join(DEFAULTS_PATH, 'entry.client.react.jsx'),
      join(appDirectory, 'entry.client.jsx')
    );
  }

  let remixConfigWrapped = false;
  let serverEntryPointAbs: string | undefined;
  let originalServerEntryPoint: string | undefined;
  const remixConfigPath = findConfig(entrypointFsDirname, 'remix.config');
  const renamedRemixConfigPath = remixConfigPath
    ? `${remixConfigPath}.original${extname(remixConfigPath)}`
    : undefined;

  // These get populated inside the try/catch below
  let serverBundles: ServerBundle[];
  const serverBundlesMap = new Map<string, ConfigRoute[]>();
  const resolvedConfigsMap = new Map<ConfigRoute, ResolvedRouteConfig>();

  try {
    // Read the `export const config` (if any) for each route
    const project = new Project();
    const staticConfigsMap = new Map<ConfigRoute, BaseFunctionConfig | null>();
    for (const route of remixRoutes) {
      const routePath = join(remixConfig.appDirectory, route.file);
      let staticConfig = getConfig(project, routePath);
      if (staticConfig && isHydrogen2) {
        console.log(
          'WARN: `export const config` is currently not supported for Hydrogen v2 apps'
        );
        staticConfig = null;
      }
      staticConfigsMap.set(route, staticConfig);
    }

    for (const route of remixRoutes) {
      const config = getResolvedRouteConfig(
        route,
        remixConfig.routes,
        staticConfigsMap,
        isHydrogen2
      );
      resolvedConfigsMap.set(route, config);
    }

    // Figure out which routes belong to which server bundles
    // based on having common static config properties
    for (const route of remixRoutes) {
      if (isLayoutRoute(route.id, remixRoutes)) continue;

      const config = resolvedConfigsMap.get(route);
      if (!config) {
        throw new Error(`Expected resolved config for "${route.id}"`);
      }
      const hash = calculateRouteConfigHash(config);

      let routesForHash = serverBundlesMap.get(hash);
      if (!Array.isArray(routesForHash)) {
        routesForHash = [];
        serverBundlesMap.set(hash, routesForHash);
      }

      routesForHash.push(route);
    }

    serverBundles = Array.from(serverBundlesMap.entries()).map(
      ([hash, routes]) => {
        const runtime = resolvedConfigsMap.get(routes[0])?.runtime ?? 'nodejs';
        return {
          serverBuildPath: isHydrogen2
            ? relative(entrypointFsDirname, remixConfig.serverBuildPath)
            : `${relative(
                entrypointFsDirname,
                dirname(remixConfig.serverBuildPath)
              )}/build-${runtime}-${hash}.js`,
          routes: routes.map(r => r.id),
        };
      }
    );

    // We need to patch the `remix.config.js` file to force some values necessary
    // for a build that works on either Node.js or the Edge runtime
    if (!isHydrogen2 && remixConfigPath && renamedRemixConfigPath) {
      await fs.rename(remixConfigPath, renamedRemixConfigPath);

      let patchedConfig: string;
      // Figure out if the `remix.config` file is using ESM syntax
      if (isESM(renamedRemixConfigPath)) {
        patchedConfig = `import config from './${basename(
          renamedRemixConfigPath
        )}';
config.serverBuildTarget = undefined;
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
config.serverPlatform = 'node';
config.serverBuildPath = undefined;
config.serverBundles = ${JSON.stringify(serverBundles)};
export default config;`;
      } else {
        patchedConfig = `const config = require('./${basename(
          renamedRemixConfigPath
        )}');
config.serverBuildTarget = undefined;
config.serverModuleFormat = '${pkg.type === 'module' ? 'esm' : 'cjs'}';
config.serverPlatform = 'node';
config.serverBuildPath = undefined;
config.serverBundles = ${JSON.stringify(serverBundles)};
module.exports = config;`;
      }
      await fs.writeFile(remixConfigPath, patchedConfig);
      remixConfigWrapped = true;
    }

    // For Hydrogen v2, patch the `server.ts` file to be Vercel-compatible
    if (isHydrogen2) {
      if (remixConfig.serverEntryPoint) {
        serverEntryPointAbs = join(
          entrypointFsDirname,
          remixConfig.serverEntryPoint
        );
        originalServerEntryPoint = await fs.readFile(
          serverEntryPointAbs,
          'utf8'
        );
        const patchedServerEntryPoint = patchHydrogenServer(
          project,
          serverEntryPointAbs
        );
        if (patchedServerEntryPoint) {
          debug(
            `Patched Hydrogen server file: ${remixConfig.serverEntryPoint}`
          );
          await fs.writeFile(serverEntryPointAbs, patchedServerEntryPoint);
        }
      } else {
        console.log('WARN: No "server" field found in Remix config');
      }
    }

    // Make `remix build` output production mode
    spawnOpts.env.NODE_ENV = 'production';

    // Run "Build Command"
    if (buildCommand) {
      debug(`Executing build command "${buildCommand}"`);
      await execCommand(buildCommand, {
        ...spawnOpts,
        cwd: entrypointFsDirname,
      });
    } else {
      if (hasScript('vercel-build', pkg)) {
        debug(`Executing "yarn vercel-build"`);
        await runPackageJsonScript(
          entrypointFsDirname,
          'vercel-build',
          spawnOpts
        );
      } else if (hasScript('build', pkg)) {
        debug(`Executing "yarn build"`);
        await runPackageJsonScript(entrypointFsDirname, 'build', spawnOpts);
      } else {
        await execCommand('remix build', {
          ...spawnOpts,
          cwd: entrypointFsDirname,
        });
      }
    }
  } finally {
    const cleanupOps: Promise<void>[] = [];
    // Clean up our patched `remix.config.js` to be polite
    if (remixConfigWrapped && remixConfigPath && renamedRemixConfigPath) {
      cleanupOps.push(
        fs
          .rename(renamedRemixConfigPath, remixConfigPath)
          .then(() => debug(`Restored original "${remixConfigPath}" file`))
      );
    }
    // Restore original server entrypoint if it was modified (for Hydrogen v2)
    if (serverEntryPointAbs && originalServerEntryPoint) {
      cleanupOps.push(
        fs
          .writeFile(serverEntryPointAbs, originalServerEntryPoint)
          .then(() => debug(`Restored original "${serverEntryPointAbs}" file`))
      );
    }
    // Restore original `package.json` file and lockfile
    if (depsModified) {
      cleanupOps.push(
        fs
          .writeFile(packageJsonPath, pkgRaw)
          .then(() => debug(`Restored original "${packageJsonPath}" file`))
      );
      if (lockfilePath && lockfileRaw) {
        cleanupOps.push(
          fs
            .writeFile(lockfilePath, lockfileRaw)
            .then(() => debug(`Restored original "${lockfilePath}" file`))
        );
      }
    }
    await Promise.all(cleanupOps);
  }

  // This needs to happen before we run NFT to create the Node/Edge functions
  await Promise.all([
    ensureResolvable(
      entrypointFsDirname,
      repoRootPath,
      '@remix-run/server-runtime'
    ),
    !isHydrogen2
      ? ensureResolvable(entrypointFsDirname, repoRootPath, '@remix-run/node')
      : null,
  ]);

  const staticDir = join(entrypointFsDirname, 'public');

  const [staticFiles, buildAssets, ...functions] = await Promise.all([
    glob('**', staticDir),
    glob('**', remixConfig.assetsBuildDirectory),
    ...serverBundles.map(bundle => {
      const firstRoute = remixConfig.routes[bundle.routes[0]];
      const config = resolvedConfigsMap.get(firstRoute) ?? {
        runtime: 'nodejs',
      };

      if (config.runtime === 'edge') {
        return createRenderEdgeFunction(
          entrypointFsDirname,
          repoRootPath,
          join(entrypointFsDirname, bundle.serverBuildPath),
          serverEntryPoint,
          remixVersion,
          config
        );
      }

      return createRenderNodeFunction(
        nodeVersion,
        entrypointFsDirname,
        repoRootPath,
        join(entrypointFsDirname, bundle.serverBuildPath),
        serverEntryPoint,
        remixVersion,
        config
      );
    }),
  ]);

  const transformedBuildAssets = rename(buildAssets, name => {
    return posix.join('./', remixConfig.publicPath, name);
  });

  const output: BuildResultV2Typical['output'] = {
    ...staticFiles,
    ...transformedBuildAssets,
  };
  const routes: any[] = [
    {
      src: `^/${remixConfig.publicPath.replace(/^\/|\/$/g, '')}/(.*)$`,
      headers: { 'cache-control': 'public, max-age=31536000, immutable' },
      continue: true,
    },
    {
      handle: 'filesystem',
    },
  ];

  for (const route of remixRoutes) {
    // Layout routes don't get a function / route added
    if (isLayoutRoute(route.id, remixRoutes)) continue;

    const { path, rePath } = getPathFromRoute(route, remixConfig.routes);

    // If the route is a pathless layout route (at the root level)
    // and doesn't have any sub-routes, then a function should not be created.
    if (!path) {
      continue;
    }

    const funcIndex = serverBundles.findIndex(bundle => {
      return bundle.routes.includes(route.id);
    });
    const func = functions[funcIndex];

    if (!func) {
      throw new Error(`Could not determine server bundle for "${route.id}"`);
    }

    output[path] = func;

    // If this is a dynamic route then add a Vercel route
    const re = getRegExpFromPath(rePath);
    if (re) {
      routes.push({
        src: re.source,
        dest: path,
      });
    }
  }

  // Add a 404 path for not found pages to be server-side rendered by Remix.
  // Use an edge function bundle if one was generated, otherwise use Node.js.
  if (!output['404']) {
    const edgeFunctionIndex = Array.from(serverBundlesMap.values()).findIndex(
      routes => {
        const runtime = resolvedConfigsMap.get(routes[0])?.runtime;
        return runtime === 'edge';
      }
    );
    const func =
      edgeFunctionIndex !== -1 ? functions[edgeFunctionIndex] : functions[0];
    output['404'] = func;
  }
  routes.push({
    src: '/(.*)',
    dest: '/404',
  });

  return { routes, output, framework: { version: remixVersion } };
export const build: BuildV2 = opts => {
  return isVite(opts.workPath) ? buildVite(opts) : buildLegacy(opts);
};
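
// Editor's note: a minimal sketch (not part of the original diff) of the
// `resolveSemverMinMax()` rule described in the dependency-resolution comments
// above. The clamp logic below is an assumption for illustration; the real
// helper lives in `./utils` and is not shown in this diff.
import semver from 'semver';
function resolveSemverMinMaxSketch(min: string, max: string, version: string): string {
  if (semver.lt(version, min)) return min; // case 1: older than the supported floor
  if (semver.gt(version, max)) return max; // case 3: newer than the latest known fork
  return version;                          // case 2: in range, use the matching version
}
// e.g. resolveSemverMinMaxSketch(VERCEL_REMIX_MIN_VERSION, REMIX_RUN_DEV_MAX_VERSION, '1.9.0')
// would return VERCEL_REMIX_MIN_VERSION ('1.10.0').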

function hasScript(scriptName: string, pkg: PackageJson | null) {
  const scripts = (pkg && pkg.scripts) || {};
  return typeof scripts[scriptName] === 'string';
}
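
// Editor's note: a minimal sketch (not part of the original diff) of the
// `serverBundles` shape computed in the build function above, where routes with
// the same resolved `export const config` hash share one bundle. The ids, hashes
// and paths below are made up for illustration:
const exampleServerBundles: ServerBundle[] = [
  {
    serverBuildPath: 'build/build-edge-abc123.js',
    routes: ['routes/edge-page'],
  },
  {
    serverBuildPath: 'build/build-nodejs-def456.js',
    routes: ['routes/index', 'routes/node-page'],
  },
];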

async function createRenderNodeFunction(
  nodeVersion: NodeVersion,
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: ResolvedNodeRouteConfig
): Promise<NodejsLambda> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-node.mjs` file into the "build" directory
    const nodeServerSrc = await nodeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      nodeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
  });

  for (const warning of trace.warnings) {
    debug(`Warning from trace: ${warning.message}`);
  }

  for (const file of trace.fileList) {
    files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
  }

  const fn = new NodejsLambda({
    files,
    handler,
    runtime: nodeVersion.runtime,
    shouldAddHelpers: false,
    shouldAddSourcemapSupport: false,
    operationType: 'SSR',
    supportsResponseStreaming: true,
    regions: config.regions,
    memory: config.memory,
    maxDuration: config.maxDuration,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });

  return fn;
}

async function createRenderEdgeFunction(
  entrypointDir: string,
  rootDir: string,
  serverBuildPath: string,
  serverEntryPoint: string | undefined,
  remixVersion: string,
  config: ResolvedEdgeRouteConfig
): Promise<EdgeFunction> {
  const files: Files = {};

  let handler = relative(rootDir, serverBuildPath);
  let handlerPath = join(rootDir, handler);
  if (!serverEntryPoint) {
    const baseServerBuildPath = basename(serverBuildPath, '.js');
    handler = join(dirname(handler), `server-${baseServerBuildPath}.mjs`);
    handlerPath = join(rootDir, handler);

    // Copy the `server-edge.mjs` file into the "build" directory
    const edgeServerSrc = await edgeServerSrcPromise;
    await writeEntrypointFile(
      handlerPath,
      edgeServerSrc.replace(
        '@remix-run/dev/server-build',
        `./${baseServerBuildPath}.js`
      ),
      rootDir
    );
  }

  let remixRunVercelPkgJson: string | undefined;

  // Trace the handler with `@vercel/nft`
  const trace = await nodeFileTrace([handlerPath], {
    base: rootDir,
    processCwd: entrypointDir,
    conditions: ['edge-light', 'browser', 'module', 'import', 'require'],
    async readFile(fsPath) {
      let source: Buffer | string;
      try {
        source = await fs.readFile(fsPath);
      } catch (err: any) {
        if (err.code === 'ENOENT' || err.code === 'EISDIR') {
          return null;
        }
        throw err;
      }
      if (basename(fsPath) === 'package.json') {
        // For Edge Functions, patch "main" field to prefer "browser" or "module"
        const pkgJson = JSON.parse(source.toString());

        // When `@remix-run/vercel` is detected, we need to modify the `package.json`
        // to include the "browser" field so that the proper Edge entrypoint file
        // is used. This is a temporary stop gap until this PR is merged:
        // https://github.com/remix-run/remix/pull/5537
        if (pkgJson.name === '@remix-run/vercel') {
          pkgJson.browser = 'dist/edge.js';
          pkgJson.dependencies['@remix-run/server-runtime'] =
            pkgJson.dependencies['@remix-run/node'];

          if (!remixRunVercelPkgJson) {
            remixRunVercelPkgJson = JSON.stringify(pkgJson, null, 2) + '\n';

            // Copy in the edge entrypoint so that NFT can properly resolve it
            const vercelEdgeEntrypointPath = join(
              DEFAULTS_PATH,
              'vercel-edge-entrypoint.js'
            );
            const vercelEdgeEntrypointDest = join(
              dirname(fsPath),
              'dist/edge.js'
            );
            await fs.copyFile(
              vercelEdgeEntrypointPath,
              vercelEdgeEntrypointDest
            );
          }
        }

        for (const prop of ['browser', 'module']) {
          const val = pkgJson[prop];
          if (typeof val === 'string') {
            pkgJson.main = val;

            // Return the modified `package.json` to nft
            source = JSON.stringify(pkgJson);
            break;
          }
        }
      }
      return source;
    },
  });

  for (const warning of trace.warnings) {
    debug(`Warning from trace: ${warning.message}`);
  }

  for (const file of trace.fileList) {
    if (
      remixRunVercelPkgJson &&
      file.endsWith(`@remix-run${sep}vercel${sep}package.json`)
    ) {
      // Use the modified `@remix-run/vercel` package.json which contains "browser" field
      files[file] = new FileBlob({ data: remixRunVercelPkgJson });
    } else {
      files[file] = await FileFsRef.fromFsPath({ fsPath: join(rootDir, file) });
    }
  }

  const fn = new EdgeFunction({
    files,
    deploymentTarget: 'v8-worker',
    entrypoint: handler,
    regions: config.regions,
    framework: {
      slug: 'remix',
      version: remixVersion,
    },
  });

  return fn;
}

async function writeEntrypointFile(
  path: string,
  data: string,
  rootDir: string
) {
  try {
    await fs.writeFile(path, data);
  } catch (err: any) {
    if (err.code === 'ENOENT') {
      throw new Error(
        `The "${relative(
          rootDir,
          dirname(path)
        )}" directory does not exist. Please contact support at https://vercel.com/help.`
      );
    }
    throw err;
  }
}

@@ -1,8 +1,8 @@
import semver from 'semver';
import { existsSync, promises as fs } from 'fs';
import { existsSync, readFileSync, promises as fs } from 'fs';
import { basename, dirname, join, relative, resolve, sep } from 'path';
import { pathToRegexp, Key } from 'path-to-regexp';
import { debug } from '@vercel/build-utils';
import { debug, type PackageJson } from '@vercel/build-utils';
import { walkParentDirs } from '@vercel/build-utils';
import { createRequire } from 'module';
import type {
@@ -58,8 +58,12 @@ export function findEntry(dir: string, basename: string): string | undefined {

const configExts = ['.js', '.cjs', '.mjs'];

export function findConfig(dir: string, basename: string): string | undefined {
  for (const ext of configExts) {
export function findConfig(
  dir: string,
  basename: string,
  exts = configExts
): string | undefined {
  for (const ext of exts) {
    const name = basename + ext;
    const file = join(dir, name);
    if (existsSync(file)) return file;
@@ -355,6 +359,7 @@ async function ensureSymlink(
    }
  }

  await fs.mkdir(symlinkDir, { recursive: true });
  await fs.symlink(relativeTarget, symlinkPath);
  debug(`Created symlink for "${pkgName}"`);
}
@@ -369,3 +374,76 @@ export function isESM(path: string): boolean {
  }
  return isESM;
}

export function hasScript(scriptName: string, pkg?: PackageJson) {
  const scripts = pkg?.scripts || {};
  return typeof scripts[scriptName] === 'string';
}

export async function getRemixVersion(
  dir: string,
  base: string
): Promise<string> {
  const resolvedPath = require_.resolve('@remix-run/dev', { paths: [dir] });
  const pkgPath = await walkParentDirs({
    base,
    start: dirname(resolvedPath),
    filename: 'package.json',
  });
  if (!pkgPath) {
    throw new Error(
      `Failed to find \`package.json\` file for "@remix-run/dev"`
    );
  }
  const { version } = JSON.parse(
    await fs.readFile(pkgPath, 'utf8')
  ) as PackageJson;
  if (typeof version !== 'string') {
    throw new Error(`Missing "version" field`);
  }
  return version;
}

export function logNftWarnings(warnings: Set<Error>, required?: string) {
  for (const warning of warnings) {
    const m = warning.message.match(/^Failed to resolve dependency "(.+)"/);
    if (m) {
      if (m[1] === required) {
        throw new Error(
          `Missing required "${required}" package. Please add it to your \`package.json\` file.`
        );
      } else {
        console.warn(`WARN: ${m[0]}`);
      }
    } else {
      debug(`Warning from trace: ${warning.message}`);
    }
  }
}
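
// Editor's note: a minimal usage sketch (not part of the original diff).
// logNftWarnings() throws only when the unresolved dependency is the `required`
// package; any other "Failed to resolve dependency" message is downgraded to a
// console warning, and everything else goes to debug(). The message below is a
// hypothetical trace warning:
const exampleWarnings = new Set<Error>([
  new Error('Failed to resolve dependency "left-pad" in routes/example.js'),
]);
logNftWarnings(exampleWarnings, '@remix-run/node'); // logs a WARN line, does not throw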

export function isVite(dir: string): boolean {
  const viteConfig = findConfig(dir, 'vite.config', ['.js', '.ts']);
  if (!viteConfig) return false;

  const remixConfig = findConfig(dir, 'remix.config');
  if (!remixConfig) return true;

  // `remix.config` and `vite.config` exist, so check a couple other ways

  // Is `vite:build` found in the `package.json` "build" script?
  const pkg: PackageJson = JSON.parse(
    readFileSync(join(dir, 'package.json'), 'utf8')
  );
  if (pkg.scripts?.build && /\bvite:build\b/.test(pkg.scripts.build)) {
    return true;
  }

  // Is `@remix-run/dev` package found in `vite.config`?
  const viteConfigContents = readFileSync(viteConfig, 'utf8');
  if (/['"]@remix-run\/dev['"]/.test(viteConfigContents)) {
    return true;
  }

  // If none of those conditions matched, then treat it as a legacy project and print a warning
  return false;
}
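
// Editor's note: a minimal usage sketch (not part of the original diff). The new
// dispatching `build` export in index.ts uses this helper to choose a code path
// (the project path below is hypothetical):
const exampleIsVite = isVite('/repo/apps/my-remix-app');
// true  -> the project uses the Remix Vite plugin and is handled by build-vite.ts
// false -> the project uses the classic Remix compiler and is handled by build-legacy.ts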

Some files were not shown because too many files have changed in this diff.