Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: add/static...@vercel/py (66 commits)
Commit SHA1s:
7d7f3df980, 5cf0c316e9, f4501433c8, 19831593ce, 5d85bb1426, f194d54b0c, 6542086843, 2721b3449d,
adb284519a, b2d91f3121, 32664cd13b, db468c489a, edd9bb506c, a72549a290, 4aa6a13912, 81ea0082f1,
6dff0875f5, 30aa392c0a, c4fc060030, 3fa08bf64f, 43056bde1f, a49966b9b4, 7f55de71bb, db8e36e04c,
82924bb5c4, 18b5fac93e, a6012e600b, c3abf73f58, 4873b8b379, 6248139281, 507a5de3cd, be1c78e72f,
c277c649c6, ed1dacd276, 144e890bfa, af097c2c06, 873a582986, 986b4c0b1a, 14071819ac, 2a8588a0c5,
0f7e89f76c, e68ed33a88, d3e98cdb73, bf4e77110f, 5b5197d2c5, a6ccf6c180, 8d848ebe8b, 6ef2c16d63,
6c71ceaaeb, 1dcb6dfc6f, 4fd24575e5, 8714f1905e, 2e69f2513d, 979e4b674a, 07fa47bcfb, 307c4fc377,
44868d79b6, df9a4afa5c, 8a6869bae2, a3fc3c1ca7, 44037c58be, 1a9419b690, 93d0e5966c, 306f3a1312,
9c67e8115e, b890ac1e44
@@ -1,11 +1,10 @@
node_modules
dist
examples
packages/build-utils/test/fixtures
packages/*/test/fixtures
packages/cli/@types
packages/cli/download
packages/cli/dist
packages/cli/test/fixtures
packages/cli/test/dev/fixtures
packages/cli/bin
packages/cli/link
@@ -13,6 +12,6 @@ packages/cli/src/util/dev/templates/*.ts
packages/client/tests/fixtures
packages/client/lib
packages/node/src/bridge.ts
packages/node/test/fixtures
packages/node-bridge/bridge.js
packages/node-bridge/launcher.js
packages/middleware/src/entries.js
.github/workflows/cancel.yml vendored (2 changed lines)
@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     timeout-minutes: 2
     steps:
-      - uses: styfle/cancel-workflow-action@0.4.1
+      - uses: styfle/cancel-workflow-action@0.9.1
         with:
           workflow_id: 849295, 849296, 849297, 849298
           access_token: ${{ github.token }}
.github/workflows/test-integration-dev.yml vendored (2 changed lines)
@@ -11,7 +11,7 @@ on:
 jobs:
   test:
     name: Dev
-    timeout-minutes: 60
+    timeout-minutes: 75
     strategy:
       fail-fast: false
       matrix:
.gitignore vendored (1 changed line)
@@ -27,3 +27,4 @@ test/lib/deployment/failed-page.txt
/public
__pycache__
.vercel
.output
@@ -332,7 +332,7 @@ This is an abstract enumeration type that is implemented by one of the following
- `nodejs10.x`
- `go1.x`
- `java11`
- `python3.8`
- `python3.9`
- `python3.6`
- `dotnetcore2.1`
- `ruby2.5`
@@ -398,12 +398,12 @@ This utility allows you to _scan_ the filesystem and return a [`Files`](#files)
 The following trivial example downloads everything to the filesystem, only to return it back (therefore just re-creating the passed-in [`Files`](#files)):
 
 ```js
-const { glob, download } = require('@vercel/build-utils')
+const { glob, download } = require('@vercel/build-utils');
 
 exports.build = ({ files, workPath }) => {
-  await download(files, workPath)
-  return glob('**', workPath)
-}
+  await download(files, workPath);
+  return glob('**', workPath);
+};
 ```
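As a companion to the example above, here is a minimal sketch of calling `glob` with an options object instead of a bare `workPath`, which is the call shape `convert-runtime-to-plugin.ts` uses later in this diff; the `.py` pattern and the logged paths are illustrative only and not part of the original docs:

```ts
import { glob } from '@vercel/build-utils';

export async function build({ workPath }: { workPath: string }) {
  // Same call shape as convert-runtime-to-plugin.ts below: the result is a
  // Files map keyed by path relative to the given cwd.
  const entrypoints = await glob('api/**/*.py', { cwd: workPath });
  console.log(Object.keys(entrypoints)); // e.g. ['api/index.py', 'api/users/get.py']
  return entrypoints;
}
```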
 
 ### `getWritableDirectory()`
@@ -14,8 +14,6 @@ const frameworks = (_frameworks as Framework[])
    sort: undefined,
    dependency: undefined,
    defaultRoutes: undefined,
    devCommand: undefined,
    buildCommand: undefined,
  };

  if (framework.logo) {
@@ -9,7 +9,7 @@
     "lint": "next lint"
   },
   "dependencies": {
-    "next": "11.1.2",
+    "next": "12.0.1",
     "react": "17.0.2",
     "react-dom": "17.0.2"
   },
@@ -43,7 +43,14 @@
|
||||
core-js-pure "^3.16.0"
|
||||
regenerator-runtime "^0.13.4"
|
||||
|
||||
"@babel/runtime@7.15.3", "@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2":
|
||||
"@babel/runtime@7.15.4":
|
||||
version "7.15.4"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.15.4.tgz#fd17d16bfdf878e6dd02d19753a39fa8a8d9c84a"
|
||||
integrity sha512-99catp6bHCaxr4sJ/DbTGgHS4+Rs2RVd2g7iOap6SLGPDknRK9ztKNsE/Fg6QhSeh1FGE5f6gHGQmvvn3I3xhw==
|
||||
dependencies:
|
||||
regenerator-runtime "^0.13.4"
|
||||
|
||||
"@babel/runtime@^7.10.2", "@babel/runtime@^7.11.2":
|
||||
version "7.15.3"
|
||||
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.15.3.tgz#2e1c2880ca118e5b2f9988322bd8a7656a32502b"
|
||||
integrity sha512-OvwMLqNXkCXSz1kSm58sEsNuhqOx/fKpnUnKnFB5v8uDda5bLNEHNgKPvhDN6IU0LDcnHQ90LlJ0Q6jnyBSIBA==
|
||||
@@ -112,10 +119,10 @@
|
||||
resolved "https://registry.yarnpkg.com/@napi-rs/triples/-/triples-1.0.3.tgz#76d6d0c3f4d16013c61e45dfca5ff1e6c31ae53c"
|
||||
integrity sha512-jDJTpta+P4p1NZTFVLHJ/TLFVYVcOqv6l8xwOeBKNPMgY/zDYH/YH7SJbvrr/h1RcS9GzbPcLKGzpuK9cV56UA==
|
||||
|
||||
"@next/env@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/env/-/env-11.1.2.tgz#27996efbbc54c5f949f5e8c0a156e3aa48369b99"
|
||||
integrity sha512-+fteyVdQ7C/OoulfcF6vd1Yk0FEli4453gr8kSFbU8sKseNSizYq6df5MKz/AjwLptsxrUeIkgBdAzbziyJ3mA==
|
||||
"@next/env@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/env/-/env-12.0.1.tgz#d57141ef1fe844f6f7c18cdaf29a712788c18ca4"
|
||||
integrity sha512-+eJ8mQbAcV/ZILRAgIx9xwDg6hrqm6m/7QLfEvsf2BPnsh+fwU4Xf1zgcbyqD2V4ja4OTWG6ow+Hiukgap3mZQ==
|
||||
|
||||
"@next/eslint-plugin-next@11.1.0":
|
||||
version "11.1.0"
|
||||
@@ -124,15 +131,15 @@
|
||||
dependencies:
|
||||
glob "7.1.7"
|
||||
|
||||
"@next/polyfill-module@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/polyfill-module/-/polyfill-module-11.1.2.tgz#1fe92c364fdc81add775a16c678f5057c6aace98"
|
||||
integrity sha512-xZmixqADM3xxtqBV0TpAwSFzWJP0MOQzRfzItHXf1LdQHWb0yofHHC+7eOrPFic8+ZGz5y7BdPkkgR1S25OymA==
|
||||
"@next/polyfill-module@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/polyfill-module/-/polyfill-module-12.0.1.tgz#d20abf06f686ee7a8bd0d9056accfd0662f19e87"
|
||||
integrity sha512-fTrndwGuvrQO+4myVGcPtsYI4/tmZBhHHJId7MSHWz+9gW4NFgsmDlr8OI9Th2ZXpqk5WHLsTYQ+dLiQp1zV4g==
|
||||
|
||||
"@next/react-dev-overlay@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/react-dev-overlay/-/react-dev-overlay-11.1.2.tgz#73795dc5454b7af168bac93df7099965ebb603be"
|
||||
integrity sha512-rDF/mGY2NC69mMg2vDqzVpCOlWqnwPUXB2zkARhvknUHyS6QJphPYv9ozoPJuoT/QBs49JJd9KWaAzVBvq920A==
|
||||
"@next/react-dev-overlay@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/react-dev-overlay/-/react-dev-overlay-12.0.1.tgz#134299ae44fad5a59448d5e671518074f37cba95"
|
||||
integrity sha512-dLv1to40bvadbr0VO8pBsbr9ddgktCLilfejOpEFQkOOrdQBUuIfegqqEDiCL9THkAO3QGYY4t/ZPfv9wrxLZQ==
|
||||
dependencies:
|
||||
"@babel/code-frame" "7.12.11"
|
||||
anser "1.4.9"
|
||||
@@ -144,32 +151,67 @@
|
||||
shell-quote "1.7.2"
|
||||
source-map "0.8.0-beta.0"
|
||||
stacktrace-parser "0.1.10"
|
||||
strip-ansi "6.0.0"
|
||||
strip-ansi "6.0.1"
|
||||
|
||||
"@next/react-refresh-utils@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/react-refresh-utils/-/react-refresh-utils-11.1.2.tgz#44ea40d8e773e4b77bad85e24f6ac041d5e4b4a5"
|
||||
integrity sha512-hsoJmPfhVqjZ8w4IFzoo8SyECVnN+8WMnImTbTKrRUHOVJcYMmKLL7xf7T0ft00tWwAl/3f3Q3poWIN2Ueql/Q==
|
||||
"@next/react-refresh-utils@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/react-refresh-utils/-/react-refresh-utils-12.0.1.tgz#219be7a81696a7bd1e2d4ee397ca100eb8262f23"
|
||||
integrity sha512-CjTBR9a6ai+2fUT8KFya9AiTaCnfDY34H6pDmtdJdkD+vY08AwtPpv10kzsgNEhsL06210yVzH59IsEQLBIllA==
|
||||
|
||||
"@next/swc-darwin-arm64@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-11.1.2.tgz#93226c38db488c4b62b30a53b530e87c969b8251"
|
||||
integrity sha512-hZuwOlGOwBZADA8EyDYyjx3+4JGIGjSHDHWrmpI7g5rFmQNltjlbaefAbiU5Kk7j3BUSDwt30quJRFv3nyJQ0w==
|
||||
"@next/swc-android-arm64@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-android-arm64/-/swc-android-arm64-12.0.1.tgz#c776853e0911c12fcc69a69cd7ab111dff29f8d2"
|
||||
integrity sha512-zI/6zsZuO2igknzHzfaQep0PeD3d4/qdjXUcQLwLHJQtGdhPvZFMke1z3BBWZqePHVsR1JPjE4QTii7udF5qsQ==
|
||||
|
||||
"@next/swc-darwin-x64@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-11.1.2.tgz#792003989f560c00677b5daeff360b35b510db83"
|
||||
integrity sha512-PGOp0E1GisU+EJJlsmJVGE+aPYD0Uh7zqgsrpD3F/Y3766Ptfbe1lEPPWnRDl+OzSSrSrX1lkyM/Jlmh5OwNvA==
|
||||
"@next/swc-darwin-arm64@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-darwin-arm64/-/swc-darwin-arm64-12.0.1.tgz#bee9c5932511c993ab384ef9aedb86c02532d41e"
|
||||
integrity sha512-vRfHz7rEt9+TTfwi3uY9ObUSLhzMmgVZ96b+yOSmZ6Kxs/V46IXHOLawCnoldXylpskZ/+HTWcrB1D3aimGeZA==
|
||||
|
||||
"@next/swc-linux-x64-gnu@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-11.1.2.tgz#8216b2ae1f21f0112958735c39dd861088108f37"
|
||||
integrity sha512-YcDHTJjn/8RqvyJVB6pvEKXihDcdrOwga3GfMv/QtVeLphTouY4BIcEUfrG5+26Nf37MP1ywN3RRl1TxpurAsQ==
|
||||
"@next/swc-darwin-x64@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-darwin-x64/-/swc-darwin-x64-12.0.1.tgz#a0bdcbcf11b8b62190ec8e0406cecbbcc810b7fc"
|
||||
integrity sha512-mM7QLIqRUqR8I74gbZ4Uq+dY8k3Whrs98wK+vPurmDTBhXhaVnAYblEkEwe0DJGqlmjD4w6faYfCydmFI69jqw==
|
||||
|
||||
"@next/swc-win32-x64-msvc@11.1.2":
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-11.1.2.tgz#e15824405df137129918205e43cb5e9339589745"
|
||||
integrity sha512-e/pIKVdB+tGQYa1cW3sAeHm8gzEri/HYLZHT4WZojrUxgWXqx8pk7S7Xs47uBcFTqBDRvK3EcQpPLf3XdVsDdg==
|
||||
"@next/swc-linux-arm-gnueabihf@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm-gnueabihf/-/swc-linux-arm-gnueabihf-12.0.1.tgz#d0132637288f452ad5c6a6161e42aebcd4355f82"
|
||||
integrity sha512-QF5LVyAWTah5i1p/yG4a8nTGRXerHoDkS3kWYCdjcwlALOiAJ9m0GUTks/O47izNokBAbZnL7URUdvtGFjP0Ng==
|
||||
|
||||
"@next/swc-linux-arm64-gnu@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-gnu/-/swc-linux-arm64-gnu-12.0.1.tgz#6b310344b9bac7700eaff8f4c536540b1226e378"
|
||||
integrity sha512-ETFUh373WsjUJJr32GHSDlVSgwFwS+EJUJuSH40Pr4xB6250YxuRk8ccF6QR5LHqTL4tbbVEEfCD8sZVnccP8w==
|
||||
|
||||
"@next/swc-linux-arm64-musl@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-linux-arm64-musl/-/swc-linux-arm64-musl-12.0.1.tgz#8ab1fc81d18bbb70bb15bcc4250382257bba6298"
|
||||
integrity sha512-pfnXNjKywXyp2DJsjFhkfOlvcNu9xa8HgEhCUKXm1OZ4pGnpeb1+UD4t5Pn9b9ggiWPzauZK1abR/9nShvbSzw==
|
||||
|
||||
"@next/swc-linux-x64-gnu@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-gnu/-/swc-linux-x64-gnu-12.0.1.tgz#a664268aedec402da5df34efac1b337d9b0e492e"
|
||||
integrity sha512-d9cXS27Ar7TTtA3BJ8gxosDDdVNSFy4MQiwsszKlEiqfGrnINeXKdVgeiOa+xxq+JxNvPzonp4sbX6k8InIocg==
|
||||
|
||||
"@next/swc-linux-x64-musl@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-linux-x64-musl/-/swc-linux-x64-musl-12.0.1.tgz#4b7e5fee5a62adb6d9c9aad1a4aa00a6a09b53dc"
|
||||
integrity sha512-4SAmi7riavU6TFGX7wQFioFi/vx8uJ2/Cx7ZfrYiZzzKmmuu2eM8onW1kcKu+aQD777x/kvzW4+2pWkM2gyPOA==
|
||||
|
||||
"@next/swc-win32-arm64-msvc@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-win32-arm64-msvc/-/swc-win32-arm64-msvc-12.0.1.tgz#9ff0c2a2f00f41d40bd44d6da195bdf649d807c6"
|
||||
integrity sha512-JRad3QyXvs5zDkeEmc6z5tEvm/ZZnjnsBY921zWw7OIcIZR5wAs+1AnRVjIxHTEHSExxOvBgPyEMpgVkB8OyxQ==
|
||||
|
||||
"@next/swc-win32-ia32-msvc@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-win32-ia32-msvc/-/swc-win32-ia32-msvc-12.0.1.tgz#5a927ec832b184ce2e35f8ec668daa34175e47d0"
|
||||
integrity sha512-ierQmzVWPi6a7PqrdgfI6nrQ/SWJ9W5jllByyQeFIOKhOzZiz030Tw+U6V7NqE3gGNeRwpj56Iya8nUb3hlM1g==
|
||||
|
||||
"@next/swc-win32-x64-msvc@12.0.1":
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/@next/swc-win32-x64-msvc/-/swc-win32-x64-msvc-12.0.1.tgz#fe145cf7baf79564159a87a545e430f96c835578"
|
||||
integrity sha512-li3CCXpdMX0+wJlQpy0xZmHCgHMebaBf5X2BIAJrv8cQXYc6dejeojttXLFNCF0dNAo3UzlbP6h7N+8p6Wbakw==
|
||||
|
||||
"@node-rs/helper@1.2.1":
|
||||
version "1.2.1"
|
||||
@@ -258,6 +300,16 @@ acorn-jsx@^5.3.1:
|
||||
resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937"
|
||||
integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==
|
||||
|
||||
acorn@8.5.0:
|
||||
version "8.5.0"
|
||||
resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.5.0.tgz#4512ccb99b3698c752591e9bb4472e38ad43cee2"
|
||||
integrity sha512-yXbYeFy+jUuYd3/CDcg2NkIYE991XYX/bje7LmjJigUciaeO1JR4XxXgCIV1/Zc/dRuFEyw1L0pbA+qynJkW5Q==
|
||||
|
||||
acorn@^6.2.1:
|
||||
version "6.4.2"
|
||||
resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.2.tgz#35866fd710528e92de10cf06016498e47e39e1e6"
|
||||
integrity sha512-XtGIhXwF8YM8bJhGxG5kXgjkEuNGLTkoYqVE+KMR+aspr4KGYmKYg7yUe3KghyQ9yheNwLnjmzh/7+gfDBmHCQ==
|
||||
|
||||
acorn@^7.4.0:
|
||||
version "7.4.1"
|
||||
resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa"
|
||||
@@ -298,6 +350,11 @@ ansi-regex@^5.0.0:
|
||||
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75"
|
||||
integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==
|
||||
|
||||
ansi-regex@^5.0.1:
|
||||
version "5.0.1"
|
||||
resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304"
|
||||
integrity sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==
|
||||
|
||||
ansi-styles@^3.2.1:
|
||||
version "3.2.1"
|
||||
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
|
||||
@@ -390,24 +447,11 @@ assert@2.0.0:
|
||||
object-is "^1.0.1"
|
||||
util "^0.12.0"
|
||||
|
||||
assert@^1.1.1:
|
||||
version "1.5.0"
|
||||
resolved "https://registry.yarnpkg.com/assert/-/assert-1.5.0.tgz#55c109aaf6e0aefdb3dc4b71240c70bf574b18eb"
|
||||
integrity sha512-EDsgawzwoun2CZkCgtxJbv392v4nbk9XDD06zI+kQYoBM/3RBWLlEyJARDOmhAAosBjWACEkKL6S+lIZtcAubA==
|
||||
dependencies:
|
||||
object-assign "^4.1.1"
|
||||
util "0.10.3"
|
||||
|
||||
ast-types-flow@^0.0.7:
|
||||
version "0.0.7"
|
||||
resolved "https://registry.yarnpkg.com/ast-types-flow/-/ast-types-flow-0.0.7.tgz#f70b735c6bca1a5c9c22d982c3e39e7feba3bdad"
|
||||
integrity sha1-9wtzXGvKGlycItmCw+Oef+ujva0=
|
||||
|
||||
ast-types@0.13.2:
|
||||
version "0.13.2"
|
||||
resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.13.2.tgz#df39b677a911a83f3a049644fb74fdded23cea48"
|
||||
integrity sha512-uWMHxJxtfj/1oZClOxDEV1sQ1HCDkA4MG8Gr69KKeBjEVH0R84WlejZ0y2DcwyBlpAEMltmVYkVgqfLFb2oyiA==
|
||||
|
||||
astral-regex@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-2.0.0.tgz#483143c567aeed4785759c0865786dc77d7d2e31"
|
||||
@@ -532,7 +576,7 @@ browserify-sign@^4.0.0:
|
||||
readable-stream "^3.6.0"
|
||||
safe-buffer "^5.2.0"
|
||||
|
||||
browserify-zlib@0.2.0, browserify-zlib@^0.2.0:
|
||||
browserify-zlib@0.2.0:
|
||||
version "0.2.0"
|
||||
resolved "https://registry.yarnpkg.com/browserify-zlib/-/browserify-zlib-0.2.0.tgz#2869459d9aa3be245fe8fe2ca1f46e2e7f54d73f"
|
||||
integrity sha512-Z942RysHXmJrhqk88FmKBVq/v5tqmSkDz7p54G/MGyjMnCFFnC79XWNbg+Vta8W6Wb2qtSZTSxIGkJrRpCFEiA==
|
||||
@@ -563,15 +607,6 @@ buffer@5.6.0:
|
||||
base64-js "^1.0.2"
|
||||
ieee754 "^1.1.4"
|
||||
|
||||
buffer@^4.3.0:
|
||||
version "4.9.2"
|
||||
resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8"
|
||||
integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==
|
||||
dependencies:
|
||||
base64-js "^1.0.2"
|
||||
ieee754 "^1.1.4"
|
||||
isarray "^1.0.0"
|
||||
|
||||
builtin-status-codes@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz#85982878e21b98e1c66425e03d0174788f569ee8"
|
||||
@@ -692,12 +727,7 @@ concat-map@0.0.1:
|
||||
resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
|
||||
integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=
|
||||
|
||||
console-browserify@^1.1.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336"
|
||||
integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==
|
||||
|
||||
constants-browserify@1.0.0, constants-browserify@^1.0.0:
|
||||
constants-browserify@1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75"
|
||||
integrity sha1-wguW2MYXdIqvHBYCF2DNJ/y4y3U=
|
||||
@@ -714,11 +744,6 @@ core-js-pure@^3.16.0:
|
||||
resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.16.1.tgz#b997df2669c957a5b29f06e95813a171f993592e"
|
||||
integrity sha512-TyofCdMzx0KMhi84mVRS8rL1XsRk2SPUNz2azmth53iRN0/08Uim9fdhQTaZTG1LqaXHYVci4RDHka6WrXfnvg==
|
||||
|
||||
core-util-is@~1.0.0:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
|
||||
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
|
||||
|
||||
create-ecdh@^4.0.0:
|
||||
version "4.0.4"
|
||||
resolved "https://registry.yarnpkg.com/create-ecdh/-/create-ecdh-4.0.4.tgz#d6e7f4bffa66736085a0762fd3a632684dabcc4e"
|
||||
@@ -759,7 +784,7 @@ cross-spawn@^7.0.2:
|
||||
shebang-command "^2.0.0"
|
||||
which "^2.0.1"
|
||||
|
||||
crypto-browserify@3.12.0, crypto-browserify@^3.11.0:
|
||||
crypto-browserify@3.12.0:
|
||||
version "3.12.0"
|
||||
resolved "https://registry.yarnpkg.com/crypto-browserify/-/crypto-browserify-3.12.0.tgz#396cf9f3137f03e4b8e532c58f698254e00f80ec"
|
||||
integrity sha512-fz4spIh+znjO2VjL+IdhEpRJ3YN6sMzITSBijk6FK2UvTqruSQW+/cCZTSNsMiZNvUeq0CqurF+dAbyiGOY6Wg==
|
||||
@@ -886,11 +911,6 @@ domain-browser@4.19.0:
|
||||
resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-4.19.0.tgz#1093e17c0a17dbd521182fe90d49ac1370054af1"
|
||||
integrity sha512-fRA+BaAWOR/yr/t7T9E9GJztHPeFjj8U35ajyAjCDtAAnTn1Rc1f6W6VGPJrO1tkQv9zWu+JRof7z6oQtiYVFQ==
|
||||
|
||||
domain-browser@^1.1.1:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/domain-browser/-/domain-browser-1.2.0.tgz#3d31f50191a6749dd1375a7f522e823d42e54eda"
|
||||
integrity sha512-jnjyiM6eRyZl2H+W8Q/zLMA481hzi0eszAaBUzIVnmYVDBbnLxVNnfu1HgEBvCbL+71FrxMl3E6lpKH7Ge3OXA==
|
||||
|
||||
electron-to-chromium@^1.3.723:
|
||||
version "1.3.802"
|
||||
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.802.tgz#0afa989321de3e904ac653ee79e0d642883731a1"
|
||||
@@ -1219,7 +1239,7 @@ etag@1.8.1:
|
||||
resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
|
||||
integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc=
|
||||
|
||||
events@^3.0.0:
|
||||
events@3.3.0:
|
||||
version "3.3.0"
|
||||
resolved "https://registry.yarnpkg.com/events/-/events-3.3.0.tgz#31a95ad0a924e2d2c419a813aeb2c4e878ea7400"
|
||||
integrity sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==
|
||||
@@ -1486,7 +1506,7 @@ http-errors@1.7.3:
|
||||
statuses ">= 1.5.0 < 2"
|
||||
toidentifier "1.0.0"
|
||||
|
||||
https-browserify@1.0.0, https-browserify@^1.0.0:
|
||||
https-browserify@1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73"
|
||||
integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=
|
||||
@@ -1548,21 +1568,11 @@ inflight@^1.0.4:
|
||||
once "^1.3.0"
|
||||
wrappy "1"
|
||||
|
||||
inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.1, inherits@~2.0.3, inherits@~2.0.4:
|
||||
inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, inherits@~2.0.3, inherits@~2.0.4:
|
||||
version "2.0.4"
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
|
||||
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
|
||||
|
||||
inherits@2.0.1:
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.1.tgz#b17d08d326b4423e568eff719f91b0b1cbdf69f1"
|
||||
integrity sha1-sX0I0ya0Qj5Wjv9xn5GwscvfafE=
|
||||
|
||||
inherits@2.0.3:
|
||||
version "2.0.3"
|
||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
|
||||
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
|
||||
|
||||
internal-slot@^1.0.3:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/internal-slot/-/internal-slot-1.0.3.tgz#7347e307deeea2faac2ac6205d4bc7d34967f59c"
|
||||
@@ -1706,11 +1716,6 @@ is-typed-array@^1.1.3, is-typed-array@^1.1.6:
|
||||
foreach "^2.0.5"
|
||||
has-tostringtag "^1.0.0"
|
||||
|
||||
isarray@^1.0.0, isarray@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
|
||||
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
|
||||
|
||||
isexe@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
||||
@@ -1952,32 +1957,30 @@ nanoid@^3.1.23:
|
||||
resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.23.tgz#f744086ce7c2bc47ee0a8472574d5c78e4183a81"
|
||||
integrity sha512-FiB0kzdP0FFVGDKlRLEQ1BgDzU87dy5NnzjeW9YZNt+/c3+q82EQDUwniSAUxp/F0gFNI1ZhKU1FqYsMuqZVnw==
|
||||
|
||||
native-url@0.3.4:
|
||||
version "0.3.4"
|
||||
resolved "https://registry.yarnpkg.com/native-url/-/native-url-0.3.4.tgz#29c943172aed86c63cee62c8c04db7f5756661f8"
|
||||
integrity sha512-6iM8R99ze45ivyH8vybJ7X0yekIcPf5GgLV5K0ENCbmRcaRIDoj37BC8iLEmaaBfqqb8enuZ5p0uhY+lVAbAcA==
|
||||
dependencies:
|
||||
querystring "^0.2.0"
|
||||
|
||||
natural-compare@^1.4.0:
|
||||
version "1.4.0"
|
||||
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
|
||||
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
|
||||
|
||||
next@11.1.2:
|
||||
version "11.1.2"
|
||||
resolved "https://registry.yarnpkg.com/next/-/next-11.1.2.tgz#527475787a9a362f1bc916962b0c0655cc05bc91"
|
||||
integrity sha512-azEYL0L+wFjv8lstLru3bgvrzPvK0P7/bz6B/4EJ9sYkXeW8r5Bjh78D/Ol7VOg0EIPz0CXoe72hzAlSAXo9hw==
|
||||
neo-async@^2.6.1:
|
||||
version "2.6.2"
|
||||
resolved "https://registry.yarnpkg.com/neo-async/-/neo-async-2.6.2.tgz#b4aafb93e3aeb2d8174ca53cf163ab7d7308305f"
|
||||
integrity sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==
|
||||
|
||||
next@12.0.1:
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/next/-/next-12.0.1.tgz#7b82a73bc185bfda7372e7e8309f9b38e6be9cb0"
|
||||
integrity sha512-4MNXAbD9+Tmtejg0TOKbaP52Cgu4mIn2ejKMLHWV0acxWGkkcE7QvdZwvg5pkg3fQBMrgucOxxtmw4D7yWaZvg==
|
||||
dependencies:
|
||||
"@babel/runtime" "7.15.3"
|
||||
"@babel/runtime" "7.15.4"
|
||||
"@hapi/accept" "5.0.2"
|
||||
"@next/env" "11.1.2"
|
||||
"@next/polyfill-module" "11.1.2"
|
||||
"@next/react-dev-overlay" "11.1.2"
|
||||
"@next/react-refresh-utils" "11.1.2"
|
||||
"@next/env" "12.0.1"
|
||||
"@next/polyfill-module" "12.0.1"
|
||||
"@next/react-dev-overlay" "12.0.1"
|
||||
"@next/react-refresh-utils" "12.0.1"
|
||||
"@node-rs/helper" "1.2.1"
|
||||
acorn "8.5.0"
|
||||
assert "2.0.0"
|
||||
ast-types "0.13.2"
|
||||
browserify-zlib "0.2.0"
|
||||
browserslist "4.16.6"
|
||||
buffer "5.6.0"
|
||||
@@ -1990,40 +1993,48 @@ next@11.1.2:
|
||||
domain-browser "4.19.0"
|
||||
encoding "0.1.13"
|
||||
etag "1.8.1"
|
||||
events "3.3.0"
|
||||
find-cache-dir "3.3.1"
|
||||
get-orientation "1.1.2"
|
||||
https-browserify "1.0.0"
|
||||
image-size "1.0.0"
|
||||
jest-worker "27.0.0-next.5"
|
||||
native-url "0.3.4"
|
||||
node-fetch "2.6.1"
|
||||
node-html-parser "1.4.9"
|
||||
node-libs-browser "^2.2.1"
|
||||
os-browserify "0.3.0"
|
||||
p-limit "3.1.0"
|
||||
path-browserify "1.0.1"
|
||||
pnp-webpack-plugin "1.6.4"
|
||||
postcss "8.2.15"
|
||||
process "0.11.10"
|
||||
querystring-es3 "0.2.1"
|
||||
raw-body "2.4.1"
|
||||
react-is "17.0.2"
|
||||
react-refresh "0.8.3"
|
||||
react-server-dom-webpack "0.0.0-experimental-3c4c1c470-20211021"
|
||||
regenerator-runtime "0.13.4"
|
||||
stream-browserify "3.0.0"
|
||||
stream-http "3.1.1"
|
||||
string_decoder "1.3.0"
|
||||
styled-jsx "4.0.1"
|
||||
styled-jsx "5.0.0-beta.3"
|
||||
timers-browserify "2.0.12"
|
||||
tty-browserify "0.0.1"
|
||||
use-subscription "1.5.1"
|
||||
util "0.12.4"
|
||||
vm-browserify "1.1.2"
|
||||
watchpack "2.1.1"
|
||||
web-streams-polyfill "3.0.3"
|
||||
optionalDependencies:
|
||||
"@next/swc-darwin-arm64" "11.1.2"
|
||||
"@next/swc-darwin-x64" "11.1.2"
|
||||
"@next/swc-linux-x64-gnu" "11.1.2"
|
||||
"@next/swc-win32-x64-msvc" "11.1.2"
|
||||
"@next/swc-android-arm64" "12.0.1"
|
||||
"@next/swc-darwin-arm64" "12.0.1"
|
||||
"@next/swc-darwin-x64" "12.0.1"
|
||||
"@next/swc-linux-arm-gnueabihf" "12.0.1"
|
||||
"@next/swc-linux-arm64-gnu" "12.0.1"
|
||||
"@next/swc-linux-arm64-musl" "12.0.1"
|
||||
"@next/swc-linux-x64-gnu" "12.0.1"
|
||||
"@next/swc-linux-x64-musl" "12.0.1"
|
||||
"@next/swc-win32-arm64-msvc" "12.0.1"
|
||||
"@next/swc-win32-ia32-msvc" "12.0.1"
|
||||
"@next/swc-win32-x64-msvc" "12.0.1"
|
||||
|
||||
node-fetch@2.6.1:
|
||||
version "2.6.1"
|
||||
@@ -2037,35 +2048,6 @@ node-html-parser@1.4.9:
|
||||
dependencies:
|
||||
he "1.2.0"
|
||||
|
||||
node-libs-browser@^2.2.1:
|
||||
version "2.2.1"
|
||||
resolved "https://registry.yarnpkg.com/node-libs-browser/-/node-libs-browser-2.2.1.tgz#b64f513d18338625f90346d27b0d235e631f6425"
|
||||
integrity sha512-h/zcD8H9kaDZ9ALUWwlBUDo6TKF8a7qBSCSEGfjTVIYeqsioSKaAX+BN7NgiMGp6iSIXZ3PxgCu8KS3b71YK5Q==
|
||||
dependencies:
|
||||
assert "^1.1.1"
|
||||
browserify-zlib "^0.2.0"
|
||||
buffer "^4.3.0"
|
||||
console-browserify "^1.1.0"
|
||||
constants-browserify "^1.0.0"
|
||||
crypto-browserify "^3.11.0"
|
||||
domain-browser "^1.1.1"
|
||||
events "^3.0.0"
|
||||
https-browserify "^1.0.0"
|
||||
os-browserify "^0.3.0"
|
||||
path-browserify "0.0.1"
|
||||
process "^0.11.10"
|
||||
punycode "^1.2.4"
|
||||
querystring-es3 "^0.2.0"
|
||||
readable-stream "^2.3.3"
|
||||
stream-browserify "^2.0.1"
|
||||
stream-http "^2.7.2"
|
||||
string_decoder "^1.0.0"
|
||||
timers-browserify "^2.0.4"
|
||||
tty-browserify "0.0.0"
|
||||
url "^0.11.0"
|
||||
util "^0.11.0"
|
||||
vm-browserify "^1.0.1"
|
||||
|
||||
node-releases@^1.1.71:
|
||||
version "1.1.74"
|
||||
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.74.tgz#e5866488080ebaa70a93b91144ccde06f3c3463e"
|
||||
@@ -2166,7 +2148,7 @@ optionator@^0.9.1:
|
||||
type-check "^0.4.0"
|
||||
word-wrap "^1.2.3"
|
||||
|
||||
os-browserify@0.3.0, os-browserify@^0.3.0:
|
||||
os-browserify@0.3.0:
|
||||
version "0.3.0"
|
||||
resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27"
|
||||
integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=
|
||||
@@ -2247,11 +2229,6 @@ parse-json@^4.0.0:
|
||||
error-ex "^1.3.1"
|
||||
json-parse-better-errors "^1.0.1"
|
||||
|
||||
path-browserify@0.0.1:
|
||||
version "0.0.1"
|
||||
resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-0.0.1.tgz#e6c4ddd7ed3aa27c68a20cc4e50e1a4ee83bbc4a"
|
||||
integrity sha512-BapA40NHICOS+USX9SN4tyhq+A2RrN/Ws5F0Z5aMHDp98Fl86lX8Oti8B7uN93L4Ifv4fHOEA+pQw87gmMO/lQ==
|
||||
|
||||
path-browserify@1.0.1:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/path-browserify/-/path-browserify-1.0.1.tgz#d98454a9c3753d5790860f16f68867b9e46be1fd"
|
||||
@@ -2341,13 +2318,6 @@ platform@1.3.6:
|
||||
resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7"
|
||||
integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==
|
||||
|
||||
pnp-webpack-plugin@1.6.4:
|
||||
version "1.6.4"
|
||||
resolved "https://registry.yarnpkg.com/pnp-webpack-plugin/-/pnp-webpack-plugin-1.6.4.tgz#c9711ac4dc48a685dabafc86f8b6dd9f8df84149"
|
||||
integrity sha512-7Wjy+9E3WwLOEL30D+m8TSTF7qJJUJLONBnwQp0518siuMxUQUbgZwssaFX+QKlZkjHZcw/IpZCt/H0srrntSg==
|
||||
dependencies:
|
||||
ts-pnp "^1.1.6"
|
||||
|
||||
postcss@8.2.15:
|
||||
version "8.2.15"
|
||||
resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.2.15.tgz#9e66ccf07292817d226fc315cbbf9bc148fbca65"
|
||||
@@ -2362,12 +2332,7 @@ prelude-ls@^1.2.1:
|
||||
resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
|
||||
integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==
|
||||
|
||||
process-nextick-args@~2.0.0:
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
|
||||
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
|
||||
|
||||
process@0.11.10, process@^0.11.10:
|
||||
process@0.11.10:
|
||||
version "0.11.10"
|
||||
resolved "https://registry.yarnpkg.com/process/-/process-0.11.10.tgz#7332300e840161bda3e69a1d1d91a7d4bc16f182"
|
||||
integrity sha1-czIwDoQBYb2j5podHZGn1LwW8YI=
|
||||
@@ -2398,36 +2363,16 @@ public-encrypt@^4.0.0:
|
||||
randombytes "^2.0.1"
|
||||
safe-buffer "^5.1.2"
|
||||
|
||||
punycode@1.3.2:
|
||||
version "1.3.2"
|
||||
resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d"
|
||||
integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0=
|
||||
|
||||
punycode@^1.2.4:
|
||||
version "1.4.1"
|
||||
resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
|
||||
integrity sha1-wNWmOycYgArY4esPpSachN1BhF4=
|
||||
|
||||
punycode@^2.1.0:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
|
||||
integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
|
||||
|
||||
querystring-es3@0.2.1, querystring-es3@^0.2.0:
|
||||
querystring-es3@0.2.1:
|
||||
version "0.2.1"
|
||||
resolved "https://registry.yarnpkg.com/querystring-es3/-/querystring-es3-0.2.1.tgz#9ec61f79049875707d69414596fd907a4d711e73"
|
||||
integrity sha1-nsYfeQSYdXB9aUFFlv2Qek1xHnM=
|
||||
|
||||
querystring@0.2.0:
|
||||
version "0.2.0"
|
||||
resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620"
|
||||
integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=
|
||||
|
||||
querystring@^0.2.0:
|
||||
version "0.2.1"
|
||||
resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.1.tgz#40d77615bb09d16902a85c3e38aa8b5ed761c2dd"
|
||||
integrity sha512-wkvS7mL/JMugcup3/rMitHmd9ecIGd2lhFhK9N3UUQ450h66d1r3Y9nvXzQAW1Lq+wyx61k/1pfKS5KuKiyEbg==
|
||||
|
||||
queue-microtask@^1.2.2:
|
||||
version "1.2.3"
|
||||
resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243"
|
||||
@@ -2489,6 +2434,16 @@ react-refresh@0.8.3:
|
||||
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f"
|
||||
integrity sha512-X8jZHc7nCMjaCqoU+V2I0cOhNW+QMBwSUkeXnTi8IPe6zaRWfn60ZzvFDZqWPfmSJfjub7dDW1SP0jaHWLu/hg==
|
||||
|
||||
react-server-dom-webpack@0.0.0-experimental-3c4c1c470-20211021:
|
||||
version "0.0.0-experimental-3c4c1c470-20211021"
|
||||
resolved "https://registry.yarnpkg.com/react-server-dom-webpack/-/react-server-dom-webpack-0.0.0-experimental-3c4c1c470-20211021.tgz#cdcaa2f19c8d820c1f4d31252319fb05e2de0e88"
|
||||
integrity sha512-YyRlED5kR0C2aQ3IJ/8BR2TELt51RcDZhnUDKz+m/HU+Gb/qak0CZkG0A8Zxffom9VI6HFkUj1dRFZqm0Lh9Pg==
|
||||
dependencies:
|
||||
acorn "^6.2.1"
|
||||
loose-envify "^1.1.0"
|
||||
neo-async "^2.6.1"
|
||||
object-assign "^4.1.1"
|
||||
|
||||
react@17.0.2:
|
||||
version "17.0.2"
|
||||
resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037"
|
||||
@@ -2514,19 +2469,6 @@ read-pkg@^3.0.0:
|
||||
normalize-package-data "^2.3.2"
|
||||
path-type "^3.0.0"
|
||||
|
||||
readable-stream@^2.0.2, readable-stream@^2.3.3, readable-stream@^2.3.6:
|
||||
version "2.3.7"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
|
||||
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
|
||||
dependencies:
|
||||
core-util-is "~1.0.0"
|
||||
inherits "~2.0.3"
|
||||
isarray "~1.0.0"
|
||||
process-nextick-args "~2.0.0"
|
||||
safe-buffer "~5.1.1"
|
||||
string_decoder "~1.1.1"
|
||||
util-deprecate "~1.0.1"
|
||||
|
||||
readable-stream@^3.5.0, readable-stream@^3.6.0:
|
||||
version "3.6.0"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
|
||||
@@ -2543,6 +2485,11 @@ readdirp@~3.5.0:
|
||||
dependencies:
|
||||
picomatch "^2.2.1"
|
||||
|
||||
regenerator-runtime@0.13.4:
|
||||
version "0.13.4"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.4.tgz#e96bf612a3362d12bb69f7e8f74ffeab25c7ac91"
|
||||
integrity sha512-plpwicqEzfEyTQohIKktWigcLzmNStMGwbOUbykx51/29Z3JOGYldaaNGK7ngNXV+UcoqvIMmloZ48Sr74sd+g==
|
||||
|
||||
regenerator-runtime@^0.13.4:
|
||||
version "0.13.9"
|
||||
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52"
|
||||
@@ -2619,7 +2566,7 @@ safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2,
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
|
||||
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
|
||||
|
||||
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
|
||||
safe-buffer@~5.1.1:
|
||||
version "5.1.2"
|
||||
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
|
||||
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
|
||||
@@ -2780,14 +2727,6 @@ stream-browserify@3.0.0:
|
||||
inherits "~2.0.4"
|
||||
readable-stream "^3.5.0"
|
||||
|
||||
stream-browserify@^2.0.1:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/stream-browserify/-/stream-browserify-2.0.2.tgz#87521d38a44aa7ee91ce1cd2a47df0cb49dd660b"
|
||||
integrity sha512-nX6hmklHs/gr2FuxYDltq8fJA1GDlxKQCz8O/IM4atRqBH8OORmBNgfvW5gG10GT/qQ9u0CzIvr2X5Pkt6ntqg==
|
||||
dependencies:
|
||||
inherits "~2.0.1"
|
||||
readable-stream "^2.0.2"
|
||||
|
||||
stream-http@3.1.1:
|
||||
version "3.1.1"
|
||||
resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-3.1.1.tgz#0370a8017cf8d050b9a8554afe608f043eaff564"
|
||||
@@ -2798,17 +2737,6 @@ stream-http@3.1.1:
|
||||
readable-stream "^3.6.0"
|
||||
xtend "^4.0.2"
|
||||
|
||||
stream-http@^2.7.2:
|
||||
version "2.8.3"
|
||||
resolved "https://registry.yarnpkg.com/stream-http/-/stream-http-2.8.3.tgz#b2d242469288a5a27ec4fe8933acf623de6514fc"
|
||||
integrity sha512-+TSkfINHDo4J+ZobQLWiMouQYB+UVYFttRA94FpEzzJ7ZdqcL4uUUQ7WkdkI4DSozGmgBUE/a47L+38PenXhUw==
|
||||
dependencies:
|
||||
builtin-status-codes "^3.0.0"
|
||||
inherits "^2.0.1"
|
||||
readable-stream "^2.3.6"
|
||||
to-arraybuffer "^1.0.0"
|
||||
xtend "^4.0.0"
|
||||
|
||||
stream-parser@^0.3.1:
|
||||
version "0.3.1"
|
||||
resolved "https://registry.yarnpkg.com/stream-parser/-/stream-parser-0.3.1.tgz#1618548694420021a1182ff0af1911c129761773"
|
||||
@@ -2860,21 +2788,21 @@ string.prototype.trimstart@^1.0.4:
|
||||
call-bind "^1.0.2"
|
||||
define-properties "^1.1.3"
|
||||
|
||||
string_decoder@1.3.0, string_decoder@^1.0.0, string_decoder@^1.1.1:
|
||||
string_decoder@1.3.0, string_decoder@^1.1.1:
|
||||
version "1.3.0"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
|
||||
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
|
||||
dependencies:
|
||||
safe-buffer "~5.2.0"
|
||||
|
||||
string_decoder@~1.1.1:
|
||||
version "1.1.1"
|
||||
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
|
||||
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
|
||||
strip-ansi@6.0.1:
|
||||
version "6.0.1"
|
||||
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9"
|
||||
integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==
|
||||
dependencies:
|
||||
safe-buffer "~5.1.0"
|
||||
ansi-regex "^5.0.1"
|
||||
|
||||
strip-ansi@6.0.0, strip-ansi@^6.0.0:
|
||||
strip-ansi@^6.0.0:
|
||||
version "6.0.0"
|
||||
resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532"
|
||||
integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==
|
||||
@@ -2891,10 +2819,10 @@ strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
|
||||
resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
|
||||
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
|
||||
|
||||
styled-jsx@4.0.1:
|
||||
version "4.0.1"
|
||||
resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-4.0.1.tgz#ae3f716eacc0792f7050389de88add6d5245b9e9"
|
||||
integrity sha512-Gcb49/dRB1k8B4hdK8vhW27Rlb2zujCk1fISrizCcToIs+55B4vmUM0N9Gi4nnVfFZWe55jRdWpAqH1ldAKWvQ==
|
||||
styled-jsx@5.0.0-beta.3:
|
||||
version "5.0.0-beta.3"
|
||||
resolved "https://registry.yarnpkg.com/styled-jsx/-/styled-jsx-5.0.0-beta.3.tgz#400d16179b5dff10d5954ab8be27a9a1b7780dd2"
|
||||
integrity sha512-HtDDGSFPvmjHIqWf9n8Oo54tAoY/DTplvlyOH2+YOtD80Sp31Ap8ffSmxhgk5EkUoJ7xepdXMGT650mSffWuRA==
|
||||
dependencies:
|
||||
"@babel/plugin-syntax-jsx" "7.14.5"
|
||||
"@babel/types" "7.15.0"
|
||||
@@ -2953,18 +2881,13 @@ text-table@^0.2.0:
|
||||
resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
|
||||
integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=
|
||||
|
||||
timers-browserify@2.0.12, timers-browserify@^2.0.4:
|
||||
timers-browserify@2.0.12:
|
||||
version "2.0.12"
|
||||
resolved "https://registry.yarnpkg.com/timers-browserify/-/timers-browserify-2.0.12.tgz#44a45c11fbf407f34f97bccd1577c652361b00ee"
|
||||
integrity sha512-9phl76Cqm6FhSX9Xe1ZUAMLtm1BLkKj2Qd5ApyWkXzsMRaA7dgr81kf4wJmQf/hAvg8EEyJxDo3du/0KlhPiKQ==
|
||||
dependencies:
|
||||
setimmediate "^1.0.4"
|
||||
|
||||
to-arraybuffer@^1.0.0:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/to-arraybuffer/-/to-arraybuffer-1.0.1.tgz#7d229b1fcc637e466ca081180836a7aabff83f43"
|
||||
integrity sha1-fSKbH8xjfkZsoIEYCDanqr/4P0M=
|
||||
|
||||
to-fast-properties@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e"
|
||||
@@ -2989,11 +2912,6 @@ tr46@^1.0.1:
|
||||
dependencies:
|
||||
punycode "^2.1.0"
|
||||
|
||||
ts-pnp@^1.1.6:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/ts-pnp/-/ts-pnp-1.2.0.tgz#a500ad084b0798f1c3071af391e65912c86bca92"
|
||||
integrity sha512-csd+vJOb/gkzvcCHgTGSChYpy5f1/XKNsmvBGO4JXS+z1v2HobugDz4s1IeFXM3wZB44uczs+eazB5Q/ccdhQw==
|
||||
|
||||
tsconfig-paths@^3.9.0:
|
||||
version "3.10.1"
|
||||
resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.10.1.tgz#79ae67a68c15289fdf5c51cb74f397522d795ed7"
|
||||
@@ -3015,11 +2933,6 @@ tsutils@^3.21.0:
|
||||
dependencies:
|
||||
tslib "^1.8.1"
|
||||
|
||||
tty-browserify@0.0.0:
|
||||
version "0.0.0"
|
||||
resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.0.tgz#a157ba402da24e9bf957f9aa69d524eed42901a6"
|
||||
integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY=
|
||||
|
||||
tty-browserify@0.0.1:
|
||||
version "0.0.1"
|
||||
resolved "https://registry.yarnpkg.com/tty-browserify/-/tty-browserify-0.0.1.tgz#3f05251ee17904dfd0677546670db9651682b811"
|
||||
@@ -3064,14 +2977,6 @@ uri-js@^4.2.2:
|
||||
dependencies:
|
||||
punycode "^2.1.0"
|
||||
|
||||
url@^0.11.0:
|
||||
version "0.11.0"
|
||||
resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1"
|
||||
integrity sha1-ODjpfPxgUh63PFJajlW/3Z4uKPE=
|
||||
dependencies:
|
||||
punycode "1.3.2"
|
||||
querystring "0.2.0"
|
||||
|
||||
use-subscription@1.5.1:
|
||||
version "1.5.1"
|
||||
resolved "https://registry.yarnpkg.com/use-subscription/-/use-subscription-1.5.1.tgz#73501107f02fad84c6dd57965beb0b75c68c42d1"
|
||||
@@ -3079,18 +2984,11 @@ use-subscription@1.5.1:
|
||||
dependencies:
|
||||
object-assign "^4.1.1"
|
||||
|
||||
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
|
||||
util-deprecate@^1.0.1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
|
||||
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
|
||||
|
||||
util@0.10.3:
|
||||
version "0.10.3"
|
||||
resolved "https://registry.yarnpkg.com/util/-/util-0.10.3.tgz#7afb1afe50805246489e3db7fe0ed379336ac0f9"
|
||||
integrity sha1-evsa/lCAUkZInj23/g7TeTNqwPk=
|
||||
dependencies:
|
||||
inherits "2.0.1"
|
||||
|
||||
util@0.12.4, util@^0.12.0:
|
||||
version "0.12.4"
|
||||
resolved "https://registry.yarnpkg.com/util/-/util-0.12.4.tgz#66121a31420df8f01ca0c464be15dfa1d1850253"
|
||||
@@ -3103,13 +3001,6 @@ util@0.12.4, util@^0.12.0:
|
||||
safe-buffer "^5.1.2"
|
||||
which-typed-array "^1.1.2"
|
||||
|
||||
util@^0.11.0:
|
||||
version "0.11.1"
|
||||
resolved "https://registry.yarnpkg.com/util/-/util-0.11.1.tgz#3236733720ec64bb27f6e26f421aaa2e1b588d61"
|
||||
integrity sha512-HShAsny+zS2TZfaXxD9tYj4HQGlBezXZMZuM/S5PKLLoZkShZiGk9o5CzukI1LVHZvjdvZ2Sj1aW/Ndn2NB/HQ==
|
||||
dependencies:
|
||||
inherits "2.0.3"
|
||||
|
||||
v8-compile-cache@^2.0.3:
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.3.0.tgz#2de19618c66dc247dcfb6f99338035d8245a2cee"
|
||||
@@ -3123,7 +3014,7 @@ validate-npm-package-license@^3.0.1:
|
||||
spdx-correct "^3.0.0"
|
||||
spdx-expression-parse "^3.0.0"
|
||||
|
||||
vm-browserify@1.1.2, vm-browserify@^1.0.1:
|
||||
vm-browserify@1.1.2:
|
||||
version "1.1.2"
|
||||
resolved "https://registry.yarnpkg.com/vm-browserify/-/vm-browserify-1.1.2.tgz#78641c488b8e6ca91a75f511e7a3b32a86e5dda0"
|
||||
integrity sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ==
|
||||
@@ -3136,6 +3027,11 @@ watchpack@2.1.1:
|
||||
glob-to-regexp "^0.4.1"
|
||||
graceful-fs "^4.1.2"
|
||||
|
||||
web-streams-polyfill@3.0.3:
|
||||
version "3.0.3"
|
||||
resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-3.0.3.tgz#f49e487eedeca47a207c1aee41ee5578f884b42f"
|
||||
integrity sha512-d2H/t0eqRNM4w2WvmTdoeIvzAUSpK7JmATB8Nr2lb7nQ9BTIJVjbQ/TRFVEh2gUH1HwclPdoPtfMoFfetXaZnA==
|
||||
|
||||
webidl-conversions@^4.0.2:
|
||||
version "4.0.2"
|
||||
resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad"
|
||||
@@ -3190,7 +3086,7 @@ wrappy@1:
|
||||
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
||||
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
|
||||
|
||||
xtend@^4.0.0, xtend@^4.0.2:
|
||||
xtend@^4.0.2:
|
||||
version "4.0.2"
|
||||
resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54"
|
||||
integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==
|
||||
|
||||
@@ -24,7 +24,7 @@
     "eslint-config-prettier": "8.3.0",
     "eslint-plugin-jest": "24.3.6",
     "husky": "6.0.0",
-    "jest": "27.0.6",
+    "jest": "27.3.1",
     "json5": "2.1.1",
     "lint-staged": "9.2.5",
     "node-fetch": "2.6.1",
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.12.3-canary.14",
+  "version": "2.12.3-canary.20",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -21,7 +21,7 @@
     "@types/async-retry": "^1.2.1",
     "@types/cross-spawn": "6.0.0",
     "@types/end-of-stream": "^1.4.0",
-    "@types/fs-extra": "^5.0.5",
+    "@types/fs-extra": "9.0.13",
     "@types/glob": "^7.1.1",
     "@types/jest": "27.0.1",
     "@types/js-yaml": "3.12.1",
@@ -30,7 +30,7 @@
     "@types/node-fetch": "^2.1.6",
     "@types/semver": "6.0.0",
     "@types/yazl": "^2.4.1",
-    "@vercel/frameworks": "0.5.1-canary.10",
+    "@vercel/frameworks": "0.5.1-canary.12",
     "@vercel/ncc": "0.24.0",
     "aggregate-error": "3.0.1",
     "async-retry": "1.2.3",
@@ -38,7 +38,7 @@
     "boxen": "4.2.0",
     "cross-spawn": "6.0.5",
     "end-of-stream": "1.4.1",
-    "fs-extra": "7.0.0",
+    "fs-extra": "10.0.0",
     "glob": "7.1.3",
     "into-stream": "5.0.0",
     "js-yaml": "3.13.1",
packages/build-utils/src/convert-runtime-to-plugin.ts (new file, 202 lines)
@@ -0,0 +1,202 @@
import fs from 'fs-extra';
import { join, dirname, relative } from 'path';
import glob from './fs/glob';
import { normalizePath } from './fs/normalize-path';
import { FILES_SYMBOL, getLambdaOptionsFromFunction, Lambda } from './lambda';
import type FileBlob from './file-blob';
import type { BuilderFunctions, BuildOptions, Files } from './types';
import minimatch from 'minimatch';

/**
 * Convert legacy Runtime to a Plugin.
 * @param buildRuntime - a legacy build() function from a Runtime
 * @param ext - the file extension, for example `.py`
 */
export function convertRuntimeToPlugin(
  buildRuntime: (options: BuildOptions) => Promise<{ output: Lambda }>,
  ext: string
) {
  return async function build({ workPath }: { workPath: string }) {
    const opts = { cwd: workPath };
    const files = await glob('**', opts);
    delete files['vercel.json']; // Builders/Runtimes didn't have vercel.json
    const entrypoints = await glob(`api/**/*${ext}`, opts);
    const pages: { [key: string]: any } = {};
    const { functions = {} } = await readVercelConfig(workPath);
    const traceDir = join(workPath, '.output', 'runtime-traced-files');
    await fs.ensureDir(traceDir);

    for (const entrypoint of Object.keys(entrypoints)) {
      const key =
        Object.keys(functions).find(
          src => src === entrypoint || minimatch(entrypoint, src)
        ) || '';
      const config = functions[key] || {};

      const { output } = await buildRuntime({
        files,
        entrypoint,
        workPath,
        config: {
          zeroConfig: true,
          includeFiles: config.includeFiles,
          excludeFiles: config.excludeFiles,
        },
      });

      pages[entrypoint] = {
        handler: output.handler,
        runtime: output.runtime,
        memory: output.memory,
        maxDuration: output.maxDuration,
        environment: output.environment,
        allowQuery: output.allowQuery,
        regions: output.regions,
      };

      // @ts-ignore This symbol is a private API
      const lambdaFiles: Files = output[FILES_SYMBOL];

      const entry = join(workPath, '.output', 'server', 'pages', entrypoint);
      await fs.ensureDir(dirname(entry));
      await linkOrCopy(files[entrypoint].fsPath, entry);

      const tracedFiles: {
        absolutePath: string;
        relativePath: string;
      }[] = [];

      Object.entries(lambdaFiles).forEach(async ([relPath, file]) => {
        const newPath = join(traceDir, relPath);
        tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
        if (file.fsPath) {
          await linkOrCopy(file.fsPath, newPath);
        } else if (file.type === 'FileBlob') {
          const { data, mode } = file as FileBlob;
          await fs.writeFile(newPath, data, { mode });
        } else {
          throw new Error(`Unknown file type: ${file.type}`);
        }
      });

      const nft = join(
        workPath,
        '.output',
        'server',
        'pages',
        `${entrypoint}.nft.json`
      );
      const json = JSON.stringify({
        version: 1,
        files: tracedFiles.map(f => ({
          input: normalizePath(relative(nft, f.absolutePath)),
          output: normalizePath(f.relativePath),
        })),
      });

      await fs.ensureDir(dirname(nft));
      await fs.writeFile(nft, json);
    }

    await updateFunctionsManifest({ workPath, pages });
  };
}

async function linkOrCopy(existingPath: string, newPath: string) {
  try {
    await fs.createLink(existingPath, newPath);
  } catch (err: any) {
    if (err.code !== 'EEXIST') {
      await fs.copyFile(existingPath, newPath);
    }
  }
}

async function readJson(filePath: string): Promise<{ [key: string]: any }> {
  try {
    const str = await fs.readFile(filePath, 'utf8');
    return JSON.parse(str);
  } catch (err) {
    if (err.code === 'ENOENT') {
      return {};
    }
    throw err;
  }
}

async function readVercelConfig(
  workPath: string
): Promise<{ functions?: BuilderFunctions; regions?: string[] }> {
  const vercelJsonPath = join(workPath, 'vercel.json');
  return readJson(vercelJsonPath);
}

/**
 * If `.output/functions-manifest.json` exists, append to the pages
 * property. Otherwise write a new file. This will also read `vercel.json`
 * and apply relevant `functions` property config.
 */
export async function updateFunctionsManifest({
  workPath,
  pages,
}: {
  workPath: string;
  pages: { [key: string]: any };
}) {
  const functionsManifestPath = join(
    workPath,
    '.output',
    'functions-manifest.json'
  );
  const vercelConfig = await readVercelConfig(workPath);
  const functionsManifest = await readJson(functionsManifestPath);

  if (!functionsManifest.version) functionsManifest.version = 1;
  if (!functionsManifest.pages) functionsManifest.pages = {};

  for (const [pageKey, pageConfig] of Object.entries(pages)) {
    const fnConfig = await getLambdaOptionsFromFunction({
      sourceFile: pageKey,
      config: vercelConfig,
    });
    functionsManifest.pages[pageKey] = {
      ...pageConfig,
      memory: fnConfig.memory || pageConfig.memory,
      maxDuration: fnConfig.maxDuration || pageConfig.maxDuration,
      regions: vercelConfig.regions || pageConfig.regions,
    };
  }

  await fs.writeFile(functionsManifestPath, JSON.stringify(functionsManifest));
}

/**
 * Will append routes to the `routes-manifest.json` file.
 * If the file does not exist, it'll be created.
 */
export async function updateRoutesManifest({
  workPath,
  dynamicRoutes,
}: {
  workPath: string;
  dynamicRoutes?: {
    page: string;
    regex: string;
    namedRegex?: string;
    routeKeys?: { [named: string]: string };
  }[];
}) {
  const routesManifestPath = join(workPath, '.output', 'routes-manifest.json');

  const routesManifest = await readJson(routesManifestPath);

  if (!routesManifest.version) routesManifest.version = 1;
  if (routesManifest.pages404 === undefined) routesManifest.pages404 = true;

  if (dynamicRoutes) {
    if (!routesManifest.dynamicRoutes) routesManifest.dynamicRoutes = [];
    routesManifest.dynamicRoutes.push(...dynamicRoutes);
  }

  await fs.writeFile(routesManifestPath, JSON.stringify(routesManifest));
}
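A hedged usage sketch of the new `convertRuntimeToPlugin` export: the `@vercel/python` import path is hypothetical, and any legacy Runtime build() matching the `(options: BuildOptions) => Promise<{ output: Lambda }>` shape would work here, as in the unit test near the end of this diff.

```ts
import { convertRuntimeToPlugin } from '@vercel/build-utils';
// Hypothetical import: stands in for a legacy Runtime's build() function.
import { build as buildPython } from '@vercel/python';

// The returned plugin-style build() takes only { workPath } and writes its
// results under .output/ (server/pages, runtime-traced-files,
// functions-manifest.json), as implemented above.
export const build = convertRuntimeToPlugin(buildPython, '.py');

// Example invocation:
// await build({ workPath: '/path/to/project' });
```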
@@ -3,6 +3,7 @@ import assert from 'assert';
 import vanillaGlob_ from 'glob';
 import { promisify } from 'util';
 import { lstat, Stats } from 'fs-extra';
+import { normalizePath } from './normalize-path';
 import FileFsRef from '../file-fs-ref';
 
 export type GlobOptions = vanillaGlob_.IOptions;
@@ -45,7 +46,7 @@ export default async function glob(
   const files = await vanillaGlob(pattern, options);
 
   for (const relativePath of files) {
-    const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
+    const fsPath = normalizePath(path.join(options.cwd!, relativePath));
     let stat: Stats = options.statCache![fsPath] as Stats;
     assert(
       stat,
packages/build-utils/src/fs/normalize-path.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
const isWin = process.platform === 'win32';

/**
 * Convert Windows separators to Unix separators.
 */
export function normalizePath(p: string): string {
  return isWin ? p.replace(/\\/g, '/') : p;
}
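A quick illustration of the new helper; the sample path is made up, and the export from the package root relies on the `index.ts` change shown just below.

```ts
import { normalizePath } from '@vercel/build-utils';

// On win32 the backslashes are rewritten to forward slashes;
// on every other platform the input string is returned unchanged.
normalizePath('api\\users\\get.py'); // 'api/users/get.py' on Windows
```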
@@ -81,6 +81,12 @@ export {
 export { detectFramework } from './detect-framework';
 export { DetectorFilesystem } from './detectors/filesystem';
 export { readConfigFile } from './fs/read-config-file';
+export { normalizePath } from './fs/normalize-path';
+export {
+  convertRuntimeToPlugin,
+  updateFunctionsManifest,
+  updateRoutesManifest,
+} from './convert-runtime-to-plugin';
 
 export * from './schemas';
 export * from './types';
@@ -36,9 +36,11 @@ interface CreateLambdaOptions {
|
||||
|
||||
interface GetLambdaOptionsFromFunctionOptions {
|
||||
sourceFile: string;
|
||||
config?: Config;
|
||||
config?: Pick<Config, 'functions'>;
|
||||
}
|
||||
|
||||
export const FILES_SYMBOL = Symbol('files');
|
||||
|
||||
export class Lambda {
|
||||
public type: 'Lambda';
|
||||
public zipBuffer: Buffer;
|
||||
@@ -118,7 +120,7 @@ export async function createLambda({
|
||||
|
||||
try {
|
||||
const zipBuffer = await createZip(files);
|
||||
return new Lambda({
|
||||
const lambda = new Lambda({
|
||||
zipBuffer,
|
||||
handler,
|
||||
runtime,
|
||||
@@ -127,6 +129,9 @@ export async function createLambda({
|
||||
environment,
|
||||
regions,
|
||||
});
|
||||
// @ts-ignore This symbol is a private API
|
||||
lambda[FILES_SYMBOL] = files;
|
||||
return lambda;
|
||||
} finally {
|
||||
sema.release();
|
||||
}
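For context, the symbol attached above lets internal callers recover the original files map from a `Lambda` without widening its public type. A hedged sketch follows; the relative import path is an assumption, and `FILES_SYMBOL` is deliberately not part of the public API.

import { FILES_SYMBOL, Lambda } from './lambda';

function getLambdaFiles(lambda: Lambda) {
  // The symbol is a private API, so the property does not exist on the public Lambda type.
  return (lambda as any)[FILES_SYMBOL];
}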
|
||||
|
||||
@@ -10,6 +10,7 @@ export interface File {
|
||||
mode: number;
|
||||
contentType?: string;
|
||||
toStream: () => NodeJS.ReadableStream;
|
||||
toStreamAsync?: () => Promise<NodeJS.ReadableStream>;
|
||||
/**
|
||||
* The absolute path to the file in the filesystem
|
||||
*/
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "package.json", "use": "@vercel/static-build" }],
|
||||
"probes": [{ "path": "/", "mustContain": "npm version: 7" }]
|
||||
"probes": [{ "path": "/", "mustContain": "npm version: 8" }]
|
||||
}
|
||||
|
||||
182
packages/build-utils/test/unit.convert-runtime-to-plugin.test.ts
vendored
Normal file
@@ -0,0 +1,182 @@
|
||||
import { join } from 'path';
|
||||
import fs from 'fs-extra';
|
||||
import { BuildOptions, createLambda } from '../src';
|
||||
import { convertRuntimeToPlugin } from '../src/convert-runtime-to-plugin';
|
||||
|
||||
async function fsToJson(dir: string, output: Record<string, any> = {}) {
|
||||
const files = await fs.readdir(dir);
|
||||
for (const file of files) {
|
||||
const fsPath = join(dir, file);
|
||||
const stat = await fs.stat(fsPath);
|
||||
if (stat.isDirectory()) {
|
||||
output[file] = {};
|
||||
await fsToJson(fsPath, output[file]);
|
||||
} else {
|
||||
output[file] = await fs.readFile(fsPath, 'utf8');
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
const workPath = join(__dirname, 'walk', 'python-api');
|
||||
|
||||
describe('convert-runtime-to-plugin', () => {
|
||||
afterEach(async () => {
|
||||
await fs.remove(join(workPath, '.output'));
|
||||
});
|
||||
|
||||
it('should create correct filesystem for python', async () => {
|
||||
const lambdaOptions = {
|
||||
handler: 'index.handler',
|
||||
runtime: 'python3.9',
|
||||
memory: 512,
|
||||
maxDuration: 5,
|
||||
environment: {},
|
||||
regions: ['sfo1'],
|
||||
};
|
||||
|
||||
const buildRuntime = async (opts: BuildOptions) => {
|
||||
const lambda = await createLambda({
|
||||
files: opts.files,
|
||||
...lambdaOptions,
|
||||
});
|
||||
return { output: lambda };
|
||||
};
|
||||
|
||||
const lambdaFiles = await fsToJson(workPath);
|
||||
delete lambdaFiles['vercel.json'];
|
||||
const build = await convertRuntimeToPlugin(buildRuntime, '.py');
|
||||
|
||||
await build({ workPath });
|
||||
|
||||
const output = await fsToJson(join(workPath, '.output'));
|
||||
expect(output).toMatchObject({
|
||||
'functions-manifest.json': expect.stringContaining('{'),
|
||||
'runtime-traced-files': lambdaFiles,
|
||||
server: {
|
||||
pages: {
|
||||
api: {
|
||||
'index.py': expect.stringContaining('index'),
|
||||
'index.py.nft.json': expect.stringContaining('{'),
|
||||
users: {
|
||||
'get.py': expect.stringContaining('get'),
|
||||
'get.py.nft.json': expect.stringContaining('{'),
|
||||
'post.py': expect.stringContaining('post'),
|
||||
'post.py.nft.json': expect.stringContaining('{'),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const funcManifest = JSON.parse(output['functions-manifest.json']);
|
||||
expect(funcManifest).toMatchObject({
|
||||
version: 1,
|
||||
pages: {
|
||||
'api/index.py': lambdaOptions,
|
||||
'api/users/get.py': lambdaOptions,
|
||||
'api/users/post.py': { ...lambdaOptions, memory: 3008 },
|
||||
},
|
||||
});
|
||||
|
||||
const indexJson = JSON.parse(output.server.pages.api['index.py.nft.json']);
|
||||
expect(indexJson).toMatchObject({
|
||||
version: 1,
|
||||
files: [
|
||||
{
|
||||
input: '../../../../runtime-traced-files/api/index.py',
|
||||
output: 'api/index.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../runtime-traced-files/api/users/get.py',
|
||||
output: 'api/users/get.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../runtime-traced-files/api/users/post.py',
|
||||
output: 'api/users/post.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../runtime-traced-files/file.txt',
|
||||
output: 'file.txt',
|
||||
},
|
||||
{
|
||||
input: '../../../../runtime-traced-files/util/date.py',
|
||||
output: 'util/date.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../runtime-traced-files/util/math.py',
|
||||
output: 'util/math.py',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const getJson = JSON.parse(
|
||||
output.server.pages.api.users['get.py.nft.json']
|
||||
);
|
||||
expect(getJson).toMatchObject({
|
||||
version: 1,
|
||||
files: [
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/api/index.py',
|
||||
output: 'api/index.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/api/users/get.py',
|
||||
output: 'api/users/get.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/api/users/post.py',
|
||||
output: 'api/users/post.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/file.txt',
|
||||
output: 'file.txt',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/util/date.py',
|
||||
output: 'util/date.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/util/math.py',
|
||||
output: 'util/math.py',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const postJson = JSON.parse(
|
||||
output.server.pages.api.users['post.py.nft.json']
|
||||
);
|
||||
expect(postJson).toMatchObject({
|
||||
version: 1,
|
||||
files: [
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/api/index.py',
|
||||
output: 'api/index.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/api/users/get.py',
|
||||
output: 'api/users/get.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/api/users/post.py',
|
||||
output: 'api/users/post.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/file.txt',
|
||||
output: 'file.txt',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/util/date.py',
|
||||
output: 'util/date.py',
|
||||
},
|
||||
{
|
||||
input: '../../../../../runtime-traced-files/util/math.py',
|
||||
output: 'util/math.py',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
expect(output.server.pages['file.txt']).toBeUndefined();
|
||||
expect(output.server.pages.api['file.txt']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
1
packages/build-utils/test/walk/python-api/api/index.py
Normal file
@@ -0,0 +1 @@
# index
@@ -0,0 +1 @@
# get
@@ -0,0 +1 @@
# post
1
packages/build-utils/test/walk/python-api/file.txt
Normal file
@@ -0,0 +1 @@
This file should also be included
1
packages/build-utils/test/walk/python-api/util/date.py
Normal file
@@ -0,0 +1 @@
# date
1
packages/build-utils/test/walk/python-api/util/math.py
Normal file
@@ -0,0 +1 @@
# math
10
packages/build-utils/test/walk/python-api/vercel.json
Normal file
@@ -0,0 +1,10 @@
{
  "functions": {
    "api/users/post.py": {
      "memory": 3008
    },
    "api/not-matching-anything.py": {
      "memory": 768
    }
  }
}
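To make the fixture's intent explicit: only `api/users/post.py` matches a `functions` entry, so only its memory is overridden (the test above expects `memory: 3008` for that page). A minimal sketch of the precedence rule used by `convertRuntimeToPlugin`, with hypothetical values:

interface FnConfig { memory?: number; maxDuration?: number }

const pageConfig: FnConfig = { memory: 512, maxDuration: 5 }; // defaults from the runtime's Lambda options
const fnConfig: FnConfig = { memory: 3008 };                  // override from "functions" in vercel.json

const merged = {
  memory: fnConfig.memory || pageConfig.memory,                // 3008 for api/users/post.py
  maxDuration: fnConfig.maxDuration || pageConfig.maxDuration, // 5, since there is no override
};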
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "23.1.3-canary.16",
|
||||
"version": "23.1.3-canary.38",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -43,14 +43,14 @@
|
||||
"node": ">= 12"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "2.12.3-canary.14",
|
||||
"@vercel/go": "1.2.4-canary.3",
|
||||
"@vercel/node": "1.12.2-canary.4",
|
||||
"@vercel/python": "2.0.6-canary.4",
|
||||
"@vercel/ruby": "1.2.8-canary.3",
|
||||
"@vercel/build-utils": "2.12.3-canary.20",
|
||||
"@vercel/go": "1.2.4-canary.4",
|
||||
"@vercel/node": "1.12.2-canary.7",
|
||||
"@vercel/python": "2.1.1",
|
||||
"@vercel/ruby": "1.2.8-canary.4",
|
||||
"update-notifier": "4.1.0",
|
||||
"vercel-plugin-middleware": "0.0.0-canary.3",
|
||||
"vercel-plugin-node": "1.12.2-plugin.0"
|
||||
"vercel-plugin-middleware": "0.0.0-canary.7",
|
||||
"vercel-plugin-node": "1.12.2-canary.11"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@next/env": "11.1.2",
|
||||
@@ -90,8 +90,9 @@
|
||||
"@types/update-notifier": "5.1.0",
|
||||
"@types/which": "1.3.2",
|
||||
"@types/write-json-file": "2.2.1",
|
||||
"@vercel/frameworks": "0.5.1-canary.10",
|
||||
"@vercel/frameworks": "0.5.1-canary.12",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"@vercel/nft": "0.17.0",
|
||||
"@zeit/fun": "0.11.2",
|
||||
"@zeit/source-map-support": "0.6.2",
|
||||
"ajv": "6.12.2",
|
||||
|
||||
@@ -6,6 +6,7 @@ import {
|
||||
scanParentDirs,
|
||||
spawnAsync,
|
||||
} from '@vercel/build-utils';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import Sema from 'async-sema';
|
||||
import chalk from 'chalk';
|
||||
import { SpawnOptions } from 'child_process';
|
||||
@@ -22,19 +23,14 @@ import handleError from '../util/handle-error';
|
||||
import confirm from '../util/input/confirm';
|
||||
import { isSettingValue } from '../util/is-setting-value';
|
||||
import cmd from '../util/output/cmd';
|
||||
import code from '../util/output/code';
|
||||
import { getColorForPkgName } from '../util/output/color-name-cache';
|
||||
import logo from '../util/output/logo';
|
||||
import param from '../util/output/param';
|
||||
import stamp from '../util/output/stamp';
|
||||
import cliPkgJson from '../util/pkg';
|
||||
import { getCommandName, getPkgName } from '../util/pkg-name';
|
||||
import { loadCliPlugins } from '../util/plugins';
|
||||
import { findFramework } from '../util/projects/find-framework';
|
||||
import { VERCEL_DIR } from '../util/projects/link';
|
||||
import {
|
||||
ProjectLinkAndSettings,
|
||||
readProjectSettings,
|
||||
} from '../util/projects/project-settings';
|
||||
import { readProjectSettings } from '../util/projects/project-settings';
|
||||
import pull from './pull';
|
||||
|
||||
const sema = new Sema(16, {
|
||||
@@ -63,23 +59,29 @@ const help = () => {
|
||||
${chalk.gray('–')} Build the project
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} build`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} build --cwd ./path-to-project`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} build --cwd ./path-to-project`)}
|
||||
`);
|
||||
};
|
||||
|
||||
const OUTPUT_DIR = '.output';
|
||||
const VERCEL_PLUGIN_PREFIX = 'vercel-plugin-';
|
||||
|
||||
const fields: {
|
||||
name: string;
|
||||
value: keyof ProjectLinkAndSettings['settings'];
|
||||
}[] = [
|
||||
{ name: 'Build Command', value: 'buildCommand' },
|
||||
{ name: 'Output Directory', value: 'outputDirectory' },
|
||||
{ name: 'Root Directory', value: 'rootDirectory' },
|
||||
];
|
||||
|
||||
export default async function main(client: Client) {
|
||||
if (process.env.__VERCEL_BUILD_RUNNING) {
|
||||
client.output.error(
|
||||
`${cmd(
|
||||
`${getPkgName()} build`
|
||||
)} must not recursively invoke itself. Check the Build Command in the Project Settings or the ${cmd(
|
||||
'build'
|
||||
)} script in ${cmd('package.json')}`
|
||||
);
|
||||
client.output.error(
|
||||
`Learn More: https://vercel.link/recursive-invocation-of-commands`
|
||||
);
|
||||
return 1;
|
||||
} else {
|
||||
process.env.__VERCEL_BUILD_RUNNING = '1';
|
||||
}
|
||||
|
||||
let argv;
|
||||
const buildStamp = stamp();
|
||||
try {
|
||||
@@ -119,6 +121,9 @@ export default async function main(client: Client) {
|
||||
project = await readProjectSettings(join(cwd, VERCEL_DIR));
|
||||
}
|
||||
|
||||
// If `rootDirectory` exists, then `baseDir` will be the repo's root directory.
|
||||
const baseDir = cwd;
|
||||
|
||||
cwd = project.settings.rootDirectory
|
||||
? join(cwd, project.settings.rootDirectory)
|
||||
: cwd;
|
||||
@@ -151,47 +156,57 @@ export default async function main(client: Client) {
|
||||
}
|
||||
|
||||
const buildState = { ...project.settings };
|
||||
|
||||
client.output.log(`Retrieved Project Settings:`);
|
||||
client.output.print(
|
||||
chalk.dim(` - ${chalk.bold(`Framework Preset:`)} ${framework.name}\n`)
|
||||
const formatSetting = (
|
||||
name: string,
|
||||
override: string | null | undefined,
|
||||
defaults: typeof framework.settings.outputDirectory
|
||||
) =>
|
||||
` - ${chalk.bold(`${name}:`)} ${`${
|
||||
override
|
||||
? override + ` (override)`
|
||||
: 'placeholder' in defaults
|
||||
? chalk.italic(`${defaults.placeholder}`)
|
||||
: defaults.value
|
||||
}`}`;
|
||||
console.log(`Retrieved Project Settings:`);
|
||||
console.log(
|
||||
chalk.dim(` - ${chalk.bold(`Framework Preset:`)} ${framework.name}`)
|
||||
);
|
||||
console.log(
|
||||
chalk.dim(
|
||||
formatSetting(
|
||||
'Build Command',
|
||||
project.settings.buildCommand,
|
||||
framework.settings.buildCommand
|
||||
)
|
||||
)
|
||||
);
|
||||
console.log(
|
||||
chalk.dim(
|
||||
formatSetting(
|
||||
'Output Directory',
|
||||
project.settings.outputDirectory,
|
||||
framework.settings.outputDirectory
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
for (let field of fields) {
|
||||
const defaults = (framework.settings as any)[field.value];
|
||||
if (defaults) {
|
||||
client.output.print(
|
||||
chalk.dim(
|
||||
` - ${chalk.bold(`${field.name}:`)} ${`${
|
||||
project.settings[field.value]
|
||||
? project.settings[field.value] + ` (override)`
|
||||
: isSettingValue(defaults)
|
||||
? defaults.value
|
||||
: chalk.italic(`${defaults.placeholder}`)
|
||||
}`}\n`
|
||||
)
|
||||
);
|
||||
}
|
||||
if (field.value != 'buildCommand') {
|
||||
(buildState as any)[field.value] = project.settings[field.value]
|
||||
? project.settings[field.value]
|
||||
: defaults
|
||||
? isSettingValue(defaults)
|
||||
? defaults.value
|
||||
: null
|
||||
: null;
|
||||
}
|
||||
}
|
||||
buildState.outputDirectory =
|
||||
project.settings.outputDirectory ||
|
||||
(isSettingValue(framework.settings.outputDirectory)
|
||||
? framework.settings.outputDirectory.value
|
||||
: null);
|
||||
buildState.rootDirectory = project.settings.rootDirectory;
|
||||
|
||||
if (loadedEnvFiles.length > 0) {
|
||||
client.output.log(
|
||||
console.log(
|
||||
`Loaded Environment Variables from ${loadedEnvFiles.length} ${pluralize(
|
||||
'file',
|
||||
loadedEnvFiles.length
|
||||
)}:`
|
||||
);
|
||||
for (let envFile of loadedEnvFiles) {
|
||||
client.output.print(chalk.dim(` - ${envFile.path}\n`));
|
||||
console.log(chalk.dim(` - ${envFile.path}`));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -199,7 +214,7 @@ export default async function main(client: Client) {
|
||||
const debug = argv['--debug'];
|
||||
let plugins;
|
||||
try {
|
||||
plugins = await loadCliPlugins(client, cwd);
|
||||
plugins = await loadCliPlugins(cwd, client.output);
|
||||
} catch (error) {
|
||||
client.output.error('Failed to load CLI Plugins');
|
||||
handleError(error, { debug });
|
||||
@@ -222,7 +237,7 @@ export default async function main(client: Client) {
|
||||
};
|
||||
|
||||
if (plugins?.pluginCount && plugins?.pluginCount > 0) {
|
||||
client.output.log(
|
||||
console.log(
|
||||
`Loaded ${plugins.pluginCount} CLI ${pluralize(
|
||||
'Plugin',
|
||||
plugins.pluginCount
|
||||
@@ -230,7 +245,7 @@ export default async function main(client: Client) {
|
||||
);
|
||||
// preBuild Plugins
|
||||
if (plugins.preBuildPlugins.length > 0) {
|
||||
client.output.log(
|
||||
console.log(
|
||||
`Running ${plugins.pluginCount} CLI ${pluralize(
|
||||
'Plugin',
|
||||
plugins.pluginCount
|
||||
@@ -267,38 +282,37 @@ export default async function main(client: Client) {
|
||||
|
||||
// Clean the output directory
|
||||
fs.removeSync(join(cwd, OUTPUT_DIR));
|
||||
let result: boolean;
|
||||
|
||||
// Yarn v2 PnP mode may be activated, so force
|
||||
// "node-modules" linker style
|
||||
const env = {
|
||||
YARN_NODE_LINKER: 'node-modules',
|
||||
...spawnOpts.env,
|
||||
};
|
||||
|
||||
if (typeof buildState.buildCommand === 'string') {
|
||||
client.output.log(`Running Build Command: ${cmd(buildState.buildCommand)}`);
|
||||
result = await execCommand(buildState.buildCommand, {
|
||||
console.log(`Running Build Command: ${cmd(buildState.buildCommand)}`);
|
||||
await execCommand(buildState.buildCommand, {
|
||||
...spawnOpts,
|
||||
// Yarn v2 PnP mode may be activated, so force
|
||||
// "node-modules" linker style
|
||||
env: {
|
||||
YARN_NODE_LINKER: 'node-modules',
|
||||
...spawnOpts.env,
|
||||
},
|
||||
cwd: cwd,
|
||||
env,
|
||||
cwd,
|
||||
});
|
||||
} else if (fs.existsSync(join(cwd, 'package.json'))) {
|
||||
result = await runPackageJsonScript(
|
||||
await runPackageJsonScript(
|
||||
client,
|
||||
cwd,
|
||||
['vercel-build', 'now-build', 'build'],
|
||||
spawnOpts
|
||||
);
|
||||
} else {
|
||||
// no package.json exists and no build command present
|
||||
result = true;
|
||||
}
|
||||
|
||||
if (!result) {
|
||||
client.output.error(
|
||||
`Missing required "${cmd(
|
||||
buildState.buildCommand || 'vercel-build' || 'build'
|
||||
)}" script in ${param(cwd)}"\n`
|
||||
} else if (typeof framework.settings.buildCommand.value === 'string') {
|
||||
console.log(
|
||||
`Running Build Command: ${cmd(framework.settings.buildCommand.value)}`
|
||||
);
|
||||
return 1;
|
||||
await execCommand(framework.settings.buildCommand.value, {
|
||||
...spawnOpts,
|
||||
env,
|
||||
cwd,
|
||||
});
|
||||
}
|
||||
|
||||
if (!fs.existsSync(join(cwd, OUTPUT_DIR))) {
|
||||
@@ -317,6 +331,9 @@ export default async function main(client: Client) {
|
||||
ignore: [
|
||||
'node_modules/**',
|
||||
'.vercel/**',
|
||||
'.env',
|
||||
'.env.*',
|
||||
'.*ignore',
|
||||
'_middleware.ts',
|
||||
'_middleware.mts',
|
||||
'_middleware.cts',
|
||||
@@ -325,6 +342,7 @@ export default async function main(client: Client) {
|
||||
'_middleware.js',
|
||||
'api/**',
|
||||
'.git/**',
|
||||
'.next/cache/**',
|
||||
],
|
||||
nodir: true,
|
||||
dot: true,
|
||||
@@ -343,7 +361,7 @@ export default async function main(client: Client) {
|
||||
)
|
||||
);
|
||||
client.output.stopSpinner();
|
||||
client.output.log(
|
||||
console.log(
|
||||
`Copied ${files.length.toLocaleString()} files from ${param(
|
||||
distDir
|
||||
)} to ${param(outputDir)} ${copyStamp()}`
|
||||
@@ -382,53 +400,189 @@ export default async function main(client: Client) {
|
||||
{ spaces: 2 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (framework.slug === 'nextjs') {
|
||||
const files = await glob(join(OUTPUT_DIR, '**', '*.nft.json'), {
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd,
|
||||
absolute: true,
|
||||
});
|
||||
await fs.mkdirp(join(cwd, OUTPUT_DIR, 'inputs'));
|
||||
for (let f of files) {
|
||||
client.output.debug(`Processing ${f}:`);
|
||||
const json = await fs.readJson(f);
|
||||
const newFilesList: Array<{ input: string; output: string }> = [];
|
||||
for (let fileEntity of json.files) {
|
||||
const file =
|
||||
typeof fileEntity === 'string' ? fileEntity : fileEntity.input;
|
||||
// if the resolved path is NOT in the .output directory, we move it in there
|
||||
const fullPath = resolve(parse(f).dir);
|
||||
if (!resolve(fullPath).includes(OUTPUT_DIR)) {
|
||||
const { ext } = parse(file);
|
||||
const raw = await fs.readFile(resolve(fullPath));
|
||||
const newFilePath = join(OUTPUT_DIR, 'inputs', hash(raw) + ext);
|
||||
smartCopy(client, fullPath, newFilePath);
|
||||
// Special Next.js processing.
|
||||
if (framework.slug === 'nextjs') {
|
||||
// The contents of `.output/static` should be placed inside of `.output/static/_next/static`
|
||||
const tempStatic = '___static';
|
||||
await fs.rename(
|
||||
join(cwd, OUTPUT_DIR, 'static'),
|
||||
join(cwd, OUTPUT_DIR, tempStatic)
|
||||
);
|
||||
await fs.mkdirp(join(cwd, OUTPUT_DIR, 'static', '_next', 'static'));
|
||||
await fs.rename(
|
||||
join(cwd, OUTPUT_DIR, tempStatic),
|
||||
join(cwd, OUTPUT_DIR, 'static', '_next', 'static')
|
||||
);
|
||||
|
||||
newFilesList.push({
|
||||
input: relative(parse(f).dir, newFilePath),
|
||||
output: file,
|
||||
// Next.js might reference files from the `static` directory in `middleware-manifest.json`.
|
||||
// Since we move all files from `static` to `static/_next/static`, we'll need to change
|
||||
// those references as well and update the manifest file.
|
||||
const middlewareManifest = join(
|
||||
cwd,
|
||||
OUTPUT_DIR,
|
||||
'server',
|
||||
'middleware-manifest.json'
|
||||
);
|
||||
if (fs.existsSync(middlewareManifest)) {
|
||||
const manifest = await fs.readJSON(middlewareManifest);
|
||||
Object.keys(manifest.middleware).forEach(key => {
|
||||
const files = manifest.middleware[key].files.map((f: string) => {
|
||||
if (f.startsWith('static/')) {
|
||||
const next = f.replace(/^static\//gm, 'static/_next/static/');
|
||||
client.output.debug(
|
||||
`Replacing file in \`middleware-manifest.json\`: ${f} => ${next}`
|
||||
);
|
||||
return next;
|
||||
}
|
||||
|
||||
return f;
|
||||
});
|
||||
} else {
|
||||
newFilesList.push({
|
||||
input: file,
|
||||
output: file,
|
||||
|
||||
manifest.middleware[key].files = files;
|
||||
});
|
||||
|
||||
await fs.writeJSON(middlewareManifest, manifest);
|
||||
}
|
||||
|
||||
// We want to pick up directories for user-provided static files into `.output/static`.
// More specifically, the static directory contents would then be mounted to `.output/static/static`,
// and the public directory contents would be mounted to `.output/static`. Old Next.js versions
// allow `static`, and newer ones allow both, but since there's nobody that actually uses both,
// we can check for the existence of both and pick the first match that we find (first
// `public`, then `static`). We can't read both at the same time because that would mean we'd
// read `public` for old Next.js versions that don't support it, which might be breaking (and
// we don't want to make `vercel build` specific to framework versions).
const publicFiles = await glob('public/**', {
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd,
|
||||
absolute: true,
|
||||
});
|
||||
if (publicFiles.length > 0) {
|
||||
await Promise.all(
|
||||
publicFiles.map(f =>
|
||||
smartCopy(
|
||||
client,
|
||||
f,
|
||||
f.replace('public', join(OUTPUT_DIR, 'static'))
|
||||
)
|
||||
)
|
||||
);
|
||||
} else {
|
||||
const staticFiles = await glob('static/**', {
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd,
|
||||
absolute: true,
|
||||
});
|
||||
await Promise.all(
|
||||
staticFiles.map(f =>
|
||||
smartCopy(
|
||||
client,
|
||||
f,
|
||||
f.replace('static', join(OUTPUT_DIR, 'static', 'static'))
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Regardless of the Next.js version, we make sure that it is compatible with
// the Filesystem API. We get there by moving all the files needed
// into the output directory's `inputs` folder. If Next.js is > 12, we can
// read the .nft.json files directly. If there aren't .nft.json files,
// we trace and create them. We then resolve the files in each nft file list
// and move them into the "inputs" directory. We rename them with hashes to
// prevent collisions and then update the related .nft files accordingly
// to point to the newly named input files. Again, all of this is so that Next.js
// works with the Filesystem API (and so .output contains all inputs
// needed to run Next.js) and `vc --prebuilt`.
const nftFiles = await glob(join(OUTPUT_DIR, '**', '*.nft.json'), {
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd,
|
||||
absolute: true,
|
||||
});
|
||||
|
||||
// If there are no .nft.json files, we know that Next.js < 12. We then
|
||||
// execute the tracing on our own.
|
||||
if (nftFiles.length === 0) {
|
||||
const serverFiles = await glob(
|
||||
join(OUTPUT_DIR, 'server', 'pages', '**', '*.js'),
|
||||
{
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd,
|
||||
ignore: ['webpack-runtime.js'],
|
||||
absolute: true,
|
||||
}
|
||||
);
|
||||
for (let f of serverFiles) {
|
||||
const { ext, dir } = parse(f);
|
||||
const { fileList } = await nodeFileTrace([f], {
|
||||
ignore: [
|
||||
relative(cwd, f),
|
||||
'node_modules/next/dist/pages/**/*',
|
||||
'node_modules/next/dist/compiled/webpack/(bundle4|bundle5).js',
|
||||
'node_modules/react/**/*.development.js',
|
||||
'node_modules/react-dom/**/*.development.js',
|
||||
'node_modules/use-subscription/**/*.development.js',
|
||||
'node_modules/sharp/**/*',
|
||||
],
|
||||
});
|
||||
fileList.delete(relative(cwd, f));
|
||||
await resolveNftToOutput({
|
||||
client,
|
||||
baseDir,
|
||||
outputDir: OUTPUT_DIR,
|
||||
nftFileName: f.replace(ext, '.js.nft.json'),
|
||||
nft: {
|
||||
version: 1,
|
||||
files: Array.from(fileList).map(fileListEntry =>
|
||||
relative(dir, fileListEntry)
|
||||
),
|
||||
},
|
||||
});
|
||||
}
|
||||
} else {
|
||||
for (let f of nftFiles) {
|
||||
const json = await fs.readJson(f);
|
||||
await resolveNftToOutput({
|
||||
client,
|
||||
baseDir,
|
||||
outputDir: OUTPUT_DIR,
|
||||
nftFileName: f,
|
||||
nft: json,
|
||||
});
|
||||
}
|
||||
}
|
||||
// Update the .nft.json with new input and output mapping
|
||||
await fs.writeJSON(f, {
|
||||
...json,
|
||||
files: newFilesList,
|
||||
|
||||
const requiredServerFilesPath = join(
|
||||
OUTPUT_DIR,
|
||||
'required-server-files.json'
|
||||
);
|
||||
const requiredServerFilesJson = await fs.readJSON(
|
||||
requiredServerFilesPath
|
||||
);
|
||||
await fs.writeJSON(requiredServerFilesPath, {
|
||||
...requiredServerFilesJson,
|
||||
appDir: '.',
|
||||
files: requiredServerFilesJson.files.map((i: string) => {
|
||||
const absolutePath = join(cwd, i.replace('.next', '.output'));
|
||||
const output = relative(baseDir, absolutePath);
|
||||
|
||||
return {
|
||||
input: i.replace('.next', '.output'),
|
||||
output,
|
||||
};
|
||||
}),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Build Plugins
|
||||
if (plugins?.buildPlugins && plugins.buildPlugins.length > 0) {
|
||||
client.output.log(
|
||||
console.log(
|
||||
`Running ${plugins.pluginCount} CLI ${pluralize(
|
||||
'Plugin',
|
||||
plugins.pluginCount
|
||||
@@ -445,7 +599,9 @@ export default async function main(client: Client) {
|
||||
console.log = (...args: any[]) => prefixedLog(prefix, args, origLog);
|
||||
console.error = (...args: any[]) =>
|
||||
prefixedLog(prefix, args, origErr);
|
||||
await plugin.build();
|
||||
await plugin.build({
|
||||
workPath: cwd,
|
||||
});
|
||||
client.output.debug(
|
||||
`Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
|
||||
);
|
||||
@@ -461,13 +617,13 @@ export default async function main(client: Client) {
|
||||
}
|
||||
}
|
||||
|
||||
client.output.print(
|
||||
console.log(
|
||||
`${prependEmoji(
|
||||
`Build Completed in ${chalk.bold(OUTPUT_DIR)} ${chalk.gray(
|
||||
buildStamp()
|
||||
)}`,
|
||||
emoji('success')
|
||||
)}\n`
|
||||
)}`
|
||||
);
|
||||
|
||||
return 0;
|
||||
@@ -513,71 +669,37 @@ export async function runPackageJsonScript(
|
||||
}
|
||||
}
|
||||
|
||||
client.output.log(`Running Build Command: ${cmd(opts.prettyCommand)}\n`);
|
||||
console.log(`Running Build Command: ${cmd(opts.prettyCommand)}\n`);
|
||||
await spawnAsync(cliType, ['run', scriptName], opts);
|
||||
client.output.print('\n'); // give it some room
|
||||
console.log(); // give it some room
|
||||
client.output.debug(`Script complete [${Date.now() - runScriptTime}ms]`);
|
||||
return true;
|
||||
}
|
||||
|
||||
async function loadCliPlugins(client: Client, cwd: string) {
|
||||
const { packageJson } = await scanParentDirs(cwd, true);
|
||||
|
||||
let pluginCount = 0;
|
||||
const preBuildPlugins = [];
|
||||
const buildPlugins = [];
|
||||
const deps = new Set(
|
||||
[
|
||||
...Object.keys(packageJson?.dependencies || {}),
|
||||
...Object.keys(packageJson?.devDependencies || {}),
|
||||
...Object.keys(cliPkgJson.dependencies),
|
||||
].filter(dep => dep.startsWith(VERCEL_PLUGIN_PREFIX))
|
||||
);
|
||||
|
||||
for (let dep of deps) {
|
||||
pluginCount++;
|
||||
const resolved = require.resolve(dep, {
|
||||
paths: [cwd, process.cwd(), __dirname],
|
||||
});
|
||||
let plugin;
|
||||
try {
|
||||
plugin = require(resolved);
|
||||
const color = getColorForPkgName(dep);
|
||||
if (typeof plugin.preBuild === 'function') {
|
||||
preBuildPlugins.push({
|
||||
plugin,
|
||||
name: dep,
|
||||
color,
|
||||
});
|
||||
}
|
||||
if (typeof plugin.build === 'function') {
|
||||
buildPlugins.push({
|
||||
plugin,
|
||||
name: dep,
|
||||
color,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
client.output.error(`Failed to import ${code(dep)}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return { pluginCount, preBuildPlugins, buildPlugins };
|
||||
}
|
||||
|
||||
async function linkOrCopy(existingPath: string, newPath: string) {
|
||||
try {
|
||||
await fs.createLink(existingPath, newPath);
|
||||
if (
|
||||
newPath.endsWith('.nft.json') ||
|
||||
newPath.endsWith('middleware-manifest.json') ||
|
||||
newPath.endsWith('required-server-files.json')
|
||||
) {
|
||||
await fs.copy(existingPath, newPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} else {
|
||||
await fs.createLink(existingPath, newPath);
|
||||
}
|
||||
} catch (err: any) {
|
||||
// eslint-disable-line
|
||||
// If a hard link to the same file already exists
|
||||
// If a symlink to the same file already exists
|
||||
// then trying to copy it will make an empty file from it.
|
||||
if (err['code'] === 'EEXIST') return;
|
||||
// In some VERY rare cases (1 in a thousand), hard-link creation fails on Windows.
|
||||
// In some VERY rare cases (1 in a thousand), symlink creation fails on Windows.
|
||||
// In that case, we just fall back to copying.
|
||||
// This issue is reproducible with "pnpm add @material-ui/icons@4.9.1"
|
||||
await fs.copyFile(existingPath, newPath);
|
||||
await fs.copy(existingPath, newPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -608,3 +730,62 @@ async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
|
||||
function hash(buf: Buffer): string {
|
||||
return createHash('sha1').update(buf).digest('hex');
|
||||
}
|
||||
|
||||
interface NftFile {
|
||||
version: number;
|
||||
files: (string | { input: string; output: string })[];
|
||||
}
|
||||
|
||||
// resolveNftToOutput takes nft file and moves all of its trace files
|
||||
// into the specified directory + `inputs`, (renaming them to their hash + ext) and
|
||||
// subsequently updating the original nft file accordingly. This is done
|
||||
// to make the `.output` directory be self-contained, so that it works
|
||||
// properly with `vc --prebuilt`.
|
||||
async function resolveNftToOutput({
|
||||
client,
|
||||
baseDir,
|
||||
outputDir,
|
||||
nftFileName,
|
||||
nft,
|
||||
}: {
|
||||
client: Client;
|
||||
baseDir: string;
|
||||
outputDir: string;
|
||||
nftFileName: string;
|
||||
nft: NftFile;
|
||||
}) {
|
||||
client.output.debug(`Processing and resolving ${nftFileName}`);
|
||||
await fs.ensureDir(join(outputDir, 'inputs'));
|
||||
const newFilesList: NftFile['files'] = [];
|
||||
for (let fileEntity of nft.files) {
|
||||
const relativeInput: string =
|
||||
typeof fileEntity === 'string' ? fileEntity : fileEntity.input;
|
||||
const fullInput = resolve(join(parse(nftFileName).dir, relativeInput));
|
||||
|
||||
// if the resolved path is NOT in the .output directory, we move it in there
|
||||
if (!fullInput.includes(outputDir)) {
|
||||
const { ext } = parse(fullInput);
|
||||
const raw = await fs.readFile(fullInput);
|
||||
const newFilePath = join(outputDir, 'inputs', hash(raw) + ext);
|
||||
smartCopy(client, fullInput, newFilePath);
|
||||
|
||||
// We have to use `baseDir` instead of `cwd`, because we want to
|
||||
// mount everything from there (especially `node_modules`).
|
||||
// This is important for NPM Workspaces where `node_modules` is not
|
||||
// in the directory of the workspace.
|
||||
const output = relative(baseDir, fullInput).replace('.output', '.next');
|
||||
|
||||
newFilesList.push({
|
||||
input: relative(parse(nftFileName).dir, newFilePath),
|
||||
output,
|
||||
});
|
||||
} else {
|
||||
newFilesList.push(relativeInput);
|
||||
}
|
||||
}
|
||||
// Update the .nft.json with new input and output mapping
|
||||
await fs.writeJSON(nftFileName, {
|
||||
...nft,
|
||||
files: newFilesList,
|
||||
});
|
||||
}
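To illustrate the transformation (the file names and hash below are hypothetical, using the NftFile shape defined above): a trace entry that points outside `.output` gets copied to `.output/inputs/<hash><ext>` and rewritten into an input/output pair, while entries already under `.output` stay as plain strings.

// Before: .output/server/pages/index.js.nft.json
const before: NftFile = {
  version: 1,
  files: ['../../../node_modules/react/index.js'],
};

// After resolveNftToOutput (hash shortened for readability):
const after: NftFile = {
  version: 1,
  files: [
    {
      input: '../../inputs/3f2a9c.js',       // copied into .output/inputs
      output: 'node_modules/react/index.js', // path relative to baseDir, mounted at runtime
    },
  ],
};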
|
||||
|
||||
@@ -6,7 +6,6 @@ import { ProjectEnvVariable } from '../../types';
|
||||
import Client from '../../util/client';
|
||||
import { getLinkedProject } from '../../util/projects/link';
|
||||
import { getFrameworks } from '../../util/get-frameworks';
|
||||
import { isSettingValue } from '../../util/is-setting-value';
|
||||
import { ProjectSettings } from '../../types';
|
||||
import getDecryptedEnvRecords from '../../util/get-decrypted-env-records';
|
||||
import setupAndLink from '../../util/link/setup-and-link';
|
||||
@@ -71,9 +70,9 @@ export default async function dev(
|
||||
frameworkSlug = framework.slug;
|
||||
}
|
||||
|
||||
const defaults = framework.settings.devCommand;
|
||||
if (isSettingValue(defaults)) {
|
||||
devCommand = defaults.value;
|
||||
const defaults = framework.settings.devCommand.value;
|
||||
if (defaults) {
|
||||
devCommand = defaults;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,6 +48,22 @@ const help = () => {
|
||||
};
|
||||
|
||||
export default async function main(client: Client) {
|
||||
if (process.env.__VERCEL_DEV_RUNNING) {
|
||||
client.output.error(
|
||||
`${cmd(
|
||||
`${getPkgName()} dev`
|
||||
)} must not recursively invoke itself. Check the Development Command in the Project Settings or the ${cmd(
|
||||
'dev'
|
||||
)} script in ${cmd('package.json')}`
|
||||
);
|
||||
client.output.error(
|
||||
`Learn More: https://vercel.link/recursive-invocation-of-commands`
|
||||
);
|
||||
return 1;
|
||||
} else {
|
||||
process.env.__VERCEL_DEV_RUNNING = '1';
|
||||
}
|
||||
|
||||
let argv;
|
||||
let args;
|
||||
const { output } = client;
|
||||
@@ -90,22 +106,21 @@ export default async function main(client: Client) {
|
||||
if (pkg) {
|
||||
const { scripts } = pkg as PackageJson;
|
||||
|
||||
if (scripts && scripts.dev && /\bnow\b\W+\bdev\b/.test(scripts.dev)) {
|
||||
output.error(
|
||||
`The ${cmd('dev')} script in ${cmd(
|
||||
'package.json'
|
||||
)} must not contain ${cmd('now dev')}`
|
||||
if (
|
||||
scripts &&
|
||||
scripts.dev &&
|
||||
/\b(now|vercel)\b\W+\bdev\b/.test(scripts.dev)
|
||||
) {
|
||||
client.output.error(
|
||||
`${cmd(
|
||||
`${getPkgName()} dev`
|
||||
)} must not recursively invoke itself. Check the Development Command in the Project Settings or the ${cmd(
|
||||
'dev'
|
||||
)} script in ${cmd('package.json')}`
|
||||
);
|
||||
output.error(`Learn More: http://err.sh/vercel/now-dev-as-dev-script`);
|
||||
return 1;
|
||||
}
|
||||
if (scripts && scripts.dev && /\bvercel\b\W+\bdev\b/.test(scripts.dev)) {
|
||||
output.error(
|
||||
`The ${cmd('dev')} script in ${cmd(
|
||||
'package.json'
|
||||
)} must not contain ${cmd('vercel dev')}`
|
||||
client.output.error(
|
||||
`Learn More: https://vercel.link/recursive-invocation-of-commands`
|
||||
);
|
||||
output.error(`Learn More: http://err.sh/vercel/now-dev-as-dev-script`);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -87,6 +87,8 @@ export default async function main(client: Client) {
|
||||
|
||||
const { project, org } = link;
|
||||
|
||||
client.config.currentTeam = org.type === 'team' ? org.id : undefined;
|
||||
|
||||
const result = await pull(
|
||||
client,
|
||||
project,
|
||||
|
||||
@@ -160,24 +160,26 @@ const main = async () => {
|
||||
// * a path to deploy (as in: `vercel path/`)
|
||||
// * a subcommand (as in: `vercel ls`)
|
||||
const targetOrSubcommand = argv._[2];
|
||||
const isBuildOrDev =
|
||||
targetOrSubcommand === 'build' || targetOrSubcommand === 'dev';
|
||||
|
||||
output.print(
|
||||
`${chalk.grey(
|
||||
`${getTitleName()} CLI ${pkg.version}${
|
||||
targetOrSubcommand === 'dev'
|
||||
? ' dev (beta)'
|
||||
: targetOrSubcommand === 'build'
|
||||
? ' build (beta)'
|
||||
: ''
|
||||
}${
|
||||
isCanary ||
|
||||
targetOrSubcommand === 'dev' ||
|
||||
targetOrSubcommand === 'build'
|
||||
? ' — https://vercel.com/feedback'
|
||||
: ''
|
||||
}`
|
||||
)}\n`
|
||||
);
|
||||
if (isBuildOrDev) {
|
||||
console.log(
|
||||
`${chalk.grey(
|
||||
`${getTitleName()} CLI ${
|
||||
pkg.version
|
||||
} ${targetOrSubcommand} (beta) — https://vercel.com/feedback`
|
||||
)}`
|
||||
);
|
||||
} else {
|
||||
output.print(
|
||||
`${chalk.grey(
|
||||
`${getTitleName()} CLI ${pkg.version}${
|
||||
isCanary ? ' — https://vercel.com/feedback' : ''
|
||||
}`
|
||||
)}\n`
|
||||
);
|
||||
}
|
||||
|
||||
// Handle `--version` directly
|
||||
if (!targetOrSubcommand && argv['--version']) {
|
||||
|
||||
@@ -18,12 +18,8 @@ export const isDirectory = (path: string): boolean => {
|
||||
const getGlobalPathConfig = (): string => {
|
||||
let customPath: string | undefined;
|
||||
|
||||
try {
|
||||
const argv = getArgs(process.argv.slice(2), {});
|
||||
customPath = argv['--global-config'];
|
||||
} catch (_error) {
|
||||
// args are optional so consume error
|
||||
}
|
||||
const argv = getArgs(process.argv.slice(2), {}, { permissive: true });
|
||||
customPath = argv['--global-config'];
|
||||
|
||||
const vercelDirectories = XDGAppPaths('com.vercel.cli').dataDirs();
|
||||
|
||||
|
||||
@@ -7,12 +7,8 @@ import getArgs from '../../util/get-args';
|
||||
export default function getLocalPathConfig(prefix: string) {
|
||||
let customPath: string | undefined;
|
||||
|
||||
try {
|
||||
const argv = getArgs(process.argv.slice(2), {});
|
||||
customPath = argv['--local-config'];
|
||||
} catch (_error) {
|
||||
// args are optional so consume error
|
||||
}
|
||||
const argv = getArgs(process.argv.slice(2), {}, { permissive: true });
|
||||
customPath = argv['--local-config'];
|
||||
|
||||
// If `--local-config` flag was specified, then that takes priority
|
||||
if (customPath) {
|
||||
|
||||
@@ -89,6 +89,7 @@ import {
|
||||
} from './types';
|
||||
import { ProjectEnvVariable, ProjectSettings } from '../../types';
|
||||
import exposeSystemEnvs from './expose-system-envs';
|
||||
import { loadCliPlugins } from '../plugins';
|
||||
|
||||
const frontendRuntimeSet = new Set(
|
||||
frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
|
||||
@@ -1349,6 +1350,30 @@ export default class DevServer {
|
||||
return false;
|
||||
};
|
||||
|
||||
runDevMiddleware = async (
|
||||
req: http.IncomingMessage,
|
||||
res: http.ServerResponse
|
||||
) => {
|
||||
const { devMiddlewarePlugins } = await loadCliPlugins(
|
||||
this.cwd,
|
||||
this.output
|
||||
);
|
||||
try {
|
||||
for (let plugin of devMiddlewarePlugins) {
|
||||
const result = await plugin.plugin.runDevMiddleware(req, res, this.cwd);
|
||||
if (result.finished) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return { finished: false };
|
||||
} catch (e) {
|
||||
return {
|
||||
finished: true,
|
||||
error: e,
|
||||
};
|
||||
}
|
||||
};
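A hedged sketch of what a CLI plugin consumed by this hook might export. The package name and handler body are hypothetical; only the `runDevMiddleware(req, res, cwd)` shape and the `{ finished, pathname, query }` result are taken from the code above.

// vercel-plugin-example/index.ts (hypothetical package)
import type { IncomingMessage, ServerResponse } from 'http';

export async function runDevMiddleware(
  req: IncomingMessage,
  res: ServerResponse,
  _cwd: string
) {
  if (req.url === '/old-path') {
    // Rewrite the request and let the dev server continue routing.
    return { finished: false, pathname: '/new-path' };
  }
  if (req.url === '/blocked') {
    res.statusCode = 403;
    res.end('Forbidden');
    return { finished: true }; // response already sent, stop here
  }
  return { finished: false };
}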
|
||||
|
||||
/**
|
||||
* Serve project directory as a v2 deployment.
|
||||
*/
|
||||
@@ -1416,6 +1441,36 @@ export default class DevServer {
|
||||
let prevUrl = req.url;
|
||||
let prevHeaders: HttpHeadersConfig = {};
|
||||
|
||||
const middlewareResult = await this.runDevMiddleware(req, res);
|
||||
|
||||
if (middlewareResult) {
|
||||
if (middlewareResult.error) {
|
||||
this.sendError(
|
||||
req,
|
||||
res,
|
||||
requestId,
|
||||
'EDGE_FUNCTION_INVOCATION_FAILED',
|
||||
500
|
||||
);
|
||||
return;
|
||||
}
|
||||
if (middlewareResult.finished) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (middlewareResult.pathname) {
|
||||
const origUrl = url.parse(req.url || '/', true);
|
||||
origUrl.pathname = middlewareResult.pathname;
|
||||
prevUrl = url.format(origUrl);
|
||||
}
|
||||
if (middlewareResult.query && prevUrl) {
|
||||
const origUrl = url.parse(req.url || '/', true);
|
||||
delete origUrl.search;
|
||||
Object.assign(origUrl.query, middlewareResult.query);
|
||||
prevUrl = url.format(origUrl);
|
||||
}
|
||||
}
|
||||
|
||||
for (const phase of phases) {
|
||||
statusCode = undefined;
|
||||
|
||||
@@ -2106,7 +2161,10 @@ export default class DevServer {
|
||||
process.stdout.write(data.replace(proxyPort, devPort));
|
||||
});
|
||||
|
||||
p.on('exit', () => {
|
||||
p.on('exit', (code: number) => {
|
||||
if (code > 0) {
|
||||
process.exit(code);
|
||||
}
|
||||
this.devProcessPort = undefined;
|
||||
});
|
||||
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { Stats } from 'fs';
|
||||
import { sep, dirname, join, resolve } from 'path';
|
||||
import { readJSON, lstat, readlink, readFile, realpath } from 'fs-extra';
|
||||
import { lstat, readlink, readFile, realpath } from 'fs-extra';
|
||||
import { isCanary } from './is-canary';
|
||||
import { getPkgName } from './pkg-name';
|
||||
|
||||
// `npm` tacks a bunch of extra properties on the `package.json` file,
|
||||
// so check for one of them to determine yarn vs. npm.
|
||||
async function isYarn(): Promise<boolean> {
|
||||
let s: Stats;
|
||||
let binPath = process.argv[1];
|
||||
@@ -20,8 +18,12 @@ async function isYarn(): Promise<boolean> {
|
||||
}
|
||||
}
|
||||
const pkgPath = join(dirname(binPath), '..', 'package.json');
|
||||
const pkg = await readJSON(pkgPath).catch(() => ({}));
|
||||
return !('_id' in pkg);
|
||||
/*
|
||||
* Generally, pkgPath looks like:
|
||||
* "/Users/username/.config/yarn/global/node_modules/vercel/package.json"
|
||||
* "/usr/local/share/.config/yarn/global/node_modules/vercel/package.json"
|
||||
*/
|
||||
return pkgPath.includes(join('yarn', 'global'));
|
||||
}
|
||||
|
||||
async function getConfigPrefix() {
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import crypto from 'crypto';
|
||||
import ua from 'universal-analytics';
|
||||
import { platform, release, userInfo } from 'os';
|
||||
import { getPlatformEnv } from '@vercel/build-utils';
|
||||
|
||||
import userAgent from './ua-browser';
|
||||
@@ -16,10 +15,15 @@ export const shouldCollectMetrics =
|
||||
|
||||
export const metrics = (): ua.Visitor => {
|
||||
const token =
|
||||
typeof config.token === 'string' ? config.token : platform() + release();
|
||||
const salt = userInfo().username;
|
||||
typeof config.token === 'string'
|
||||
? config.token
|
||||
: process.platform + process.arch;
|
||||
const salt =
|
||||
(process.env.USER || '') +
|
||||
(process.env.LANG || '') +
|
||||
(process.env.SHELL || '');
|
||||
const hash = crypto
|
||||
.pbkdf2Sync(token, salt, 1000, 64, 'sha512')
|
||||
.pbkdf2Sync(token, salt, 100, 64, 'sha512')
|
||||
.toString('hex')
|
||||
.substring(0, 24);
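For reference, a hedged sketch of the anonymized-client-id derivation after this change; the token and salt values here are the fallbacks only, since the real token comes from the CLI config when present.

import crypto from 'crypto';

const token = process.platform + process.arch; // fallback when no config token exists
const salt =
  (process.env.USER || '') + (process.env.LANG || '') + (process.env.SHELL || '');

const clientId = crypto
  .pbkdf2Sync(token, salt, 100, 64, 'sha512')
  .toString('hex')
  .substring(0, 24); // 24-char stable, non-reversible identifier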
|
||||
|
||||
|
||||
@@ -125,10 +125,14 @@ export class Output {
|
||||
this.debug(`Spinner invoked (${message}) with a ${delay}ms delay`);
|
||||
return;
|
||||
}
|
||||
if (this._spinner) {
|
||||
this._spinner.text = message;
|
||||
if (this.isTTY) {
|
||||
if (this._spinner) {
|
||||
this._spinner.text = message;
|
||||
} else {
|
||||
this._spinner = wait(message, delay);
|
||||
}
|
||||
} else {
|
||||
this._spinner = wait(message, delay);
|
||||
this.print(`${message}\n`);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,11 +1,6 @@
|
||||
import { relative as nativeRelative } from 'path';
|
||||
|
||||
const isWin = process.platform === 'win32';
|
||||
import { normalizePath } from '@vercel/build-utils';
|
||||
|
||||
export function relative(a: string, b: string): string {
|
||||
let p = nativeRelative(a, b);
|
||||
if (isWin) {
|
||||
p = p.replace(/\\/g, '/');
|
||||
}
|
||||
return p;
|
||||
return normalizePath(nativeRelative(a, b));
|
||||
}
|
||||
|
||||
76
packages/cli/src/util/plugins.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import code from '../util/output/code';
|
||||
import { getColorForPkgName } from '../util/output/color-name-cache';
|
||||
import cliPkgJson from '../util/pkg';
|
||||
import { scanParentDirs } from '@vercel/build-utils';
|
||||
import { Output } from './output';
|
||||
|
||||
const VERCEL_PLUGIN_PREFIX = 'vercel-plugin-';
|
||||
|
||||
export async function loadCliPlugins(cwd: string, output: Output) {
|
||||
const { packageJson } = await scanParentDirs(cwd, true);
|
||||
|
||||
let pluginCount = 0;
|
||||
const preBuildPlugins = [];
|
||||
const buildPlugins = [];
|
||||
const devServerPlugins = [];
|
||||
const devMiddlewarePlugins = [];
|
||||
const deps = new Set(
|
||||
[
|
||||
...Object.keys(packageJson?.dependencies || {}),
|
||||
...Object.keys(packageJson?.devDependencies || {}),
|
||||
...Object.keys(cliPkgJson.dependencies),
|
||||
].filter(dep => dep.startsWith(VERCEL_PLUGIN_PREFIX))
|
||||
);
|
||||
|
||||
for (let dep of deps) {
|
||||
pluginCount++;
|
||||
const resolved = require.resolve(dep, {
|
||||
paths: [cwd, process.cwd(), __dirname],
|
||||
});
|
||||
let plugin;
|
||||
try {
|
||||
plugin = require(resolved);
|
||||
|
||||
const color = getColorForPkgName(dep);
|
||||
if (typeof plugin.preBuild === 'function') {
|
||||
preBuildPlugins.push({
|
||||
plugin,
|
||||
name: dep,
|
||||
color,
|
||||
});
|
||||
}
|
||||
if (typeof plugin.build === 'function') {
|
||||
buildPlugins.push({
|
||||
plugin,
|
||||
name: dep,
|
||||
color,
|
||||
});
|
||||
}
|
||||
if (typeof plugin.startDevServer === 'function') {
|
||||
devServerPlugins.push({
|
||||
plugin,
|
||||
name: dep,
|
||||
color,
|
||||
});
|
||||
}
|
||||
if (typeof plugin.runDevMiddleware === 'function') {
|
||||
devMiddlewarePlugins.push({
|
||||
plugin,
|
||||
name: dep,
|
||||
color,
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
output.error(`Failed to import ${code(dep)}`);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
pluginCount,
|
||||
preBuildPlugins,
|
||||
buildPlugins,
|
||||
devServerPlugins,
|
||||
devMiddlewarePlugins,
|
||||
};
|
||||
}
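A minimal usage sketch, assuming a caller that already has an `Output` instance (as `vercel build` does) and assuming the relative import paths; the diff does not show what arguments `preBuild` receives, so it is called bare here.

import { loadCliPlugins } from './plugins';
import { Output } from './output';

async function runPreBuild(cwd: string, output: Output) {
  const { preBuildPlugins, pluginCount } = await loadCliPlugins(cwd, output);
  output.debug(`Found ${pluginCount} plugin(s)`);
  for (const { plugin, name } of preBuildPlugins) {
    output.debug(`Running preBuild for ${name}`);
    await plugin.preBuild();
  }
}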
|
||||
@@ -244,20 +244,27 @@ export async function linkFolderToProject(
|
||||
try {
|
||||
const gitIgnorePath = join(path, '.gitignore');
|
||||
|
||||
const gitIgnore = await readFile(gitIgnorePath, 'utf8').catch(() => null);
|
||||
const EOL = gitIgnore && gitIgnore.includes('\r\n') ? '\r\n' : os.EOL;
|
||||
let gitIgnore =
|
||||
(await readFile(gitIgnorePath, 'utf8').catch(() => null)) ?? '';
|
||||
const EOL = gitIgnore.includes('\r\n') ? '\r\n' : os.EOL;
|
||||
let contentModified = false;
|
||||
|
||||
if (
|
||||
!gitIgnore ||
|
||||
!gitIgnore.split(EOL).includes(VERCEL_DIR) ||
|
||||
!gitIgnore.split(EOL).includes(VERCEL_OUTPUT_DIR)
|
||||
) {
|
||||
await writeFile(
|
||||
gitIgnorePath,
|
||||
gitIgnore
|
||||
? `${gitIgnore}${EOL}${VERCEL_DIR}${EOL}${VERCEL_OUTPUT_DIR}${EOL}`
|
||||
: `${VERCEL_DIR}${EOL}${VERCEL_OUTPUT_DIR}${EOL}`
|
||||
);
|
||||
if (!gitIgnore.split(EOL).includes(VERCEL_DIR)) {
|
||||
gitIgnore += `${
|
||||
gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
|
||||
}${VERCEL_DIR}${EOL}`;
|
||||
contentModified = true;
|
||||
}
|
||||
|
||||
if (!gitIgnore.split(EOL).includes(VERCEL_OUTPUT_DIR)) {
|
||||
gitIgnore += `${
|
||||
gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
|
||||
}${VERCEL_OUTPUT_DIR}${EOL}`;
|
||||
contentModified = true;
|
||||
}
|
||||
|
||||
if (contentModified) {
|
||||
await writeFile(gitIgnorePath, gitIgnore);
|
||||
isGitIgnoreUpdated = true;
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
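The hunk above appends `.vercel` and `.output` to `.gitignore` individually, preserving whatever EOL style the file already uses. A condensed sketch of the same idea, with the function name and parameters invented for illustration:

import os from 'os';
import { readFile, writeFile } from 'fs-extra';

async function ensureIgnored(gitIgnorePath: string, entries: string[]) {
  let content = (await readFile(gitIgnorePath, 'utf8').catch(() => null)) ?? '';
  const EOL = content.includes('\r\n') ? '\r\n' : os.EOL;
  let modified = false;
  for (const entry of entries) {
    // Only append entries that are not already present as whole lines.
    if (!content.split(EOL).includes(entry)) {
      content += `${content.endsWith(EOL) || content.length === 0 ? '' : EOL}${entry}${EOL}`;
      modified = true;
    }
  }
  if (modified) await writeFile(gitIgnorePath, content);
  return modified;
}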
3
packages/cli/test/fixtures/unit/edge-middleware-error/_middleware.js
vendored
Normal file
@@ -0,0 +1,3 @@
export default () => {
  throw new Error('asdf');
};
3
packages/cli/test/fixtures/unit/edge-middleware-invalid-response/_middleware.js
vendored
Normal file
@@ -0,0 +1,3 @@
export default function () {
  return 'freecandy';
}
5
packages/cli/test/fixtures/unit/edge-middleware-ts/_middleware.ts
vendored
Normal file
@@ -0,0 +1,5 @@
import { response } from './response';

export default () => {
  return new Response(response);
};
1
packages/cli/test/fixtures/unit/edge-middleware-ts/response.ts
vendored
Normal file
@@ -0,0 +1 @@
export const response = 'response';
10
packages/cli/test/fixtures/unit/edge-middleware/_middleware.js
vendored
Normal file
@@ -0,0 +1,10 @@
import response from './response.json';

export default function () {
  return new Response(JSON.stringify(response), {
    status: 200,
    headers: {
      'Content-Type': 'application/json',
    },
  });
}
1
packages/cli/test/fixtures/unit/edge-middleware/index.html
vendored
Normal file
@@ -0,0 +1 @@
not hello world
3
packages/cli/test/fixtures/unit/edge-middleware/response.json
vendored
Normal file
@@ -0,0 +1,3 @@
{
  "text": "hello world"
}
1
packages/cli/test/fixtures/unit/vercel-pull-next/static/robots.txt
vendored
Normal file
@@ -0,0 +1 @@
User-Agent: *
@@ -19,7 +19,7 @@ const getRevertAliasConfigFile = () => {
|
||||
],
|
||||
});
|
||||
};
|
||||
module.exports = async function prepare(session) {
|
||||
module.exports = async function prepare(session, binaryPath) {
|
||||
const spec = {
|
||||
'static-single-file': {
|
||||
'first.png': getImageFile(session, { size: 30 }),
|
||||
@@ -114,6 +114,23 @@ module.exports = async function prepare(session) {
|
||||
2
|
||||
),
|
||||
},
|
||||
'dev-fail-on-recursion-command': {
|
||||
'package.json': '{}',
|
||||
},
|
||||
'build-fail-on-recursion-command': {
|
||||
'package.json': '{}',
|
||||
},
|
||||
'build-fail-on-recursion-script': {
|
||||
'package.json': JSON.stringify(
|
||||
{
|
||||
scripts: {
|
||||
build: `${binaryPath} build`,
|
||||
},
|
||||
},
|
||||
null,
|
||||
2
|
||||
),
|
||||
},
|
||||
'static-deployment': {
|
||||
'index.txt': 'Hello World',
|
||||
},
|
||||
@@ -348,6 +365,10 @@ module.exports = async function prepare(session) {
|
||||
'project-link-dev': {
|
||||
'package.json': '{}',
|
||||
},
|
||||
'project-link-gitignore': {
|
||||
'package.json': '{}',
|
||||
'.gitignore': '.output',
|
||||
},
|
||||
'project-link-legacy': {
|
||||
'index.html': 'Hello',
|
||||
'vercel.json': '{"builds":[{"src":"*.html","use":"@vercel/static"}]}',
|
||||
|
||||
432
packages/cli/test/integration.js
vendored
@@ -252,10 +252,69 @@ const createUser = async () => {
|
||||
|
||||
const getConfigAuthPath = () => path.join(globalDir, 'auth.json');
|
||||
|
||||
async function setupProject(process, projectName, overrides) {
|
||||
await waitForPrompt(process, chunk => /Set up [^?]+\?/.test(chunk));
|
||||
process.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(process, chunk => /Which scope [^?]+\?/.test(chunk));
|
||||
process.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes('Link to existing project?')
|
||||
);
|
||||
process.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
process.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
process.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
|
||||
if (overrides) {
|
||||
process.stdin.write('yes\n');
|
||||
|
||||
const { buildCommand, outputDirectory, devCommand } = overrides;
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
process.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
process.stdin.write(`${buildCommand ?? ''}\n`);
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
process.stdin.write(`${outputDirectory ?? ''}\n`);
|
||||
|
||||
await waitForPrompt(process, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
process.stdin.write(`${devCommand ?? ''}\n`);
|
||||
} else {
|
||||
process.stdin.write('no\n');
|
||||
}
|
||||
|
||||
await waitForPrompt(process, chunk => chunk.includes('Linked to'));
|
||||
}
|
||||
|
||||
test.before(async () => {
|
||||
try {
|
||||
await createUser();
|
||||
await prepareFixtures(contextName);
|
||||
await prepareFixtures(contextName, binaryPath);
|
||||
} catch (err) {
|
||||
console.log('Failed `test.before`');
|
||||
console.log(err);
|
||||
@@ -2227,13 +2286,93 @@ test('whoami', async t => {
|
||||
t.is(stdout, contextName, formatOutput({ stdout, stderr }));
|
||||
});
|
||||
|
||||
test('fail `now dev` dev script without now.json', async t => {
|
||||
test('[vercel dev] fails when dev script calls vercel dev recursively', async t => {
|
||||
const deploymentPath = fixture('now-dev-fail-dev-script');
|
||||
const { exitCode, stderr } = await execute(['dev', deploymentPath]);
|
||||
|
||||
t.is(exitCode, 1);
|
||||
t.true(
|
||||
stderr.includes('must not contain `now dev`'),
|
||||
stderr.includes('must not recursively invoke itself'),
|
||||
`Received instead: "${stderr}"`
|
||||
);
|
||||
});
|
||||
|
||||
test('[vercel dev] fails when development command calls vercel dev recursively', async t => {
|
||||
const dir = fixture('dev-fail-on-recursion-command');
|
||||
const projectName = `dev-fail-on-recursion-command-${
|
||||
Math.random().toString(36).split('.')[1]
|
||||
}`;
|
||||
|
||||
const dev = execa(binaryPath, ['dev', ...defaultArgs], {
|
||||
cwd: dir,
|
||||
reject: false,
|
||||
});
|
||||
|
||||
await setupProject(dev, projectName, {
|
||||
devCommand: `${binaryPath} dev`,
|
||||
});
|
||||
|
||||
const { exitCode, stderr } = await dev;
|
||||
|
||||
t.is(exitCode, 1);
|
||||
t.true(
|
||||
stderr.includes('must not recursively invoke itself'),
|
||||
`Received instead: "${stderr}"`
|
||||
);
|
||||
});
|
||||
|
||||
test('[vercel build] fails when build command calls vercel build recursively', async t => {
|
||||
const dir = fixture('build-fail-on-recursion-command');
|
||||
const projectName = `build-fail-on-recursion-command-${
|
||||
Math.random().toString(36).split('.')[1]
|
||||
}`;
|
||||
|
||||
const build = execa(binaryPath, ['build', ...defaultArgs], {
|
||||
cwd: dir,
|
||||
reject: false,
|
||||
});
|
||||
|
||||
await waitForPrompt(build, chunk =>
|
||||
chunk.includes('No Project Settings found locally')
|
||||
);
|
||||
build.stdin.write('yes\n');
|
||||
|
||||
await setupProject(build, projectName, {
|
||||
buildCommand: `${binaryPath} build`,
|
||||
});
|
||||
|
||||
const { exitCode, stderr } = await build;
|
||||
|
||||
t.is(exitCode, 1);
|
||||
t.true(
|
||||
stderr.includes('must not recursively invoke itself'),
|
||||
`Received instead: "${stderr}"`
|
||||
);
|
||||
});
|
||||
|
||||
test('[vercel build] fails when build script calls vercel build recursively', async t => {
|
||||
const dir = fixture('build-fail-on-recursion-script');
|
||||
const projectName = `build-fail-on-recursion-script-${
|
||||
Math.random().toString(36).split('.')[1]
|
||||
}`;
|
||||
|
||||
const build = execa(binaryPath, ['build', ...defaultArgs], {
|
||||
cwd: dir,
|
||||
reject: false,
|
||||
});
|
||||
|
||||
await waitForPrompt(build, chunk =>
|
||||
chunk.includes('No Project Settings found locally')
|
||||
);
|
||||
build.stdin.write('yes\n');
|
||||
|
||||
await setupProject(build, projectName);
|
||||
|
||||
const { exitCode, stderr } = await build;
|
||||
|
||||
t.is(exitCode, 1);
|
||||
t.true(
|
||||
stderr.includes('must not recursively invoke itself'),
|
||||
`Received instead: "${stderr}"`
|
||||
);
|
||||
});
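The recursion guard in these tests is triggered by the fixture's own configuration. The fixture contents are not shown in this diff, but a `build-fail-on-recursion-script` fixture presumably ships a package.json whose build script calls the CLI again, roughly like this hypothetical sketch:

{
  "name": "build-fail-on-recursion-script",
  "private": true,
  "scripts": {
    "build": "vercel build"
  }
}

With that in place, `vercel build` detects that running the build script would invoke itself and exits with the "must not recursively invoke itself" error asserted above.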
|
||||
@@ -2544,7 +2683,7 @@ test('deploy a Lambda with 3 seconds of maxDuration', async t => {
|
||||
const url = new URL(output.stdout);
|
||||
|
||||
// Should time out
|
||||
url.pathname = '/api/wait-for/4';
|
||||
url.pathname = '/api/wait-for/5';
|
||||
const response1 = await fetch(url.href);
|
||||
t.is(
|
||||
response1.status,
|
||||
@@ -2553,7 +2692,7 @@ test('deploy a Lambda with 3 seconds of maxDuration', async t => {
|
||||
);
|
||||
|
||||
// Should not time out
|
||||
url.pathname = '/api/wait-for/2';
|
||||
url.pathname = '/api/wait-for/1';
|
||||
const response2 = await fetch(url.href);
|
||||
t.is(
|
||||
response2.status,
|
||||
@@ -2673,69 +2812,20 @@ test('ensure `github` and `scope` are not sent to the API', async t => {
|
||||
});
|
||||
|
||||
test('should show prompts to set up project during first deploy', async t => {
|
||||
const directory = fixture('project-link-deploy');
|
||||
const dir = fixture('project-link-deploy');
|
||||
const projectName = `project-link-deploy-${
|
||||
Math.random().toString(36).split('.')[1]
|
||||
}`;
|
||||
|
||||
// remove previously linked project if it exists
|
||||
await remove(path.join(directory, '.vercel'));
|
||||
await remove(path.join(dir, '.vercel'));
|
||||
|
||||
const now = execa(binaryPath, [directory, ...defaultArgs]);
|
||||
const now = execa(binaryPath, [dir, ...defaultArgs]);
|
||||
|
||||
await waitForPrompt(now, chunk => /Set up and deploy [^?]+\?/.test(chunk));
|
||||
now.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('Which scope do you want to deploy to?')
|
||||
);
|
||||
now.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('Link to existing project?')
|
||||
);
|
||||
now.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
now.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
now.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
now.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
now.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
now.stdin.write(
|
||||
`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`
|
||||
);
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
now.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(now, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
now.stdin.write(`\n`);
|
||||
|
||||
await waitForPrompt(now, chunk => chunk.includes('Linked to'));
|
||||
await setupProject(now, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
const output = await now;
|
||||
|
||||
@@ -2743,19 +2833,17 @@ test('should show prompts to set up project during first deploy', async t => {
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
|
||||
// Ensure .gitignore is created
|
||||
t.is(
|
||||
(await readFile(path.join(directory, '.gitignore'))).toString(),
|
||||
'.vercel\n'
|
||||
);
|
||||
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
|
||||
t.is(gitignore, '.vercel\n.output\n');
|
||||
|
||||
// Ensure .vercel/project.json and .vercel/README.txt are created
|
||||
t.is(
|
||||
await exists(path.join(directory, '.vercel', 'project.json')),
|
||||
await exists(path.join(dir, '.vercel', 'project.json')),
|
||||
true,
|
||||
'project.json should be created'
|
||||
);
|
||||
t.is(
|
||||
await exists(path.join(directory, '.vercel', 'README.txt')),
|
||||
await exists(path.join(dir, '.vercel', 'README.txt')),
|
||||
true,
|
||||
'README.txt should be created'
|
||||
);
|
||||
@@ -2769,13 +2857,7 @@ test('should show prompts to set up project during first deploy', async t => {
|
||||
// and output directory
|
||||
let stderr = '';
|
||||
const port = 58351;
|
||||
const dev = execa(binaryPath, [
|
||||
'dev',
|
||||
'--listen',
|
||||
port,
|
||||
directory,
|
||||
...defaultArgs,
|
||||
]);
|
||||
const dev = execa(binaryPath, ['dev', '--listen', port, dir, ...defaultArgs]);
|
||||
dev.stderr.setEncoding('utf8');
|
||||
|
||||
try {
|
||||
@@ -3309,55 +3391,10 @@ test('[vc link] should show prompts to set up project', async t => {
|
||||
|
||||
const vc = execa(binaryPath, ['link', ...defaultArgs], { cwd: dir });
|
||||
|
||||
await waitForPrompt(vc, chunk => /Set up [^?]+\?/.test(chunk));
|
||||
vc.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('Which scope should contain your project?')
|
||||
);
|
||||
vc.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(vc, chunk => chunk.includes('Link to existing project?'));
|
||||
vc.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
vc.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
vc.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
vc.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
vc.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
vc.stdin.write(`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
vc.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
vc.stdin.write(`\n`);
|
||||
|
||||
await waitForPrompt(vc, chunk => chunk.includes('Linked to'));
|
||||
await setupProject(vc, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
const output = await vc;
|
||||
|
||||
@@ -3365,7 +3402,8 @@ test('[vc link] should show prompts to set up project', async t => {
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
|
||||
// Ensure .gitignore is created
|
||||
t.is((await readFile(path.join(dir, '.gitignore'))).toString(), '.vercel\n');
|
||||
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
|
||||
t.is(gitignore, '.vercel\n.output\n');
|
||||
|
||||
// Ensure .vercel/project.json and .vercel/README.txt are created
|
||||
t.is(
|
||||
@@ -3399,7 +3437,8 @@ test('[vc link --confirm] should not show prompts and autolink', async t => {
|
||||
t.regex(stderr, /Linked to /m);
|
||||
|
||||
// Ensure .gitignore is created
|
||||
t.is((await readFile(path.join(dir, '.gitignore'))).toString(), '.vercel\n');
|
||||
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
|
||||
t.is(gitignore, '.vercel\n.output\n');
|
||||
|
||||
// Ensure .vercel/project.json and .vercel/README.txt are created
|
||||
t.is(
|
||||
@@ -3414,6 +3453,29 @@ test('[vc link --confirm] should not show prompts and autolink', async t => {
|
||||
);
|
||||
});
|
||||
|
||||
test('[vc link] should not duplicate paths in .gitignore', async t => {
|
||||
const dir = fixture('project-link-gitignore');
|
||||
|
||||
// remove previously linked project if it exists
|
||||
await remove(path.join(dir, '.vercel'));
|
||||
|
||||
const { exitCode, stderr, stdout } = await execa(
|
||||
binaryPath,
|
||||
['link', '--confirm', ...defaultArgs],
|
||||
{ cwd: dir, reject: false }
|
||||
);
|
||||
|
||||
// Ensure the exit code is right
|
||||
t.is(exitCode, 0, formatOutput({ stderr, stdout }));
|
||||
|
||||
// Ensure the message is correct pattern
|
||||
t.regex(stderr, /Linked to /m);
|
||||
|
||||
// Ensure .gitignore is created
|
||||
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
|
||||
t.is(gitignore, '.output\n.vercel\n');
|
||||
});
|
||||
|
||||
test('[vc dev] should show prompts to set up project', async t => {
|
||||
const dir = fixture('project-link-dev');
|
||||
const port = 58352;
|
||||
@@ -3428,62 +3490,14 @@ test('[vc dev] should show prompts to set up project', async t => {
|
||||
cwd: dir,
|
||||
});
|
||||
|
||||
await waitForPrompt(dev, chunk => /Set up and develop [^?]+\?/.test(chunk));
|
||||
dev.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Which scope should contain your project?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Link to existing project?')
|
||||
);
|
||||
dev.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
dev.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
dev.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
dev.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
dev.stdin.write(
|
||||
`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`
|
||||
);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
dev.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
dev.stdin.write(`\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk => chunk.includes('Linked to'));
|
||||
await setupProject(dev, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
});
|
||||
|
||||
// Ensure .gitignore is created
|
||||
t.is((await readFile(path.join(dir, '.gitignore'))).toString(), '.vercel\n');
|
||||
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
|
||||
t.is(gitignore, '.vercel\n.output\n');
|
||||
|
||||
// Ensure .vercel/project.json and .vercel/README.txt are created
|
||||
t.is(
|
||||
@@ -3549,7 +3563,8 @@ test('[vc link] should show project prompts but not framework when `builds` defi
|
||||
t.is(output.exitCode, 0, formatOutput(output));
|
||||
|
||||
// Ensure .gitignore is created
|
||||
t.is((await readFile(path.join(dir, '.gitignore'))).toString(), '.vercel\n');
|
||||
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
|
||||
t.is(gitignore, '.vercel\n.output\n');
|
||||
|
||||
// Ensure .vercel/project.json and .vercel/README.txt are created
|
||||
t.is(
|
||||
@@ -3578,59 +3593,12 @@ test('[vc dev] should send the platform proxy request headers to frontend dev se
|
||||
cwd: dir,
|
||||
});
|
||||
|
||||
await waitForPrompt(dev, chunk => /Set up and develop [^?]+\?/.test(chunk));
|
||||
dev.stdin.write('yes\n');
|
||||
await setupProject(dev, projectName, {
|
||||
buildCommand: `mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html`,
|
||||
outputDirectory: 'o',
|
||||
devCommand: 'node server.js',
|
||||
});
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Which scope should contain your project?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Link to existing project?')
|
||||
);
|
||||
dev.stdin.write('no\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('What’s your project’s name?')
|
||||
);
|
||||
dev.stdin.write(`${projectName}\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('In which directory is your code located?')
|
||||
);
|
||||
dev.stdin.write('\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes('Want to override the settings?')
|
||||
);
|
||||
dev.stdin.write('yes\n');
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(
|
||||
'Which settings would you like to overwrite (select multiple)?'
|
||||
)
|
||||
);
|
||||
dev.stdin.write('a\n'); // 'a' means select all
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Build Command?`)
|
||||
);
|
||||
dev.stdin.write(
|
||||
`mkdir -p o && echo '<h1>custom hello</h1>' > o/index.html\n`
|
||||
);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Output Directory?`)
|
||||
);
|
||||
dev.stdin.write(`o\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk =>
|
||||
chunk.includes(`What's your Development Command?`)
|
||||
);
|
||||
dev.stdin.write(`node server.js\n`);
|
||||
|
||||
await waitForPrompt(dev, chunk => chunk.includes('Linked to'));
|
||||
await waitForPrompt(dev, chunk => chunk.includes('Ready! Available at'));
|
||||
|
||||
// Ensure that `vc dev` also works
|
||||
|
||||
@@ -335,4 +335,54 @@ describe('DevServer', () => {
|
||||
expect(body).toEqual('The page could not be found.\n\nNOT_FOUND\n');
|
||||
})
|
||||
);
|
||||
|
||||
it(
|
||||
'should support edge middleware',
|
||||
testFixture('edge-middleware', async server => {
|
||||
const response = await fetch(`${server.address}/index.html`);
|
||||
const body = await response.json();
|
||||
expect(body).toEqual(
|
||||
JSON.parse(
|
||||
fs.readFileSync(
|
||||
path.join(
|
||||
__dirname,
|
||||
'../../fixtures/unit/edge-middleware/response.json'
|
||||
),
|
||||
'utf8'
|
||||
)
|
||||
)
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
it(
|
||||
'should work with middleware written in typescript',
|
||||
testFixture('edge-middleware-ts', async server => {
|
||||
const response = await fetch(`${server.address}/index.html`);
|
||||
const body = await response.text();
|
||||
expect(body).toStrictEqual('response');
|
||||
})
|
||||
);
|
||||
|
||||
it(
|
||||
'should render an error page when the middleware throws',
|
||||
testFixture('edge-middleware-error', async server => {
|
||||
const response = await fetch(`${server.address}/index.html`);
|
||||
const body = await response.text();
|
||||
expect(body).toStrictEqual(
|
||||
'A server error has occurred\n\nEDGE_FUNCTION_INVOCATION_FAILED\n'
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
it(
|
||||
'should render an error page when the middleware returns not a Response',
|
||||
testFixture('edge-middleware-invalid-response', async server => {
|
||||
const response = await fetch(`${server.address}/index.html`);
|
||||
const body = await response.text();
|
||||
expect(body).toStrictEqual(
|
||||
'A server error has occurred\n\nEDGE_FUNCTION_INVOCATION_FAILED\n'
|
||||
);
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
@@ -5,7 +5,7 @@ describe('getUpdateCommand', () => {
|
||||
it('should detect update command', async () => {
|
||||
const updateCommand = await getUpdateCommand();
|
||||
expect(updateCommand).toEqual(
|
||||
`yarn add vercel@${isCanary() ? 'canary' : 'latest'}`
|
||||
`npm i vercel@${isCanary() ? 'canary' : 'latest'}`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/client",
|
||||
"version": "10.2.3-canary.14",
|
||||
"version": "10.2.3-canary.21",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"homepage": "https://vercel.com",
|
||||
@@ -40,7 +40,7 @@
|
||||
]
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "2.12.3-canary.14",
|
||||
"@vercel/build-utils": "2.12.3-canary.20",
|
||||
"@zeit/fetch": "5.2.0",
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "3.0.0",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/frameworks",
|
||||
"version": "0.5.1-canary.10",
|
||||
"version": "0.5.1-canary.12",
|
||||
"main": "./dist/frameworks.js",
|
||||
"types": "./dist/frameworks.d.ts",
|
||||
"files": [
|
||||
@@ -20,7 +20,7 @@
|
||||
"@types/js-yaml": "3.12.1",
|
||||
"@types/node": "12.0.4",
|
||||
"@types/node-fetch": "2.5.8",
|
||||
"@vercel/routing-utils": "1.11.4-canary.5",
|
||||
"@vercel/routing-utils": "1.11.4-canary.6",
|
||||
"ajv": "6.12.2",
|
||||
"typescript": "4.3.4"
|
||||
}
|
||||
|
||||
@@ -43,6 +43,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `blitz build`',
|
||||
value: 'blitz build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'blitz start',
|
||||
@@ -51,8 +52,6 @@ export const frameworks = [
|
||||
placeholder: 'Next.js default',
|
||||
},
|
||||
},
|
||||
devCommand: 'blitz start',
|
||||
buildCommand: 'blitz build',
|
||||
getFsOutputDir: async () => '.next',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -83,6 +82,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `next build`',
|
||||
value: 'next build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'next dev --port $PORT',
|
||||
@@ -98,8 +98,6 @@ export const frameworks = [
|
||||
dependencies: ['next-plugin-sentry', 'next-sentry-source-maps'],
|
||||
},
|
||||
],
|
||||
devCommand: 'next dev --port $PORT',
|
||||
buildCommand: 'next build',
|
||||
getFsOutputDir: async () => '.next',
|
||||
getOutputDirName: async () => 'public',
|
||||
cachePattern: '.next/cache/**',
|
||||
@@ -131,6 +129,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `gatsby build`',
|
||||
value: 'gatsby build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'gatsby develop --port $PORT',
|
||||
@@ -141,8 +140,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'gatsby',
|
||||
devCommand: 'gatsby develop --port $PORT',
|
||||
buildCommand: 'gatsby build',
|
||||
getOutputDirName: async () => 'public',
|
||||
getFsOutputDir: async () => 'public',
|
||||
defaultRoutes: async (dirPrefix: string) => {
|
||||
@@ -219,6 +216,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `hexo generate`',
|
||||
value: 'hexo generate',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'hexo server --port $PORT',
|
||||
@@ -229,8 +227,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'hexo',
|
||||
devCommand: 'hexo server --port $PORT',
|
||||
buildCommand: 'hexo generate',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -259,6 +255,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `npx @11ty/eleventy`',
|
||||
value: 'npx @11ty/eleventy',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'npx @11ty/eleventy --serve --watch --port $PORT',
|
||||
@@ -269,8 +266,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@11ty/eleventy',
|
||||
devCommand: 'npx @11ty/eleventy --serve --watch --port $PORT',
|
||||
buildCommand: 'npx @11ty/eleventy',
|
||||
getFsOutputDir: async () => '_site',
|
||||
getOutputDirName: async () => '_site',
|
||||
cachePattern: '.cache/**',
|
||||
@@ -300,6 +295,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `docusaurus build`',
|
||||
value: 'docusaurus build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'docusaurus start --port $PORT',
|
||||
@@ -310,8 +306,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@docusaurus/core',
|
||||
devCommand: 'docusaurus start --port $PORT',
|
||||
buildCommand: 'docusaurus build',
|
||||
getFsOutputDir: async (dirPrefix: string) => {
|
||||
const base = 'build';
|
||||
try {
|
||||
@@ -456,6 +450,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `docusaurus-build`',
|
||||
value: 'docusaurus-build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'docusaurus-start --port $PORT',
|
||||
@@ -466,8 +461,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'docusaurus',
|
||||
devCommand: 'docusaurus-start --port $PORT',
|
||||
buildCommand: 'docusaurus-build',
|
||||
getFsOutputDir: async (dirPrefix: string) => {
|
||||
const base = 'build';
|
||||
try {
|
||||
@@ -523,6 +516,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `preact build`',
|
||||
value: 'preact build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'preact watch --port $PORT',
|
||||
@@ -533,8 +527,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'preact-cli',
|
||||
devCommand: 'preact watch --port $PORT',
|
||||
buildCommand: 'preact build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
@@ -581,6 +573,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `dojo build`',
|
||||
value: 'dojo build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'dojo build -m dev -w -s -p $PORT',
|
||||
@@ -591,8 +584,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@dojo/cli',
|
||||
devCommand: 'dojo build -m dev -w -s -p $PORT',
|
||||
buildCommand: 'dojo build',
|
||||
getFsOutputDir: async () => 'output/dist',
|
||||
getOutputDirName: async () => join('output', 'dist'),
|
||||
defaultRoutes: [
|
||||
@@ -649,6 +640,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ember build`',
|
||||
value: 'ember build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ember serve --port $PORT',
|
||||
@@ -659,8 +651,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'ember-cli',
|
||||
devCommand: 'ember serve --port $PORT',
|
||||
buildCommand: 'ember build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -705,6 +695,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `vue-cli-service build`',
|
||||
value: 'vue-cli-service build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'vue-cli-service serve --port $PORT',
|
||||
@@ -715,8 +706,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@vue/cli-service',
|
||||
devCommand: 'vue-cli-service serve --port $PORT',
|
||||
buildCommand: 'vue-cli-service build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -783,6 +772,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ng build && scully`',
|
||||
value: 'ng build && scully',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ng serve --port $PORT',
|
||||
@@ -793,8 +783,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@scullyio/init',
|
||||
devCommand: 'ng serve --port $PORT',
|
||||
buildCommand: 'ng build && scully',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist/static',
|
||||
},
|
||||
@@ -822,6 +810,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ng build`',
|
||||
value: 'ng build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ng serve --port $PORT',
|
||||
@@ -831,8 +820,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@ionic/angular',
|
||||
devCommand: 'ng serve --port $PORT',
|
||||
buildCommand: 'ng build',
|
||||
getFsOutputDir: async () => 'www',
|
||||
getOutputDirName: async () => 'www',
|
||||
defaultRoutes: [
|
||||
@@ -876,6 +863,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `ng build`',
|
||||
value: 'ng build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'ng serve --port $PORT',
|
||||
@@ -886,8 +874,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@angular/cli',
|
||||
devCommand: 'ng serve --port $PORT',
|
||||
buildCommand: 'ng build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async (dirPrefix: string) => {
|
||||
const base = 'dist';
|
||||
@@ -945,6 +931,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `polymer build`',
|
||||
value: 'polymer build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'polymer serve --port $PORT',
|
||||
@@ -955,8 +942,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'polymer-cli',
|
||||
devCommand: 'polymer serve --port $PORT',
|
||||
buildCommand: 'polymer build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async (dirPrefix: string) => {
|
||||
const base = 'build';
|
||||
@@ -1016,6 +1001,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `rollup -c`',
|
||||
value: 'rollup -c',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'rollup -c -w',
|
||||
@@ -1025,8 +1011,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'sirv-cli',
|
||||
devCommand: 'rollup -c -w',
|
||||
buildCommand: 'rollup -c',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultRoutes: [
|
||||
@@ -1070,6 +1054,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `svelte-kit build`',
|
||||
value: 'svelte-kit build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'svelte-kit dev --port $PORT',
|
||||
@@ -1079,8 +1064,6 @@ export const frameworks = [
|
||||
placeholder: 'public',
|
||||
},
|
||||
},
|
||||
devCommand: 'svelte-kit dev --port $PORT',
|
||||
buildCommand: 'svelte-kit build',
|
||||
getFsOutputDir: async () => '.output',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -1108,6 +1091,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `react-scripts build`',
|
||||
value: 'react-scripts build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'react-scripts start',
|
||||
@@ -1117,8 +1101,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@ionic/react',
|
||||
devCommand: 'react-scripts start',
|
||||
buildCommand: 'react-scripts build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
@@ -1216,6 +1198,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `react-scripts build`',
|
||||
value: 'react-scripts build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'react-scripts start',
|
||||
@@ -1225,8 +1208,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'react-scripts',
|
||||
devCommand: 'react-scripts start',
|
||||
buildCommand: 'react-scripts build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
defaultRoutes: [
|
||||
@@ -1318,6 +1299,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `gridsome build`',
|
||||
value: 'gridsome build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'gridsome develop -p $PORT',
|
||||
@@ -1328,8 +1310,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'gridsome',
|
||||
devCommand: 'gridsome develop -p $PORT',
|
||||
buildCommand: 'gridsome build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
},
|
||||
@@ -1357,6 +1337,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `umi build`',
|
||||
value: 'umi build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'umi dev --port $PORT',
|
||||
@@ -1367,8 +1348,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'umi',
|
||||
devCommand: 'umi dev --port $PORT',
|
||||
buildCommand: 'umi build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -1412,6 +1391,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `sapper export`',
|
||||
value: 'sapper export',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'sapper dev --port $PORT',
|
||||
@@ -1422,8 +1402,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'sapper',
|
||||
devCommand: 'sapper dev --port $PORT',
|
||||
buildCommand: 'sapper export',
|
||||
getFsOutputDir: async () => '__sapper__/export',
|
||||
getOutputDirName: async () => '__sapper__/export',
|
||||
},
|
||||
@@ -1451,6 +1429,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `saber build`',
|
||||
value: 'saber build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'saber --port $PORT',
|
||||
@@ -1461,8 +1440,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'saber',
|
||||
devCommand: 'saber --port $PORT',
|
||||
buildCommand: 'saber build',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultRoutes: [
|
||||
@@ -1521,6 +1498,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `stencil build`',
|
||||
value: 'stencil build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'stencil build --dev --watch --serve --port $PORT',
|
||||
@@ -1531,8 +1509,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: '@stencil/core',
|
||||
devCommand: 'stencil build --dev --watch --serve --port $PORT',
|
||||
buildCommand: 'stencil build',
|
||||
getFsOutputDir: async () => 'www',
|
||||
getOutputDirName: async () => 'www',
|
||||
defaultRoutes: [
|
||||
@@ -1611,6 +1587,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `nuxt generate`',
|
||||
value: 'nuxt generate',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'nuxt',
|
||||
@@ -1620,8 +1597,6 @@ export const frameworks = [
|
||||
},
|
||||
},
|
||||
dependency: 'nuxt',
|
||||
devCommand: 'nuxt',
|
||||
buildCommand: 'nuxt generate',
|
||||
getFsOutputDir: async () => '.output',
|
||||
getOutputDirName: async () => 'dist',
|
||||
cachePattern: '.nuxt/**',
|
||||
@@ -1680,8 +1655,6 @@ export const frameworks = [
|
||||
placeholder: 'RedwoodJS default',
|
||||
},
|
||||
},
|
||||
devCommand: 'yarn rw dev --fwd="--port=$PORT --open=false',
|
||||
buildCommand: 'yarn rw deploy vercel',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -1717,6 +1690,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `hugo -D --gc`',
|
||||
value: 'hugo -D --gc',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'hugo server -D -w -p $PORT',
|
||||
@@ -1726,8 +1700,6 @@ export const frameworks = [
|
||||
placeholder: '`public` or `publishDir` from the `config` file',
|
||||
},
|
||||
},
|
||||
devCommand: 'hugo server -D -w -p $PORT',
|
||||
buildCommand: 'hugo -D --gc',
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
type HugoConfig = { publishDir?: string };
|
||||
const config = await readConfigFile<HugoConfig>(
|
||||
@@ -1772,6 +1744,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `jekyll build`',
|
||||
value: 'jekyll build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'bundle exec jekyll serve --watch --port $PORT',
|
||||
@@ -1781,8 +1754,6 @@ export const frameworks = [
|
||||
placeholder: '`_site` or `destination` from `_config.yml`',
|
||||
},
|
||||
},
|
||||
devCommand: 'bundle exec jekyll serve --watch --port $PORT',
|
||||
buildCommand: 'jekyll build',
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
type JekyllConfig = { destination?: string };
|
||||
const config = await readConfigFile<JekyllConfig>(
|
||||
@@ -1821,6 +1792,7 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `brunch build --production`',
|
||||
value: 'brunch build --production',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'brunch watch --server --port $PORT',
|
||||
@@ -1830,8 +1802,6 @@ export const frameworks = [
|
||||
value: 'public',
|
||||
},
|
||||
},
|
||||
devCommand: 'brunch watch --server --port $PORT',
|
||||
buildCommand: 'brunch build --production',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
},
|
||||
@@ -1856,18 +1826,17 @@ export const frameworks = [
|
||||
value: 'bundle install',
|
||||
},
|
||||
buildCommand: {
|
||||
value: '`npm run build` or `bundle exec middleman build`',
|
||||
placeholder: '`npm run build` or `bundle exec middleman build`',
|
||||
value: 'bundle exec middleman build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'bundle exec middleman server -p $PORT',
|
||||
placeholder: 'bundle exec middleman server',
|
||||
value: 'bundle exec middleman server -p $PORT',
|
||||
},
|
||||
outputDirectory: {
|
||||
value: 'build',
|
||||
},
|
||||
},
|
||||
devCommand: 'bundle exec middleman server -p $PORT',
|
||||
buildCommand: 'bundle exec middleman build',
|
||||
getFsOutputDir: async () => 'build',
|
||||
getOutputDirName: async () => 'build',
|
||||
cachePattern: '{vendor/bin,vendor/cache,vendor/bundle}/**',
|
||||
@@ -1896,15 +1865,13 @@ export const frameworks = [
|
||||
value: 'zola build',
|
||||
},
|
||||
devCommand: {
|
||||
value: 'zola serve --port $PORT',
|
||||
placeholder: 'zola serve',
|
||||
value: 'zola serve --port $PORT',
|
||||
},
|
||||
outputDirectory: {
|
||||
value: 'public',
|
||||
},
|
||||
},
|
||||
devCommand: 'zola serve --port $PORT',
|
||||
buildCommand: 'zola build',
|
||||
getFsOutputDir: async () => 'public',
|
||||
getOutputDirName: async () => 'public',
|
||||
defaultVersion: '0.13.0',
|
||||
@@ -1934,17 +1901,17 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `vite build`',
|
||||
value: 'vite build',
|
||||
},
|
||||
devCommand: {
|
||||
placeholder: 'vite',
|
||||
value: 'vite',
|
||||
},
|
||||
outputDirectory: {
|
||||
value: 'dist',
|
||||
},
|
||||
},
|
||||
dependency: 'vite',
|
||||
devCommand: 'vite',
|
||||
buildCommand: 'vite build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
},
|
||||
@@ -1972,17 +1939,17 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run build` or `parcel build`',
|
||||
value: 'parcel build',
|
||||
},
|
||||
devCommand: {
|
||||
placeholder: 'parcel',
|
||||
value: 'parcel',
|
||||
},
|
||||
outputDirectory: {
|
||||
placeholder: 'dist',
|
||||
value: 'dist',
|
||||
},
|
||||
},
|
||||
dependency: 'parcel',
|
||||
devCommand: 'parcel',
|
||||
buildCommand: 'parcel build',
|
||||
getFsOutputDir: async () => 'dist',
|
||||
getOutputDirName: async () => 'dist',
|
||||
defaultRoutes: [
|
||||
@@ -2016,16 +1983,16 @@ export const frameworks = [
|
||||
},
|
||||
buildCommand: {
|
||||
placeholder: '`npm run vercel-build` or `npm run build`',
|
||||
value: null,
|
||||
},
|
||||
devCommand: {
|
||||
placeholder: 'None',
|
||||
value: null,
|
||||
},
|
||||
outputDirectory: {
|
||||
placeholder: '`public` if it exists, or `.`',
|
||||
},
|
||||
},
|
||||
devCommand: null,
|
||||
buildCommand: null,
|
||||
getFsOutputDir: async (dirPrefix: string): Promise<string> => {
|
||||
// Public if it exists or `.`
|
||||
let base = 'public';
|
||||
|
||||
@@ -26,7 +26,7 @@ export interface SettingValue {
|
||||
* A predefined setting for the detected framework
|
||||
* @example "next dev --port $PORT"
|
||||
*/
|
||||
value: string;
|
||||
value: string | null;
|
||||
placeholder?: string;
|
||||
}
|
||||
|
||||
@@ -129,11 +129,11 @@ export interface Framework {
|
||||
/**
|
||||
* Default Build Command or a placeholder
|
||||
*/
|
||||
buildCommand: Setting;
|
||||
buildCommand: SettingValue;
|
||||
/**
|
||||
* Default Development Command or a placeholder
|
||||
*/
|
||||
devCommand: Setting;
|
||||
devCommand: SettingValue;
|
||||
/**
|
||||
* Default Output Directory
|
||||
*/
|
||||
@@ -157,6 +157,7 @@ export interface Framework {
|
||||
/**
|
||||
* Name of a dependency in `package.json` to detect this framework.
|
||||
* @example "hexo"
|
||||
* @deprecated use `detectors` instead (new frameworks should not use this prop)
|
||||
*/
|
||||
dependency?: string;
|
||||
/**
|
||||
@@ -201,16 +202,6 @@ export interface Framework {
|
||||
* @example ".cache/**"
|
||||
*/
|
||||
cachePattern?: string;
|
||||
/**
|
||||
* The default build command for the framework.
|
||||
* @example "next build"
|
||||
*/
|
||||
buildCommand: string | null;
|
||||
/**
|
||||
* The default development command for the framework.
|
||||
* @example "next dev"
|
||||
*/
|
||||
devCommand: string | null;
|
||||
/**
|
||||
* The default version of the framework command that is available within the
|
||||
* build image. Usually an environment variable can be set to override this.
|
||||
|
||||
14
packages/frameworks/test/frameworks.unit.test.ts
vendored
@@ -34,7 +34,7 @@ const SchemaSettings = {
|
||||
additionalProperties: false,
|
||||
properties: {
|
||||
value: {
|
||||
type: 'string',
|
||||
type: ['string', 'null'],
|
||||
},
|
||||
placeholder: {
|
||||
type: 'string',
|
||||
@@ -58,15 +58,7 @@ const Schema = {
|
||||
type: 'array',
|
||||
items: {
|
||||
type: 'object',
|
||||
required: [
|
||||
'name',
|
||||
'slug',
|
||||
'logo',
|
||||
'description',
|
||||
'settings',
|
||||
'buildCommand',
|
||||
'devCommand',
|
||||
],
|
||||
required: ['name', 'slug', 'logo', 'description', 'settings'],
|
||||
properties: {
|
||||
name: { type: 'string' },
|
||||
slug: { type: ['string', 'null'] },
|
||||
@@ -138,8 +130,6 @@ const Schema = {
|
||||
|
||||
dependency: { type: 'string' },
|
||||
cachePattern: { type: 'string' },
|
||||
buildCommand: { type: ['string', 'null'] },
|
||||
devCommand: { type: ['string', 'null'] },
|
||||
defaultVersion: { type: 'string' },
|
||||
},
|
||||
},
|
||||
|
||||
@@ -6,7 +6,8 @@ import { join } from 'path';
|
||||
import stringArgv from 'string-argv';
|
||||
import { debug } from '@vercel/build-utils';
|
||||
const versionMap = new Map([
|
||||
['1.16', '1.16'],
|
||||
['1.17', '1.17.3'],
|
||||
['1.16', '1.16.10'],
|
||||
['1.15', '1.15.8'],
|
||||
['1.14', '1.14.15'],
|
||||
['1.13', '1.13.15'],
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/go",
|
||||
"version": "1.2.4-canary.3",
|
||||
"version": "1.2.4-canary.4",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
|
||||
|
||||
@@ -5,10 +5,10 @@
|
||||
{ "src": "subdirectory/index.go", "use": "@vercel/go" }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/", "mustContain": "cow:go1.16:RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/", "mustContain": "cow:go1.17.3:RANDOMNESS_PLACEHOLDER" },
|
||||
{
|
||||
"path": "/subdirectory",
|
||||
"mustContain": "subcow:go1.16:RANDOMNESS_PLACEHOLDER"
|
||||
"mustContain": "subcow:go1.17.3:RANDOMNESS_PLACEHOLDER"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,3 +1,3 @@
|
||||
module with-nested
|
||||
|
||||
go 1.12
|
||||
go 1.16
|
||||
|
||||
2
packages/middleware/.eslintignore
Normal file
@@ -0,0 +1,2 @@
|
||||
entries.js
|
||||
dist
|
||||
2
packages/middleware/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
/dist
|
||||
/test/fixtures/*/.output
|
||||
30
packages/middleware/build.js
Normal file
@@ -0,0 +1,30 @@
|
||||
#!/usr/bin/env node
|
||||
const fs = require('fs-extra');
|
||||
const execa = require('execa');
|
||||
const { join } = require('path');
|
||||
|
||||
async function main() {
|
||||
const srcDir = join(__dirname, 'src');
|
||||
const outDir = join(__dirname, 'dist');
|
||||
|
||||
// Start fresh
|
||||
await fs.remove(outDir);
|
||||
|
||||
await execa(
|
||||
'ncc',
|
||||
['build', join(srcDir, 'index.ts'), '-o', outDir, '--external', 'esbuild'],
|
||||
{
|
||||
stdio: 'inherit',
|
||||
}
|
||||
);
|
||||
|
||||
await fs.copyFile(
|
||||
join(__dirname, 'src/entries.js'),
|
||||
join(outDir, 'entries.js')
|
||||
);
|
||||
}
|
||||
|
||||
main().catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
58
packages/middleware/package.json
Normal file
@@ -0,0 +1,58 @@
|
||||
{
|
||||
"name": "vercel-plugin-middleware",
|
||||
"version": "0.0.0-canary.7",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/vercel/vercel.git",
|
||||
"directory": "packages/middleware"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "node build",
|
||||
"test-unit": "jest",
|
||||
"prepublishOnly": "node build"
|
||||
},
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"dependencies": {
|
||||
"esbuild": "0.13.12"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@peculiar/webcrypto": "1.2.0",
|
||||
"@types/cookie": "0.4.1",
|
||||
"@types/glob": "7.2.0",
|
||||
"@types/http-proxy": "1.17.7",
|
||||
"@types/jest": "27.0.2",
|
||||
"@types/node": "16.11.6",
|
||||
"@types/node-fetch": "^2",
|
||||
"@types/ua-parser-js": "0.7.36",
|
||||
"@types/uuid": "8.3.1",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"cookie": "0.4.1",
|
||||
"formdata-node": "4.3.1",
|
||||
"glob": "7.2.0",
|
||||
"http-proxy": "1.18.1",
|
||||
"node-fetch": "^2",
|
||||
"ua-parser-js": "1.0.2",
|
||||
"url": "0.11.0",
|
||||
"uuid": "8.3.2",
|
||||
"web-streams-polyfill": "3.1.1"
|
||||
},
|
||||
"jest": {
|
||||
"preset": "ts-jest",
|
||||
"globals": {
|
||||
"ts-jest": {
|
||||
"diagnostics": false,
|
||||
"isolatedModules": true
|
||||
}
|
||||
},
|
||||
"verbose": false,
|
||||
"testEnvironment": "node",
|
||||
"testMatch": [
|
||||
"<rootDir>/test/**/*.test.ts"
|
||||
]
|
||||
}
|
||||
}
|
||||
18
packages/middleware/src/entries.js
Normal file
@@ -0,0 +1,18 @@
|
||||
import * as middleware from './_middleware';
|
||||
_ENTRIES = typeof _ENTRIES === 'undefined' ? {} : _ENTRIES;
|
||||
_ENTRIES['middleware_pages/_middleware'] = {
|
||||
default: async function (ev) {
|
||||
const result = await middleware.default(ev.request, ev);
|
||||
return {
|
||||
promise: Promise.resolve(),
|
||||
waitUntil: Promise.resolve(),
|
||||
response:
|
||||
result ||
|
||||
new Response(null, {
|
||||
headers: {
|
||||
'x-middleware-next': 1,
|
||||
},
|
||||
}),
|
||||
};
|
||||
},
|
||||
};
|
||||
323
packages/middleware/src/index.ts
Normal file
@@ -0,0 +1,323 @@
|
||||
import util from 'util';
|
||||
import { extname, join, basename } from 'path';
|
||||
import * as esbuild from 'esbuild';
|
||||
import { promises as fsp } from 'fs';
|
||||
import { IncomingMessage, ServerResponse } from 'http';
|
||||
import libGlob from 'glob';
|
||||
import Proxy from 'http-proxy';
|
||||
|
||||
import { run } from './websandbox';
|
||||
import type { FetchEventResult } from './websandbox/types';
|
||||
|
||||
import { ParsedUrlQuery, stringify as stringifyQs } from 'querystring';
|
||||
import {
|
||||
format as formatUrl,
|
||||
parse as parseUrl,
|
||||
UrlWithParsedQuery,
|
||||
} from 'url';
|
||||
import { toNodeHeaders } from './websandbox/utils';
|
||||
|
||||
const glob = util.promisify(libGlob);
|
||||
const SUPPORTED_EXTENSIONS = ['.js', '.ts'];
|
||||
|
||||
// File name of the `entries.js` file that gets copied into the
|
||||
// project directory. Use a name that is unlikely to conflict.
|
||||
const ENTRIES_NAME = '___vc_entries.js';
|
||||
|
||||
async function getMiddlewareFile(workingDirectory: string) {
|
||||
// Only the root-level `_middleware.*` files are considered.
|
||||
// For more granular routing, the Project's Framework (i.e. Next.js)
|
||||
// middleware support should be used.
|
||||
const middlewareFiles = await glob(join(workingDirectory, '_middleware.*'));
|
||||
|
||||
if (middlewareFiles.length === 0) {
|
||||
// No middleware file at the root of the project, so bail...
|
||||
return;
|
||||
}
|
||||
|
||||
if (middlewareFiles.length > 1) {
|
||||
throw new Error(
|
||||
`Only one middleware file is allowed. Found: ${middlewareFiles.join(
|
||||
', '
|
||||
)}`
|
||||
);
|
||||
}
|
||||
|
||||
const ext = extname(middlewareFiles[0]);
|
||||
if (!SUPPORTED_EXTENSIONS.includes(ext)) {
|
||||
throw new Error(`Unsupported file type: ${ext}`);
|
||||
}
|
||||
|
||||
return middlewareFiles[0];
|
||||
}
|
||||
|
||||
export async function build({ workPath }: { workPath: string }) {
|
||||
const entriesPath = join(workPath, ENTRIES_NAME);
|
||||
const middlewareFile = await getMiddlewareFile(workPath);
|
||||
if (!middlewareFile) return;
|
||||
|
||||
console.log('Compiling middleware file: %j', middlewareFile);
|
||||
|
||||
// Create `_ENTRIES` wrapper
|
||||
await fsp.copyFile(join(__dirname, 'entries.js'), entriesPath);
|
||||
|
||||
// Build
|
||||
try {
|
||||
await esbuild.build({
|
||||
entryPoints: [entriesPath],
|
||||
bundle: true,
|
||||
absWorkingDir: workPath,
|
||||
outfile: join(workPath, '.output/server/pages/_middleware.js'),
|
||||
});
|
||||
} finally {
|
||||
await fsp.unlink(entriesPath);
|
||||
}
|
||||
|
||||
// Write middleware manifest
|
||||
const middlewareManifest = {
|
||||
version: 1,
|
||||
sortedMiddleware: ['/'],
|
||||
middleware: {
|
||||
'/': {
|
||||
env: [],
|
||||
files: ['server/pages/_middleware.js'],
|
||||
name: 'pages/_middleware',
|
||||
page: '/',
|
||||
regexp: '^/.*$',
|
||||
},
|
||||
},
|
||||
};
|
||||
const middlewareManifestData = JSON.stringify(middlewareManifest, null, 2);
|
||||
const middlewareManifestPath = join(
|
||||
workPath,
|
||||
'.output/server/middleware-manifest.json'
|
||||
);
|
||||
await fsp.writeFile(middlewareManifestPath, middlewareManifestData);
|
||||
}
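For reference, after a successful `build()` of a project with a root-level middleware file, the paths written above imply roughly this layout (the tree itself is illustrative; only the two files are produced by this plugin):

.output/
  server/
    middleware-manifest.json
    pages/
      _middleware.js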
|
||||
|
||||
const stringifyQuery = (req: IncomingMessage, query: ParsedUrlQuery) => {
|
||||
const initialQueryValues = Object.values((req as any).__NEXT_INIT_QUERY);
|
||||
|
||||
return stringifyQs(query, undefined, undefined, {
|
||||
encodeURIComponent(value: any) {
|
||||
if (initialQueryValues.some(val => val === value)) {
|
||||
return encodeURIComponent(value);
|
||||
}
|
||||
return value;
|
||||
},
|
||||
});
|
||||
};
|
||||
|
||||
// eslint-disable-next-line
|
||||
async function runMiddlewareCatchAll(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
requestId: string,
|
||||
name: string,
|
||||
path: string
|
||||
) {
|
||||
let result: FetchEventResult | null = null;
|
||||
const parsedUrl = parseUrl(req.url!, true);
|
||||
try {
|
||||
result = await runMiddleware({
|
||||
request: req,
|
||||
response: res,
|
||||
name: name,
|
||||
path,
|
||||
requestId: requestId,
|
||||
parsedUrl,
|
||||
parsed: parseUrl(req.url!, true),
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
return { finished: true, error: err };
|
||||
}
|
||||
|
||||
if (result === null) {
|
||||
return { finished: true };
|
||||
}
|
||||
|
||||
if (
|
||||
!result.response.headers.has('x-middleware-rewrite') &&
|
||||
!result.response.headers.has('x-middleware-next') &&
|
||||
!result.response.headers.has('Location')
|
||||
) {
|
||||
result.response.headers.set('x-middleware-refresh', '1');
|
||||
}
|
||||
|
||||
result.response.headers.delete('x-middleware-next');
|
||||
|
||||
for (const [key, value] of Object.entries(
|
||||
toNodeHeaders(result.response.headers)
|
||||
)) {
|
||||
if (key !== 'content-encoding' && value !== undefined) {
|
||||
res.setHeader(key, value);
|
||||
}
|
||||
}
|
||||
|
||||
const preflight =
|
||||
req.method === 'HEAD' && req.headers['x-middleware-preflight'];
|
||||
|
||||
if (preflight) {
|
||||
res.writeHead(200);
|
||||
res.end();
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
}
|
||||
|
||||
res.statusCode = result.response.status;
|
||||
res.statusMessage = result.response.statusText;
|
||||
|
||||
const location = result.response.headers.get('Location');
|
||||
if (location) {
|
||||
res.statusCode = result.response.status;
|
||||
if (res.statusCode === 308) {
|
||||
res.setHeader('Refresh', `0;url=${location}`);
|
||||
}
|
||||
|
||||
res.end();
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
}
|
||||
|
||||
if (result.response.headers.has('x-middleware-rewrite')) {
|
||||
const rewrite = result.response.headers.get('x-middleware-rewrite')!;
|
||||
const rewriteParsed = parseUrl(rewrite, true);
|
||||
if (rewriteParsed.protocol) {
|
||||
return proxyRequest(req, res, rewriteParsed);
|
||||
}
|
||||
|
||||
(req as any)._nextRewroteUrl = rewrite;
|
||||
(req as any)._nextDidRewrite = (req as any)._nextRewroteUrl !== req.url;
|
||||
|
||||
return {
|
||||
finished: false,
|
||||
pathname: rewriteParsed.pathname,
|
||||
query: {
|
||||
...parsedUrl.query,
|
||||
...rewriteParsed.query,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (result.response.headers.has('x-middleware-refresh')) {
|
||||
res.writeHead(result.response.status);
|
||||
|
||||
if (result.response.body instanceof Buffer) {
|
||||
res.write(result.response.body);
|
||||
} else {
|
||||
//@ts-ignore
|
||||
for await (const chunk of result.response.body || []) {
|
||||
res.write(chunk);
|
||||
}
|
||||
}
|
||||
res.end();
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
finished: false,
|
||||
};
|
||||
}
|
||||
|
||||
const proxyRequest = async (
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
parsedUrl: UrlWithParsedQuery
|
||||
) => {
|
||||
const { query } = parsedUrl;
|
||||
delete (parsedUrl as any).query;
|
||||
parsedUrl.search = stringifyQuery(req, query);
|
||||
|
||||
const target = formatUrl(parsedUrl);
|
||||
const proxy = new Proxy({
|
||||
target,
|
||||
changeOrigin: true,
|
||||
ignorePath: true,
|
||||
xfwd: true,
|
||||
proxyTimeout: 30_000, // limit proxying to 30 seconds
|
||||
});
|
||||
|
||||
await new Promise((proxyResolve, proxyReject) => {
|
||||
let finished = false;
|
||||
|
||||
proxy.on('proxyReq', (proxyReq: any) => {
|
||||
proxyReq.on('close', () => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
proxyResolve(true);
|
||||
}
|
||||
});
|
||||
});
|
||||
proxy.on('error', (err: any) => {
|
||||
if (!finished) {
|
||||
finished = true;
|
||||
proxyReject(err);
|
||||
}
|
||||
});
|
||||
proxy.web(req, res);
|
||||
});
|
||||
|
||||
return {
|
||||
finished: true,
|
||||
};
|
||||
};
|
||||
|
||||
async function runMiddleware(params: {
|
||||
request: IncomingMessage;
|
||||
response: ServerResponse;
|
||||
parsedUrl: UrlWithParsedQuery;
|
||||
parsed: UrlWithParsedQuery;
|
||||
requestId: string;
|
||||
name: string;
|
||||
path: string;
|
||||
}): Promise<FetchEventResult | null> {
|
||||
const page: { name?: string; params?: { [key: string]: string } } = {};
|
||||
let result: FetchEventResult | null = null;
|
||||
|
||||
result = await run({
|
||||
name: params.name,
|
||||
path: params.path,
|
||||
request: {
|
||||
headers: params.request.headers,
|
||||
method: params.request.method || 'GET',
|
||||
url: params.request.url!,
|
||||
// url: (params.request as any).__NEXT_INIT_URL,
|
||||
page,
|
||||
},
|
||||
});
|
||||
|
||||
result.waitUntil.catch((error: any) => {
|
||||
console.error(`Uncaught: middleware waitUntil errored`, error);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Should run the middleware in the `vm` sandbox and return the result
|
||||
// back to `vercel dev`. If no middleware file exists then this function
|
||||
// should return `finished: false` (very quickly, since this is being
|
||||
// invoked for every HTTP request!).
|
||||
export async function runDevMiddleware(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
workingDirectory: string
|
||||
): ReturnType<typeof runMiddlewareCatchAll> {
|
||||
const middlewareFile = await getMiddlewareFile(workingDirectory);
|
||||
if (!middlewareFile) {
|
||||
return {
|
||||
finished: false,
|
||||
};
|
||||
}
|
||||
return runMiddlewareCatchAll(
|
||||
req,
|
||||
res,
|
||||
'',
|
||||
basename(middlewareFile),
|
||||
middlewareFile
|
||||
);
|
||||
}
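`runDevMiddleware` is meant to be called by `vercel dev` for every incoming request; the actual integration lives in the CLI and is not part of this package. Based only on the return values above, a hypothetical caller would look roughly like this:

// Hypothetical dev-server integration (not part of this diff).
import { IncomingMessage, ServerResponse } from 'http';
import { runDevMiddleware } from 'vercel-plugin-middleware';

async function handle(req: IncomingMessage, res: ServerResponse, cwd: string) {
  const result = await runDevMiddleware(req, res, cwd);

  if (result.finished) {
    // The middleware (or the rewrite proxy) already wrote the response.
    return;
  }

  if ('pathname' in result && result.pathname) {
    // Relative rewrite: continue routing with the rewritten path
    // (the merged `result.query` should be re-appended by the router).
    req.url = result.pathname;
  }

  // ...otherwise fall through to the regular dev-server routing.
}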
|
||||
65
packages/middleware/src/websandbox/adapter.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import type { RequestData, FetchEventResult } from './types';
|
||||
import { DeprecationError } from './error';
|
||||
import { fromNodeHeaders } from './utils';
|
||||
import { NextFetchEvent } from './spec-extension/fetch-event';
|
||||
import { NextRequest, RequestInit } from './spec-extension/request';
|
||||
import { SpecResponse } from './spec-extension/response';
|
||||
import { waitUntilSymbol } from './spec-compliant/fetch-event';
|
||||
import { Response } from 'node-fetch';
|
||||
|
||||
export async function adapter(params: {
|
||||
handler: (request: NextRequest, event: NextFetchEvent) => Promise<Response>;
|
||||
page: string;
|
||||
request: RequestData;
|
||||
}): Promise<FetchEventResult> {
|
||||
const url = params.request.url.startsWith('/')
|
||||
? `https://${params.request.headers.host}${params.request.url}`
|
||||
: params.request.url;
|
||||
|
||||
const request = new NextRequestHint({
|
||||
page: params.page,
|
||||
input: url,
|
||||
init: {
|
||||
geo: params.request.geo,
|
||||
//@ts-ignore
|
||||
headers: fromNodeHeaders(params.request.headers),
|
||||
ip: params.request.ip,
|
||||
method: params.request.method,
|
||||
page: params.request.page,
|
||||
},
|
||||
});
|
||||
|
||||
const event = new NextFetchEvent({ request, page: params.page });
|
||||
const original = await params.handler(request, event);
|
||||
|
||||
return {
|
||||
response: original || SpecResponse.next(),
|
||||
waitUntil: Promise.all(event[waitUntilSymbol]),
|
||||
};
|
||||
}
|
||||
|
||||
class NextRequestHint extends NextRequest {
|
||||
sourcePage: string;
|
||||
|
||||
constructor(params: {
|
||||
init: RequestInit;
|
||||
input: Request | string;
|
||||
page: string;
|
||||
}) {
|
||||
//@ts-ignore
|
||||
super(params.input, params.init);
|
||||
this.sourcePage = params.page;
|
||||
}
|
||||
|
||||
get request() {
|
||||
throw new DeprecationError({ page: this.sourcePage });
|
||||
}
|
||||
|
||||
respondWith() {
|
||||
throw new DeprecationError({ page: this.sourcePage });
|
||||
}
|
||||
|
||||
waitUntil() {
|
||||
throw new DeprecationError({ page: this.sourcePage });
|
||||
}
|
||||
}
|
||||
12
packages/middleware/src/websandbox/error.ts
Normal file
@@ -0,0 +1,12 @@
|
||||
export class DeprecationError extends Error {
|
||||
constructor({ page }: { page: string }) {
|
||||
super(`The middleware "${page}" accepts an async API directly with the form:
|
||||
|
||||
export function middleware(request, event) {
|
||||
return new Response("Hello " + request.url)
|
||||
}
|
||||
|
||||
Read more: https://nextjs.org/docs/messages/middleware-new-signature
|
||||
`);
|
||||
}
|
||||
}
|
||||
76
packages/middleware/src/websandbox/form-data.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { isBlob } from './is';
|
||||
import { streamToIterator } from './utils';
|
||||
|
||||
const carriage = '\r\n';
|
||||
const dashes = '--';
|
||||
const carriageLength = 2;
|
||||
|
||||
function escape(str: string) {
|
||||
return str.replace(/"/g, '\\"');
|
||||
}
|
||||
|
||||
function getFooter(boundary: string) {
|
||||
return `${dashes}${boundary}${dashes}${carriage.repeat(2)}`;
|
||||
}
|
||||
|
||||
function getHeader(boundary: string, name: string, field: FormDataEntryValue) {
|
||||
let header = '';
|
||||
header += `${dashes}${boundary}${carriage}`;
|
||||
header += `Content-Disposition: form-data; name="${escape(name)}"`;
|
||||
|
||||
if (isBlob(field)) {
|
||||
header += `; filename="${escape(field.name)}"${carriage}`;
|
||||
header += `Content-Type: ${field.type || 'application/octet-stream'}`;
|
||||
}
|
||||
|
||||
return `${header}${carriage.repeat(2)}`;
|
||||
}
|
||||
|
||||
export function getBoundary() {
|
||||
const array = new Uint8Array(32);
|
||||
crypto.getRandomValues(array);
|
||||
|
||||
let str = '';
|
||||
for (let i = 0; i < array.length; i++) {
|
||||
str += array[i].toString(16).padStart(2, '0');
|
||||
}
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
export async function* formDataIterator(
|
||||
form: FormData,
|
||||
boundary: string
|
||||
): AsyncIterableIterator<Uint8Array> {
|
||||
const encoder = new TextEncoder();
|
||||
//@ts-ignore
|
||||
for (const [name, value] of form) {
|
||||
yield encoder.encode(getHeader(boundary, name, value));
|
||||
|
||||
if (isBlob(value)) {
|
||||
// @ts-ignore /shrug
|
||||
const stream: ReadableStream<Uint8Array> = value.stream();
|
||||
yield* streamToIterator(stream);
|
||||
} else {
|
||||
yield encoder.encode(value);
|
||||
}
|
||||
|
||||
yield encoder.encode(carriage);
|
||||
}
|
||||
|
||||
yield encoder.encode(getFooter(boundary));
|
||||
}
|
||||
|
||||
export function getFormDataLength(form: FormData, boundary: string) {
|
||||
let length = 0;
|
||||
|
||||
//@ts-ignore
|
||||
for (const [name, value] of form) {
|
||||
length += Buffer.byteLength(getHeader(boundary, name, value));
|
||||
length += isBlob(value) ? value.size : Buffer.byteLength(String(value));
|
||||
length += carriageLength;
|
||||
}
|
||||
|
||||
length += Buffer.byteLength(getFooter(boundary));
|
||||
return length;
|
||||
}
1 packages/middleware/src/websandbox/index.ts Normal file
@@ -0,0 +1 @@
export * from './sandbox/sandbox';
80 packages/middleware/src/websandbox/is.ts Normal file
@@ -0,0 +1,80 @@
/**
|
||||
* The ArrayBuffer object is used to represent a generic, fixed-length raw
|
||||
* binary data buffer. It is an array of bytes, often referred to in other
|
||||
* languages as a "byte array". You cannot directly manipulate the contents of
|
||||
* an ArrayBuffer; instead, you create one of the typed array objects or a
|
||||
* DataView object which represents the buffer in a specific format, and use
|
||||
* that to read and write the contents of the buffer.
|
||||
*/
|
||||
export function isArrayBuffer(value: any): value is ArrayBuffer {
|
||||
return Object.prototype.isPrototypeOf.call(ArrayBuffer, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* ArrayBufferView is a helper type representing any of the following JS
|
||||
* TypedArray types which correspond to the list below. It is checked by duck
|
||||
* typing the provided object.
|
||||
*/
|
||||
export function isArrayBufferView(value: any): value is ArrayBufferView {
|
||||
return ArrayBuffer.isView(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The DataView view provides a low-level interface for reading and writing
|
||||
* multiple number types in a binary ArrayBuffer, without having to care about
|
||||
* the platform's endianness.
|
||||
*/
|
||||
export function isDataView(value: any): value is DataView {
|
||||
return Object.prototype.isPrototypeOf.call(DataView, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The URLSearchParams interface defines utility methods to work with the
|
||||
* query string of a URL.
|
||||
*/
|
||||
export function isURLSearchParams(value: any): value is URLSearchParams {
|
||||
return Object.prototype.isPrototypeOf.call(URLSearchParams, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The Blob object represents a blob, which is a file-like object of immutable,
|
||||
* raw data; they can be read as text or binary data. Blobs can represent data
|
||||
* that isn't necessarily in a JavaScript-native format.
|
||||
*/
|
||||
export function isBlob(value: any): value is Blob {
|
||||
return Object.prototype.isPrototypeOf.call(Blob, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The FormData interface provides a way to easily construct a set of key/value
|
||||
* pairs representing form fields and their values, which can then be easily
|
||||
* sent using the XMLHttpRequest.send() method. It uses the same format a
|
||||
* form would use if the encoding type were set to "multipart/form-data".
|
||||
*/
|
||||
export function isFormData(value: any): value is FormData {
|
||||
return Object.prototype.isPrototypeOf.call(FormData, value);
|
||||
}
|
||||
|
||||
/**
|
||||
* The ReadableStream interface of the Streams API represents a readable stream
|
||||
* of byte data. Because we want to allow alternative implementations we also
|
||||
* duck type here.
|
||||
*/
|
||||
export function isReadableStream(value: any): value is ReadableStream {
|
||||
return (
|
||||
value &&
|
||||
(Object.prototype.isPrototypeOf.call(ReadableStream, value) ||
|
||||
(value.constructor.name === 'ReadableStream' && 'getReader' in value))
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Checks if an object implements an Iterable interface
|
||||
*/
|
||||
export function isIterable(object: any): object is Iterable<unknown> {
|
||||
return (
|
||||
object &&
|
||||
Symbol.iterator in object &&
|
||||
typeof object[Symbol.iterator] === 'function'
|
||||
);
|
||||
}
127 packages/middleware/src/websandbox/sandbox/polyfills.ts Normal file
@@ -0,0 +1,127 @@
import { Crypto as WebCrypto } from '@peculiar/webcrypto';
|
||||
import { TransformStream } from 'web-streams-polyfill';
|
||||
import { v4 as uuid } from 'uuid';
|
||||
import crypto from 'crypto';
|
||||
|
||||
export function atob(b64Encoded: string) {
|
||||
return Buffer.from(b64Encoded, 'base64').toString('binary');
|
||||
}
|
||||
|
||||
export function btoa(str: string) {
|
||||
return Buffer.from(str, 'binary').toString('base64');
|
||||
}
|
||||
|
||||
class TextEncoderRuntime {
|
||||
encoder: TextEncoder;
|
||||
|
||||
constructor() {
|
||||
this.encoder = new TextEncoder();
|
||||
}
|
||||
|
||||
get encoding() {
|
||||
return this.encoder.encoding;
|
||||
}
|
||||
|
||||
public encode(input: string) {
|
||||
return this.encoder.encode(input);
|
||||
}
|
||||
}
|
||||
|
||||
class TextDecoderRuntime {
|
||||
decoder: TextDecoder;
|
||||
|
||||
constructor() {
|
||||
this.decoder = new TextDecoder();
|
||||
}
|
||||
|
||||
get encoding() {
|
||||
return this.decoder.encoding;
|
||||
}
|
||||
|
||||
get fatal() {
|
||||
return this.decoder.fatal;
|
||||
}
|
||||
|
||||
get ignoreBOM() {
|
||||
return this.decoder.ignoreBOM;
|
||||
}
|
||||
|
||||
public decode(input: BufferSource, options?: TextDecodeOptions) {
|
||||
return this.decoder.decode(input, options);
|
||||
}
|
||||
}
|
||||
|
||||
export { TextDecoderRuntime as TextDecoder };
|
||||
export { TextEncoderRuntime as TextEncoder };
|
||||
|
||||
export class Crypto extends WebCrypto {
|
||||
// @ts-ignore Remove once types are updated and we deprecate node 12
|
||||
randomUUID = crypto.randomUUID || uuid;
|
||||
}
|
||||
|
||||
export class ReadableStream<T> {
|
||||
constructor(opts: UnderlyingSource = {}) {
|
||||
let closed = false;
|
||||
let pullPromise: any;
|
||||
|
||||
let transformController: TransformStreamDefaultController;
|
||||
const { readable, writable } = new TransformStream(
|
||||
{
|
||||
start: (controller: TransformStreamDefaultController) => {
|
||||
transformController = controller;
|
||||
},
|
||||
},
|
||||
undefined,
|
||||
{
|
||||
highWaterMark: 1,
|
||||
}
|
||||
);
|
||||
|
||||
const writer = writable.getWriter();
|
||||
const encoder = new TextEncoder();
|
||||
const controller: ReadableStreamController<T> = {
|
||||
get desiredSize() {
|
||||
return transformController.desiredSize;
|
||||
},
|
||||
close: () => {
|
||||
if (!closed) {
|
||||
closed = true;
|
||||
writer.close();
|
||||
}
|
||||
},
|
||||
enqueue: (chunk: T) => {
|
||||
writer.write(typeof chunk === 'string' ? encoder.encode(chunk) : chunk);
|
||||
pull();
|
||||
},
|
||||
error: (reason: any) => {
|
||||
transformController.error(reason);
|
||||
},
|
||||
};
|
||||
|
||||
const pull = () => {
|
||||
if (opts.pull) {
|
||||
if (!pullPromise) {
|
||||
pullPromise = Promise.resolve().then(() => {
|
||||
pullPromise = 0;
|
||||
opts.pull!(controller);
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (opts.start) {
|
||||
opts.start(controller);
|
||||
}
|
||||
|
||||
if (opts.cancel) {
|
||||
readable.cancel = (reason: any) => {
|
||||
opts.cancel!(reason);
|
||||
return readable.cancel(reason);
|
||||
};
|
||||
}
|
||||
|
||||
pull();
|
||||
|
||||
return readable;
|
||||
}
|
||||
}
226 packages/middleware/src/websandbox/sandbox/sandbox.ts Normal file
@@ -0,0 +1,226 @@
import type { RequestData, FetchEventResult, NodeHeaders } from '../types';
|
||||
import { Blob, File, FormData } from 'formdata-node';
|
||||
import { dirname, extname, resolve } from 'path';
|
||||
import { readFileSync } from 'fs';
|
||||
import { TransformStream } from 'web-streams-polyfill';
|
||||
import * as polyfills from './polyfills';
|
||||
import cookie from 'cookie';
|
||||
import vm from 'vm';
|
||||
import fetch, {
|
||||
Headers,
|
||||
RequestInit,
|
||||
Response,
|
||||
Request,
|
||||
RequestInfo,
|
||||
} from 'node-fetch';
|
||||
import { adapter } from '../adapter';
|
||||
import * as esbuild from 'esbuild';
|
||||
import m from 'module';
|
||||
|
||||
interface URLLike {
|
||||
href: string;
|
||||
}
|
||||
|
||||
let cache:
|
||||
| {
|
||||
context: { [key: string]: any };
|
||||
paths: Map<string, string>;
|
||||
require: Map<string, any>;
|
||||
sandbox: vm.Context;
|
||||
}
|
||||
| undefined;
|
||||
|
||||
const WEBPACK_HASH_REGEX =
|
||||
/__webpack_require__\.h = function\(\) \{ return "[0-9a-f]+"; \}/g;
|
||||
|
||||
/**
|
||||
* The cache is cleared when a path is cached and the content has changed. The
|
||||
* hack ignores changes that only change the compilation hash. Instead it is
|
||||
* probably better to disable HMR for middleware entries.
|
||||
*/
|
||||
export function clearSandboxCache(path: string, content: Buffer | string) {
|
||||
const prev = cache?.paths.get(path)?.replace(WEBPACK_HASH_REGEX, '');
|
||||
if (prev === undefined) return;
|
||||
if (prev === content.toString().replace(WEBPACK_HASH_REGEX, '')) return;
|
||||
cache = undefined;
|
||||
}
|
||||
|
||||
export async function run(params: {
|
||||
name: string;
|
||||
path: string;
|
||||
request: RequestData;
|
||||
}): Promise<FetchEventResult> {
|
||||
if (cache === undefined) {
|
||||
const context: { [key: string]: any } = {
|
||||
atob: polyfills.atob,
|
||||
Blob,
|
||||
btoa: polyfills.btoa,
|
||||
clearInterval,
|
||||
clearTimeout,
|
||||
console: {
|
||||
assert: console.assert.bind(console),
|
||||
error: console.error.bind(console),
|
||||
info: console.info.bind(console),
|
||||
log: console.log.bind(console),
|
||||
time: console.time.bind(console),
|
||||
timeEnd: console.timeEnd.bind(console),
|
||||
timeLog: console.timeLog.bind(console),
|
||||
warn: console.warn.bind(console),
|
||||
},
|
||||
Crypto: polyfills.Crypto,
|
||||
crypto: new polyfills.Crypto(),
|
||||
Response,
|
||||
Headers,
|
||||
Request,
|
||||
fetch: (input: RequestInfo, init: RequestInit = {}) => {
|
||||
const url = getFetchURL(input, params.request.headers);
|
||||
init.headers = getFetchHeaders(params.name, init);
|
||||
if (isRequestLike(input)) {
|
||||
return fetch(url, {
|
||||
...init,
|
||||
headers: {
|
||||
...Object.fromEntries(input.headers),
|
||||
...Object.fromEntries(init.headers),
|
||||
},
|
||||
});
|
||||
}
|
||||
return fetch(url, init);
|
||||
},
|
||||
File,
|
||||
FormData,
|
||||
process: { env: { ...process.env } },
|
||||
ReadableStream: polyfills.ReadableStream,
|
||||
setInterval,
|
||||
setTimeout,
|
||||
TextDecoder: polyfills.TextDecoder,
|
||||
TextEncoder: polyfills.TextEncoder,
|
||||
TransformStream,
|
||||
URL,
|
||||
URLSearchParams,
|
||||
};
|
||||
|
||||
context.self = context;
|
||||
|
||||
cache = {
|
||||
context,
|
||||
require: new Map<string, any>([
|
||||
[require.resolve('cookie'), { exports: cookie }],
|
||||
]),
|
||||
paths: new Map<string, string>(),
|
||||
sandbox: vm.createContext(context),
|
||||
};
|
||||
}
|
||||
try {
|
||||
const content = readFileSync(params.path, 'utf-8');
|
||||
const esBuildResult = esbuild.transformSync(content, {
|
||||
format: 'cjs',
|
||||
});
|
||||
const x = vm.runInNewContext(m.wrap(esBuildResult.code), cache.sandbox, {
|
||||
filename: params.path,
|
||||
});
|
||||
const module = {
|
||||
exports: {},
|
||||
loaded: false,
|
||||
id: params.path,
|
||||
};
|
||||
x(
|
||||
module.exports,
|
||||
sandboxRequire.bind(null, params.path),
|
||||
module,
|
||||
dirname(params.path),
|
||||
params.path
|
||||
);
|
||||
const adapterResult = await adapter({
|
||||
request: params.request,
|
||||
// @ts-ignore
|
||||
handler: module.exports.default,
|
||||
page: params.path,
|
||||
});
|
||||
return adapterResult;
|
||||
} catch (error) {
|
||||
cache = undefined;
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
function sandboxRequire(referrer: string, specifier: string) {
|
||||
const resolved = require.resolve(specifier, {
|
||||
paths: [resolve(dirname(referrer))],
|
||||
});
|
||||
|
||||
const cached = cache?.require.get(resolved);
|
||||
if (cached !== undefined) {
|
||||
return cached.exports;
|
||||
}
|
||||
|
||||
const module = {
|
||||
exports: {},
|
||||
loaded: false,
|
||||
id: resolved,
|
||||
};
|
||||
|
||||
cache?.require.set(resolved, module);
|
||||
|
||||
const transformOptions: esbuild.TransformOptions = {
|
||||
format: 'cjs',
|
||||
};
|
||||
if (extname(resolved) === '.json') {
|
||||
transformOptions.loader = 'json';
|
||||
}
|
||||
const transformedContent = esbuild.transformSync(
|
||||
readFileSync(resolved, 'utf-8'),
|
||||
transformOptions
|
||||
).code;
|
||||
const fn = vm.runInContext(
|
||||
`(function(module,exports,require,__dirname,__filename) {${transformedContent}\n})`,
|
||||
cache!.sandbox
|
||||
);
|
||||
|
||||
try {
|
||||
fn(
|
||||
module,
|
||||
module.exports,
|
||||
sandboxRequire.bind(null, resolved),
|
||||
dirname(resolved),
|
||||
resolved
|
||||
);
|
||||
} finally {
|
||||
cache?.require.delete(resolved);
|
||||
}
|
||||
module.loaded = true;
|
||||
return module.exports;
|
||||
}
|
||||
|
||||
function getFetchHeaders(middleware: string, init: RequestInit) {
|
||||
const headers = new Headers(init.headers ?? {});
|
||||
const prevsub = headers.get(`x-middleware-subrequest`) || '';
|
||||
const value = prevsub.split(':').concat(middleware).join(':');
|
||||
headers.set(`x-middleware-subrequest`, value);
|
||||
headers.set(`user-agent`, `Next.js Middleware`);
|
||||
return headers;
|
||||
}
|
||||
|
||||
function getFetchURL(input: RequestInfo, headers: NodeHeaders = {}): string {
|
||||
const initurl = isRequestLike(input)
|
||||
? input.url
|
||||
: isURLLike(input)
|
||||
? input.href
|
||||
: input;
|
||||
if (initurl.startsWith('/')) {
|
||||
const host = headers.host?.toString();
|
||||
const localhost =
|
||||
host === '127.0.0.1' ||
|
||||
host === 'localhost' ||
|
||||
host?.startsWith('localhost:');
|
||||
return `${localhost ? 'http' : 'https'}://${host}${initurl}`;
|
||||
}
|
||||
return initurl;
|
||||
}
|
||||
|
||||
function isURLLike(obj: unknown): obj is URLLike {
|
||||
return Boolean(obj && typeof obj === 'object' && 'href' in obj);
|
||||
}
|
||||
|
||||
function isRequestLike(obj: unknown): obj is Request {
|
||||
return Boolean(obj && typeof obj === 'object' && 'url' in obj);
|
||||
}
237 packages/middleware/src/websandbox/spec-compliant/body.ts Normal file
@@ -0,0 +1,237 @@
import { formDataIterator, getBoundary } from '../form-data';
|
||||
import { streamToIterator } from '../utils';
|
||||
import * as util from '../is';
|
||||
import { URLSearchParams } from 'url';
|
||||
|
||||
const INTERNALS = Symbol('internal body');
|
||||
|
||||
abstract class BaseBody implements Body {
|
||||
abstract headers: Headers;
|
||||
|
||||
[INTERNALS]: {
|
||||
bodyInit?: BodyInit;
|
||||
boundary?: string;
|
||||
disturbed: boolean;
|
||||
stream?: ReadableStream<Uint8Array> | null;
|
||||
};
|
||||
|
||||
constructor(bodyInit?: BodyInit) {
|
||||
this[INTERNALS] = {
|
||||
bodyInit: bodyInit,
|
||||
disturbed: false,
|
||||
};
|
||||
|
||||
if (util.isFormData(bodyInit)) {
|
||||
this[INTERNALS].boundary = getBoundary();
|
||||
}
|
||||
}
|
||||
|
||||
get body(): ReadableStream<Uint8Array> | null {
|
||||
const body = this[INTERNALS].bodyInit;
|
||||
if (!body) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
const that = this;
|
||||
if (!this[INTERNALS].stream) {
|
||||
const readable = new ReadableStream({
|
||||
async start(controller) {
|
||||
if (typeof body === 'string') {
|
||||
const encoder = new TextEncoder();
|
||||
controller.enqueue(encoder.encode(body));
|
||||
} else if (util.isBlob(body)) {
|
||||
const buffer = await body.arrayBuffer();
|
||||
controller.enqueue(new Uint8Array(buffer));
|
||||
} else if (util.isDataView(body)) {
|
||||
controller.enqueue(body);
|
||||
} else if (util.isArrayBuffer(body)) {
|
||||
controller.enqueue(body);
|
||||
} else if (util.isArrayBufferView(body)) {
|
||||
controller.enqueue(body);
|
||||
} else if (util.isURLSearchParams(body)) {
|
||||
const encoder = new TextEncoder();
|
||||
controller.enqueue(encoder.encode(body.toString()));
|
||||
} else if (util.isFormData(body)) {
|
||||
for await (const chunk of formDataIterator(
|
||||
body,
|
||||
that[INTERNALS].boundary!
|
||||
)) {
|
||||
controller.enqueue(chunk);
|
||||
}
|
||||
} else if (util.isReadableStream(body)) {
|
||||
for await (const chunk of streamToIterator(body)) {
|
||||
if (chunk.length) {
|
||||
controller.enqueue(chunk);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const text = Object.prototype.toString.call(body);
|
||||
const encoder = new TextEncoder();
|
||||
controller.enqueue(encoder.encode(text));
|
||||
}
|
||||
|
||||
controller.close();
|
||||
},
|
||||
});
|
||||
|
||||
// Spy on reading chunks to set the stream as disturbed
|
||||
const getReader = readable.getReader.bind(readable);
|
||||
readable.getReader = () => {
|
||||
const reader = getReader();
|
||||
const read = reader.read.bind(reader);
|
||||
reader.read = () => {
|
||||
this[INTERNALS].disturbed = true;
|
||||
return read();
|
||||
};
|
||||
return reader;
|
||||
};
|
||||
|
||||
this[INTERNALS].stream = readable;
|
||||
}
|
||||
|
||||
return this[INTERNALS].stream!;
|
||||
}
|
||||
|
||||
get bodyUsed(): boolean {
|
||||
return this[INTERNALS].disturbed;
|
||||
}
|
||||
|
||||
_consume() {
|
||||
if (this[INTERNALS].disturbed) {
|
||||
return Promise.reject(
|
||||
new TypeError(
|
||||
`Body has already been used. It can only be used once. Use tee() first if you need to read it twice.`
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
this[INTERNALS].disturbed = true;
|
||||
const body = this.body;
|
||||
return new Promise<Uint8Array>((resolve, reject) => {
|
||||
let buffer = new Uint8Array(0);
|
||||
if (!body) {
|
||||
return resolve(buffer);
|
||||
}
|
||||
|
||||
const reader = body.getReader();
|
||||
(function pump() {
|
||||
reader.read().then(({ value, done }) => {
|
||||
if (done) {
|
||||
return resolve(buffer);
|
||||
} else if (value) {
|
||||
const merge = new Uint8Array(buffer.length + value.length);
|
||||
merge.set(buffer);
|
||||
merge.set(value, buffer.length);
|
||||
buffer = merge;
|
||||
}
|
||||
|
||||
pump();
|
||||
}, reject);
|
||||
})();
|
||||
});
|
||||
}
|
||||
|
||||
async arrayBuffer() {
|
||||
const buffer = await this._consume();
|
||||
const arrayBuffer = new ArrayBuffer(buffer.length);
|
||||
const view = new Uint8Array(arrayBuffer);
|
||||
|
||||
for (let i = 0; i < buffer.length; ++i) {
|
||||
view[i] = buffer[i];
|
||||
}
|
||||
|
||||
return arrayBuffer;
|
||||
}
|
||||
|
||||
async blob() {
|
||||
const buffer = await this._consume();
|
||||
return new Blob([buffer]);
|
||||
}
|
||||
|
||||
async formData() {
|
||||
const bodyInit = this[INTERNALS].bodyInit;
|
||||
if (util.isURLSearchParams(bodyInit)) {
|
||||
const form = new FormData();
|
||||
for (const [key, value] of bodyInit) {
|
||||
form.append(key, value);
|
||||
}
|
||||
return form;
|
||||
} else if (util.isFormData(bodyInit)) {
|
||||
return bodyInit;
|
||||
} else {
|
||||
throw new TypeError(
|
||||
`Unrecognized Content-Type header value. FormData can only parse the following MIME types: multipart/form-data, application/x-www-form-urlencoded.`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async text() {
|
||||
const decoder = new TextDecoder();
|
||||
const buffer = await this._consume();
|
||||
return decoder.decode(buffer);
|
||||
}
|
||||
|
||||
async json() {
|
||||
const text = await this.text();
|
||||
|
||||
try {
|
||||
return JSON.parse(text);
|
||||
} catch (err: any) {
|
||||
throw new TypeError(`invalid json body reason: ${err.message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export { BaseBody as Body };
|
||||
|
||||
export type BodyInit =
|
||||
| null
|
||||
| string
|
||||
| Blob
|
||||
| BufferSource
|
||||
| FormData
|
||||
| URLSearchParams
|
||||
| ReadableStream<Uint8Array>;
|
||||
|
||||
export function extractContentType(instance: BaseBody) {
|
||||
const body = instance[INTERNALS].bodyInit;
|
||||
if (typeof body === 'string') {
|
||||
return 'text/plain;charset=UTF-8';
|
||||
} else if (util.isBlob(body)) {
|
||||
return body.type;
|
||||
} else if (util.isDataView(body)) {
|
||||
return null;
|
||||
} else if (util.isArrayBuffer(body)) {
|
||||
return null;
|
||||
} else if (util.isArrayBufferView(body)) {
|
||||
return null;
|
||||
} else if (util.isURLSearchParams(body)) {
|
||||
return 'application/x-www-form-urlencoded;charset=UTF-8';
|
||||
} else if (util.isFormData(body)) {
|
||||
return `multipart/form-data;boundary=${instance[INTERNALS].boundary}`;
|
||||
} else if (util.isReadableStream(body)) {
|
||||
return null;
|
||||
} else {
|
||||
return 'text/plain;charset=UTF-8';
|
||||
}
|
||||
}
|
||||
|
||||
export function cloneBody(instance: BaseBody) {
|
||||
if (instance.bodyUsed) {
|
||||
throw new Error('cannot clone body after it is used');
|
||||
}
|
||||
|
||||
const body = instance[INTERNALS].bodyInit;
|
||||
if (util.isReadableStream(body)) {
|
||||
const [r1, r2] = body.tee();
|
||||
instance[INTERNALS].bodyInit = r1;
|
||||
return r2;
|
||||
}
|
||||
|
||||
return body || null;
|
||||
}
|
||||
|
||||
export function getInstanceBody(instance: BaseBody) {
|
||||
return instance[INTERNALS].bodyInit;
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
export const responseSymbol = Symbol('response');
|
||||
export const passThroughSymbol = Symbol('passThrough');
|
||||
export const waitUntilSymbol = Symbol('waitUntil');
|
||||
|
||||
export class FetchEvent {
|
||||
readonly [waitUntilSymbol]: Promise<any>[] = [];
|
||||
[responseSymbol]?: Promise<Response>;
|
||||
[passThroughSymbol] = false;
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-useless-constructor
|
||||
constructor() {}
|
||||
|
||||
respondWith(response: Response | Promise<Response>): void {
|
||||
if (!this[responseSymbol]) {
|
||||
this[responseSymbol] = Promise.resolve(response);
|
||||
}
|
||||
}
|
||||
|
||||
passThroughOnException(): void {
|
||||
this[passThroughSymbol] = true;
|
||||
}
|
||||
|
||||
waitUntil(promise: Promise<any>): void {
|
||||
this[waitUntilSymbol].push(promise);
|
||||
}
|
||||
}
238 packages/middleware/src/websandbox/spec-compliant/headers.ts Normal file
@@ -0,0 +1,238 @@
import { isIterable } from '../is';
|
||||
|
||||
const MAP = Symbol('map');
|
||||
const INTERNAL = Symbol('internal');
|
||||
const INVALID_TOKEN_REGEX = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
|
||||
const INVALID_HEADER_CHAR_REGEX = /[^\t\x20-\x7e\x80-\xff]/;
|
||||
|
||||
class BaseHeaders implements Headers {
|
||||
[MAP]: { [k: string]: string[] } = {};
|
||||
|
||||
constructor(init?: HeadersInit) {
|
||||
if (init instanceof BaseHeaders) {
|
||||
const rawHeaders = init.raw();
|
||||
for (const headerName of Object.keys(rawHeaders)) {
|
||||
for (const value of rawHeaders[headerName]) {
|
||||
this.append(headerName, value);
|
||||
}
|
||||
}
|
||||
} else if (isIterable(init)) {
|
||||
const pairs = [];
|
||||
for (const pair of init) {
|
||||
if (!isIterable(pair)) {
|
||||
throw new TypeError('Each header pair must be iterable');
|
||||
}
|
||||
pairs.push(Array.from(pair));
|
||||
}
|
||||
|
||||
for (const pair of pairs) {
|
||||
if (pair.length !== 2) {
|
||||
throw new TypeError('Each header pair must be a name/value tuple');
|
||||
}
|
||||
this.append(pair[0], pair[1]);
|
||||
}
|
||||
} else if (typeof init === 'object') {
|
||||
for (const key of Object.keys(init)) {
|
||||
// @ts-ignore
|
||||
this.append(key, init[key]);
|
||||
}
|
||||
} else if (init) {
|
||||
throw new TypeError('Provided initializer must be an object');
|
||||
}
|
||||
}
|
||||
|
||||
get(name: string) {
|
||||
const _name = `${name}`;
|
||||
validateName(_name);
|
||||
const key = find(this[MAP], _name);
|
||||
if (key === undefined) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this[MAP][key].join(', ');
|
||||
}
|
||||
|
||||
forEach(
|
||||
callback: (value: string, name: string, parent: BaseHeaders) => void,
|
||||
thisArg: any = undefined
|
||||
): void {
|
||||
let pairs = getHeaders(this);
|
||||
let i = 0;
|
||||
while (i < pairs.length) {
|
||||
const [name, value] = pairs[i];
|
||||
callback.call(thisArg, value, name, this);
|
||||
pairs = getHeaders(this);
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
||||
set(name: string, value: string) {
|
||||
name = `${name}`;
|
||||
value = `${value}`;
|
||||
validateName(name);
|
||||
validateValue(value);
|
||||
const key = find(this[MAP], name);
|
||||
this[MAP][key !== undefined ? key : name] = [value];
|
||||
}
|
||||
|
||||
append(name: string, value: string) {
|
||||
name = `${name}`;
|
||||
value = `${value}`;
|
||||
validateName(name);
|
||||
validateValue(value);
|
||||
const key = find(this[MAP], name);
|
||||
if (key !== undefined) {
|
||||
this[MAP][key].push(value);
|
||||
} else {
|
||||
this[MAP][name] = [value];
|
||||
}
|
||||
}
|
||||
|
||||
has(name: string) {
|
||||
name = `${name}`;
|
||||
validateName(name);
|
||||
return find(this[MAP], name) !== undefined;
|
||||
}
|
||||
|
||||
delete(name: string) {
|
||||
name = `${name}`;
|
||||
validateName(name);
|
||||
const key = find(this[MAP], name);
|
||||
if (key !== undefined) {
|
||||
delete this[MAP][key];
|
||||
}
|
||||
}
|
||||
|
||||
raw() {
|
||||
return this[MAP];
|
||||
}
|
||||
|
||||
keys() {
|
||||
return createHeadersIterator(this, 'key');
|
||||
}
|
||||
|
||||
values() {
|
||||
return createHeadersIterator(this, 'value');
|
||||
}
|
||||
|
||||
entries() {
|
||||
return createHeadersIterator(this, 'key+value');
|
||||
}
|
||||
|
||||
[Symbol.iterator]() {
|
||||
return createHeadersIterator(this, 'key+value');
|
||||
}
|
||||
}
|
||||
|
||||
function createHeadersIterator(
|
||||
target: BaseHeaders,
|
||||
kind: 'key' | 'value' | 'key+value'
|
||||
) {
|
||||
const iterator = Object.create(HeadersIteratorPrototype);
|
||||
iterator[INTERNAL] = {
|
||||
target,
|
||||
kind,
|
||||
index: 0,
|
||||
};
|
||||
return iterator;
|
||||
}
|
||||
|
||||
function validateName(name: string) {
|
||||
name = `${name}`;
|
||||
if (INVALID_TOKEN_REGEX.test(name)) {
|
||||
throw new TypeError(`${name} is not a legal HTTP header name`);
|
||||
}
|
||||
}
|
||||
|
||||
function validateValue(value: string) {
|
||||
value = `${value}`;
|
||||
if (INVALID_HEADER_CHAR_REGEX.test(value)) {
|
||||
throw new TypeError(`${value} is not a legal HTTP header value`);
|
||||
}
|
||||
}
|
||||
|
||||
function find(
|
||||
map: { [k: string]: string[] },
|
||||
name: string
|
||||
): string | undefined {
|
||||
name = name.toLowerCase();
|
||||
for (const key in map) {
|
||||
if (key.toLowerCase() === name) {
|
||||
return key;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
Object.defineProperty(BaseHeaders.prototype, Symbol.toStringTag, {
|
||||
value: 'Headers',
|
||||
writable: false,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
});
|
||||
|
||||
Object.defineProperties(BaseHeaders.prototype, {
|
||||
append: { enumerable: true },
|
||||
delete: { enumerable: true },
|
||||
entries: { enumerable: true },
|
||||
forEach: { enumerable: true },
|
||||
get: { enumerable: true },
|
||||
has: { enumerable: true },
|
||||
keys: { enumerable: true },
|
||||
raw: { enumerable: false },
|
||||
set: { enumerable: true },
|
||||
values: { enumerable: true },
|
||||
});
|
||||
|
||||
function getHeaders(
|
||||
headers: BaseHeaders,
|
||||
kind: 'key' | 'value' | 'key+value' = 'key+value'
|
||||
) {
|
||||
const fn =
|
||||
kind === 'key'
|
||||
? (key: string) => key.toLowerCase()
|
||||
: kind === 'value'
|
||||
? (key: string) => headers[MAP][key].join(', ')
|
||||
: (key: string) => [key.toLowerCase(), headers[MAP][key].join(', ')];
|
||||
|
||||
return Object.keys(headers[MAP])
|
||||
.sort()
|
||||
.map(key => fn(key));
|
||||
}
|
||||
|
||||
const HeadersIteratorPrototype = Object.setPrototypeOf(
|
||||
{
|
||||
next() {
|
||||
if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
|
||||
throw new TypeError('Value of `this` is not a HeadersIterator');
|
||||
}
|
||||
|
||||
const { target, kind, index } = this[INTERNAL];
|
||||
const values = getHeaders(target, kind);
|
||||
const len = values.length;
|
||||
if (index >= len) {
|
||||
return {
|
||||
value: undefined,
|
||||
done: true,
|
||||
};
|
||||
}
|
||||
|
||||
this[INTERNAL].index = index + 1;
|
||||
|
||||
return {
|
||||
value: values[index],
|
||||
done: false,
|
||||
};
|
||||
},
|
||||
},
|
||||
Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))
|
||||
);
|
||||
|
||||
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
|
||||
value: 'HeadersIterator',
|
||||
writable: false,
|
||||
enumerable: false,
|
||||
configurable: true,
|
||||
});
|
||||
|
||||
export { BaseHeaders as Headers };
124 packages/middleware/src/websandbox/spec-compliant/request.ts Normal file
@@ -0,0 +1,124 @@
import { Body, cloneBody, extractContentType, getInstanceBody } from './body';
|
||||
import { Headers as BaseHeaders } from './headers';
|
||||
import { notImplemented } from '../utils';
|
||||
|
||||
export const INTERNALS = Symbol('internal request');
|
||||
|
||||
class BaseRequest extends Body implements Request {
|
||||
[INTERNALS]: {
|
||||
credentials: RequestCredentials;
|
||||
headers: Headers;
|
||||
method: string;
|
||||
redirect: RequestRedirect;
|
||||
url: URL;
|
||||
};
|
||||
|
||||
constructor(input: BaseRequest | string, init: RequestInit = {}) {
|
||||
const method = init.method?.toUpperCase() ?? 'GET';
|
||||
|
||||
if (
|
||||
(method === 'GET' || method === 'HEAD') &&
|
||||
(init.body || (input instanceof BaseRequest && getInstanceBody(input)))
|
||||
) {
|
||||
throw new TypeError('Request with GET/HEAD method cannot have body');
|
||||
}
|
||||
|
||||
let inputBody: BodyInit | null = null;
|
||||
if (init.body) {
|
||||
inputBody = init.body;
|
||||
} else if (input instanceof BaseRequest && getInstanceBody(input)) {
|
||||
inputBody = cloneBody(input);
|
||||
}
|
||||
|
||||
super(inputBody);
|
||||
|
||||
const headers = new BaseHeaders(
|
||||
init.headers || getProp(input, 'headers') || {}
|
||||
);
|
||||
if (inputBody !== null) {
|
||||
const contentType = extractContentType(this);
|
||||
if (contentType !== null && !headers.has('Content-Type')) {
|
||||
headers.append('Content-Type', contentType);
|
||||
}
|
||||
}
|
||||
|
||||
this[INTERNALS] = {
|
||||
credentials:
|
||||
init.credentials || getProp(input, 'credentials') || 'same-origin',
|
||||
headers,
|
||||
method,
|
||||
redirect: init.redirect || getProp(input, 'redirect') || 'follow',
|
||||
url: new URL(typeof input === 'string' ? input : input.url),
|
||||
};
|
||||
}
|
||||
|
||||
get url() {
|
||||
return this[INTERNALS].url.toString();
|
||||
}
|
||||
|
||||
get credentials() {
|
||||
return this[INTERNALS].credentials;
|
||||
}
|
||||
|
||||
get method() {
|
||||
return this[INTERNALS].method;
|
||||
}
|
||||
|
||||
get headers() {
|
||||
return this[INTERNALS].headers;
|
||||
}
|
||||
|
||||
get redirect() {
|
||||
return this[INTERNALS].redirect;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
public clone() {
|
||||
return new BaseRequest(this);
|
||||
}
|
||||
|
||||
get cache() {
|
||||
return notImplemented('Request', 'cache');
|
||||
}
|
||||
|
||||
get integrity() {
|
||||
return notImplemented('Request', 'integrity');
|
||||
}
|
||||
|
||||
get keepalive() {
|
||||
return notImplemented('Request', 'keepalive');
|
||||
}
|
||||
|
||||
get mode() {
|
||||
return notImplemented('Request', 'mode');
|
||||
}
|
||||
|
||||
get destination() {
|
||||
return notImplemented('Request', 'destination');
|
||||
}
|
||||
|
||||
get referrer() {
|
||||
return notImplemented('Request', 'referrer');
|
||||
}
|
||||
|
||||
get referrerPolicy() {
|
||||
return notImplemented('Request', 'referrerPolicy');
|
||||
}
|
||||
|
||||
get signal() {
|
||||
return notImplemented('Request', 'signal');
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() {
|
||||
return 'Request';
|
||||
}
|
||||
}
|
||||
|
||||
export { BaseRequest as Request };
|
||||
|
||||
function getProp<K extends keyof BaseRequest>(
|
||||
input: BaseRequest | string,
|
||||
key: K
|
||||
): BaseRequest[K] | undefined {
|
||||
return input instanceof BaseRequest ? input[key] : undefined;
|
||||
}
113 packages/middleware/src/websandbox/spec-compliant/response.ts Normal file
@@ -0,0 +1,113 @@
import { Body, BodyInit, cloneBody, extractContentType } from './body';
|
||||
|
||||
const INTERNALS = Symbol('internal response');
|
||||
const REDIRECTS = new Set([301, 302, 303, 307, 308]);
|
||||
|
||||
class BaseResponse extends Body implements Response {
|
||||
[INTERNALS]: {
|
||||
headers: Headers;
|
||||
status: number;
|
||||
statusText: string;
|
||||
type: 'default' | 'error';
|
||||
url?: URL;
|
||||
};
|
||||
|
||||
constructor(body?: BodyInit | null, init?: ResponseInit) {
|
||||
super(body);
|
||||
|
||||
this[INTERNALS] = {
|
||||
headers: new Headers(init?.headers),
|
||||
status: init?.status || 200,
|
||||
statusText: init?.statusText || '',
|
||||
type: 'default',
|
||||
url: init?.url ? new URL(init.url) : undefined,
|
||||
};
|
||||
|
||||
if (this[INTERNALS].status < 200 || this[INTERNALS].status > 599) {
|
||||
throw new RangeError(
|
||||
`Responses may only be constructed with status codes in the range 200 to 599, inclusive.`
|
||||
);
|
||||
}
|
||||
|
||||
if (body !== null && !this[INTERNALS].headers.has('Content-Type')) {
|
||||
const contentType = extractContentType(this);
|
||||
if (contentType) {
|
||||
this[INTERNALS].headers.append('Content-Type', contentType);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
static redirect(url: string, status = 302) {
|
||||
if (!REDIRECTS.has(status)) {
|
||||
throw new RangeError(
|
||||
'Failed to execute "redirect" on "response": Invalid status code'
|
||||
);
|
||||
}
|
||||
|
||||
return new Response(null, {
|
||||
headers: { Location: url },
|
||||
status,
|
||||
});
|
||||
}
|
||||
|
||||
static error() {
|
||||
const response = new BaseResponse(null, { status: 0, statusText: '' });
|
||||
response[INTERNALS].type = 'error';
|
||||
return response;
|
||||
}
|
||||
|
||||
get url() {
|
||||
return this[INTERNALS].url?.toString() || '';
|
||||
}
|
||||
|
||||
get ok() {
|
||||
return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;
|
||||
}
|
||||
|
||||
get status() {
|
||||
return this[INTERNALS].status;
|
||||
}
|
||||
|
||||
get statusText() {
|
||||
return this[INTERNALS].statusText;
|
||||
}
|
||||
|
||||
get headers() {
|
||||
return this[INTERNALS].headers;
|
||||
}
|
||||
|
||||
get redirected() {
|
||||
return (
|
||||
this[INTERNALS].status > 299 &&
|
||||
this[INTERNALS].status < 400 &&
|
||||
this[INTERNALS].headers.has('Location')
|
||||
);
|
||||
}
|
||||
|
||||
get type() {
|
||||
return this[INTERNALS].type;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
clone() {
|
||||
return new BaseResponse(cloneBody(this), {
|
||||
headers: this.headers,
|
||||
status: this.status,
|
||||
statusText: this.statusText,
|
||||
url: this.url,
|
||||
});
|
||||
}
|
||||
|
||||
get [Symbol.toStringTag]() {
|
||||
return 'Response';
|
||||
}
|
||||
}
|
||||
|
||||
export interface ResponseInit {
|
||||
headers?: HeadersInit;
|
||||
status?: number;
|
||||
statusText?: string;
|
||||
url?: string;
|
||||
}
|
||||
|
||||
export { BaseResponse as Response };
|
||||
@@ -0,0 +1,26 @@
|
||||
import { DeprecationError } from '../error';
|
||||
import { FetchEvent } from '../spec-compliant/fetch-event';
|
||||
import { NextRequest } from './request';
|
||||
|
||||
export class NextFetchEvent extends FetchEvent {
|
||||
sourcePage: string;
|
||||
|
||||
constructor(params: { request: NextRequest; page: string }) {
|
||||
//@ts-ignore
|
||||
super(params.request);
|
||||
this.sourcePage = params.page;
|
||||
}
|
||||
|
||||
// @ts-ignore
|
||||
get request() {
|
||||
throw new DeprecationError({
|
||||
page: this.sourcePage,
|
||||
});
|
||||
}
|
||||
|
||||
respondWith() {
|
||||
throw new DeprecationError({
|
||||
page: this.sourcePage,
|
||||
});
|
||||
}
|
||||
}
98 packages/middleware/src/websandbox/spec-extension/request.ts Normal file
@@ -0,0 +1,98 @@
import type { IResult } from 'ua-parser-js';
|
||||
import cookie from 'cookie';
|
||||
import parseua from 'ua-parser-js';
|
||||
import { Request, RequestInit as NodeFetchRequestInit } from 'node-fetch';
|
||||
|
||||
export const INTERNALS = Symbol('internal request');
|
||||
|
||||
export class NextRequest extends Request {
|
||||
[INTERNALS]: {
|
||||
cookieParser(): { [key: string]: string };
|
||||
geo: { city?: string; country?: string; region?: string };
|
||||
ip?: string;
|
||||
page?: { name?: string; params?: { [key: string]: string } };
|
||||
ua?: IResult | null;
|
||||
url: URL;
|
||||
};
|
||||
|
||||
constructor(input: Request | string, init: RequestInit = {}) {
|
||||
//@ts-ignore
|
||||
super(input, init);
|
||||
|
||||
const cookieParser = () => {
|
||||
const value = this.headers.get('cookie');
|
||||
return value ? cookie.parse(value) : {};
|
||||
};
|
||||
|
||||
this[INTERNALS] = {
|
||||
cookieParser,
|
||||
geo: init.geo || {},
|
||||
ip: init.ip,
|
||||
page: init.page,
|
||||
url: new URL(typeof input === 'string' ? input : input.url),
|
||||
};
|
||||
}
|
||||
|
||||
public get cookies() {
|
||||
return this[INTERNALS].cookieParser();
|
||||
}
|
||||
|
||||
public get geo() {
|
||||
return this[INTERNALS].geo;
|
||||
}
|
||||
|
||||
public get ip() {
|
||||
return this[INTERNALS].ip;
|
||||
}
|
||||
|
||||
public get preflight() {
|
||||
return this.headers.get('x-middleware-preflight');
|
||||
}
|
||||
|
||||
public get nextUrl() {
|
||||
return this[INTERNALS].url;
|
||||
}
|
||||
|
||||
public get page() {
|
||||
return {
|
||||
name: this[INTERNALS].page?.name,
|
||||
params: this[INTERNALS].page?.params,
|
||||
};
|
||||
}
|
||||
|
||||
public get ua() {
|
||||
if (typeof this[INTERNALS].ua !== 'undefined') {
|
||||
return this[INTERNALS].ua || undefined;
|
||||
}
|
||||
|
||||
const uaString = this.headers.get('user-agent');
|
||||
if (!uaString) {
|
||||
this[INTERNALS].ua = null;
|
||||
return this[INTERNALS].ua || undefined;
|
||||
}
|
||||
|
||||
this[INTERNALS].ua = {
|
||||
...parseua(uaString),
|
||||
};
|
||||
|
||||
return this[INTERNALS].ua;
|
||||
}
|
||||
|
||||
//@ts-ignore
|
||||
public get url() {
|
||||
return this[INTERNALS].url.toString();
|
||||
}
|
||||
}
|
||||
|
||||
export interface RequestInit extends NodeFetchRequestInit {
|
||||
geo?: {
|
||||
city?: string;
|
||||
country?: string;
|
||||
region?: string;
|
||||
};
|
||||
ip?: string;
|
||||
page?: {
|
||||
name?: string;
|
||||
params?: { [key: string]: string };
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,97 @@
|
||||
import type { CookieSerializeOptions } from 'cookie';
|
||||
import cookie from 'cookie';
|
||||
import { Response, ResponseInit as NodeFetchResponseInit } from 'node-fetch';
|
||||
|
||||
const INTERNALS = Symbol('internal response');
|
||||
const REDIRECTS = new Set([301, 302, 303, 307, 308]);
|
||||
|
||||
export class SpecResponse extends Response {
|
||||
[INTERNALS]: {
|
||||
cookieParser(): { [key: string]: string };
|
||||
url?: URL;
|
||||
};
|
||||
|
||||
constructor(body?: BodyInit | null, init: ResponseInit = {}) {
|
||||
// TODO - why is this failing?
|
||||
// @ts-ignore
|
||||
super(body, init);
|
||||
|
||||
const cookieParser = () => {
|
||||
const value = this.headers.get('cookie');
|
||||
return value ? cookie.parse(value) : {};
|
||||
};
|
||||
|
||||
this[INTERNALS] = {
|
||||
cookieParser,
|
||||
url: init.url ? new URL(init.url) : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
public get cookies() {
|
||||
return this[INTERNALS].cookieParser();
|
||||
}
|
||||
|
||||
public cookie(
|
||||
name: string,
|
||||
value: { [key: string]: any } | string,
|
||||
opts: CookieSerializeOptions = {}
|
||||
) {
|
||||
const val =
|
||||
typeof value === 'object' ? 'j:' + JSON.stringify(value) : String(value);
|
||||
|
||||
if (opts.maxAge) {
|
||||
opts.expires = new Date(Date.now() + opts.maxAge);
|
||||
opts.maxAge /= 1000;
|
||||
}
|
||||
|
||||
if (opts.path == null) {
|
||||
opts.path = '/';
|
||||
}
|
||||
|
||||
this.headers.append(
|
||||
'Set-Cookie',
|
||||
cookie.serialize(name, String(val), opts)
|
||||
);
|
||||
return this;
|
||||
}
|
||||
|
||||
public clearCookie(name: string, opts: CookieSerializeOptions = {}) {
|
||||
return this.cookie(name, '', { expires: new Date(1), path: '/', ...opts });
|
||||
}
|
||||
|
||||
static redirect(url: string | URL, status = 302) {
|
||||
if (!REDIRECTS.has(status)) {
|
||||
throw new RangeError(
|
||||
'Failed to execute "redirect" on "response": Invalid status code'
|
||||
);
|
||||
}
|
||||
|
||||
return new SpecResponse(null, {
|
||||
headers: { Location: typeof url === 'string' ? url : url.toString() },
|
||||
status,
|
||||
});
|
||||
}
|
||||
|
||||
static rewrite(destination: string | URL) {
|
||||
return new SpecResponse(null, {
|
||||
headers: {
|
||||
'x-middleware-rewrite':
|
||||
typeof destination === 'string'
|
||||
? destination
|
||||
: destination.toString(),
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
static next() {
|
||||
return new SpecResponse(null, {
|
||||
headers: {
|
||||
'x-middleware-next': '1',
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
interface ResponseInit extends NodeFetchResponseInit {
|
||||
url?: string;
|
||||
}
26 packages/middleware/src/websandbox/types.ts Normal file
@@ -0,0 +1,26 @@
import { Response } from 'node-fetch';
|
||||
|
||||
export interface NodeHeaders {
|
||||
[header: string]: string | string[] | undefined;
|
||||
}
|
||||
|
||||
export interface RequestData {
|
||||
geo?: {
|
||||
city?: string;
|
||||
country?: string;
|
||||
region?: string;
|
||||
};
|
||||
headers: NodeHeaders;
|
||||
ip?: string;
|
||||
method: string;
|
||||
page?: {
|
||||
name?: string;
|
||||
params?: { [key: string]: string };
|
||||
};
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface FetchEventResult {
|
||||
response: Response;
|
||||
waitUntil: Promise<any>;
|
||||
}
124 packages/middleware/src/websandbox/utils.ts Normal file
@@ -0,0 +1,124 @@
import type { NodeHeaders } from './types';
|
||||
import { Headers } from 'node-fetch';
|
||||
|
||||
export async function* streamToIterator<T>(
|
||||
readable: ReadableStream<T>
|
||||
): AsyncIterableIterator<T> {
|
||||
const reader = readable.getReader();
|
||||
while (true) {
|
||||
const { value, done } = await reader.read();
|
||||
if (done) break;
|
||||
if (value) {
|
||||
yield value;
|
||||
}
|
||||
}
|
||||
reader.releaseLock();
|
||||
}
|
||||
|
||||
export function notImplemented(name: string, method: string): any {
|
||||
throw new Error(
|
||||
`Failed to get the '${method}' property on '${name}': the property is not implemented`
|
||||
);
|
||||
}
|
||||
|
||||
export function fromNodeHeaders(object: NodeHeaders): Headers {
|
||||
const headers = new Headers();
|
||||
for (const [key, value] of Object.entries(object)) {
|
||||
const values = Array.isArray(value) ? value : [value];
|
||||
for (const v of values) {
|
||||
if (v !== undefined) {
|
||||
headers.append(key, v);
|
||||
}
|
||||
}
|
||||
}
|
||||
return headers;
|
||||
}
|
||||
|
||||
export function toNodeHeaders(headers?: Headers): NodeHeaders {
|
||||
const result: NodeHeaders = {};
|
||||
if (headers) {
|
||||
for (const [key, value] of headers.entries()) {
|
||||
result[key] = value;
|
||||
if (key.toLowerCase() === 'set-cookie') {
|
||||
result[key] = splitCookiesString(value);
|
||||
}
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
/*
|
||||
Set-Cookie header field-values are sometimes comma joined in one string. This splits them without choking on commas
|
||||
that are within a single set-cookie field-value, such as in the Expires portion.
|
||||
This is uncommon, but explicitly allowed - see https://tools.ietf.org/html/rfc2616#section-4.2
|
||||
Node.js does this for every header *except* set-cookie - see https://github.com/nodejs/node/blob/d5e363b77ebaf1caf67cd7528224b651c86815c1/lib/_http_incoming.js#L128
|
||||
React Native's fetch does this for *every* header, including set-cookie.
|
||||
|
||||
Based on: https://github.com/google/j2objc/commit/16820fdbc8f76ca0c33472810ce0cb03d20efe25
|
||||
Credits to: https://github.com/tomball for original and https://github.com/chrusart for JavaScript implementation
|
||||
*/
|
||||
export function splitCookiesString(cookiesString: string) {
|
||||
const cookiesStrings = [];
|
||||
let pos = 0;
|
||||
let start;
|
||||
let ch;
|
||||
let lastComma;
|
||||
let nextStart;
|
||||
let cookiesSeparatorFound;
|
||||
|
||||
function skipWhitespace() {
|
||||
while (pos < cookiesString.length && /\s/.test(cookiesString.charAt(pos))) {
|
||||
pos += 1;
|
||||
}
|
||||
return pos < cookiesString.length;
|
||||
}
|
||||
|
||||
function notSpecialChar() {
|
||||
ch = cookiesString.charAt(pos);
|
||||
|
||||
return ch !== '=' && ch !== ';' && ch !== ',';
|
||||
}
|
||||
|
||||
while (pos < cookiesString.length) {
|
||||
start = pos;
|
||||
cookiesSeparatorFound = false;
|
||||
|
||||
while (skipWhitespace()) {
|
||||
ch = cookiesString.charAt(pos);
|
||||
if (ch === ',') {
|
||||
// ',' is a cookie separator only if the next special character we reach is '=', not ';' or ','
|
||||
lastComma = pos;
|
||||
pos += 1;
|
||||
|
||||
skipWhitespace();
|
||||
nextStart = pos;
|
||||
|
||||
while (pos < cookiesString.length && notSpecialChar()) {
|
||||
pos += 1;
|
||||
}
|
||||
|
||||
// currently special character
|
||||
if (pos < cookiesString.length && cookiesString.charAt(pos) === '=') {
|
||||
// we found cookies separator
|
||||
cookiesSeparatorFound = true;
|
||||
// pos is inside the next cookie, so back up and return it.
|
||||
pos = nextStart;
|
||||
cookiesStrings.push(cookiesString.substring(start, lastComma));
|
||||
start = pos;
|
||||
} else {
|
||||
// in param ',' or param separator ';',
|
||||
// we continue from that comma
|
||||
pos = lastComma + 1;
|
||||
}
|
||||
} else {
|
||||
pos += 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (!cookiesSeparatorFound || pos >= cookiesString.length) {
|
||||
cookiesStrings.push(cookiesString.substring(start, cookiesString.length));
|
||||
}
|
||||
}
|
||||
|
||||
return cookiesStrings;
|
||||
}
21 packages/middleware/test/__snapshots__/build.test.ts.snap Normal file
@@ -0,0 +1,21 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`build() should build simple middleware 1`] = `
|
||||
Object {
|
||||
"middleware": Object {
|
||||
"/": Object {
|
||||
"env": Array [],
|
||||
"files": Array [
|
||||
"server/pages/_middleware.js",
|
||||
],
|
||||
"name": "pages/_middleware",
|
||||
"page": "/",
|
||||
"regexp": "^/.*$",
|
||||
},
|
||||
},
|
||||
"sortedMiddleware": Array [
|
||||
"/",
|
||||
],
|
||||
"version": 1,
|
||||
}
|
||||
`;
57 packages/middleware/test/build.test.ts vendored Normal file
@@ -0,0 +1,57 @@
import { join } from 'path';
|
||||
import { promises as fsp } from 'fs';
|
||||
import { build } from '../src';
|
||||
import { Response } from 'node-fetch';
|
||||
|
||||
describe('build()', () => {
|
||||
beforeEach(() => {
|
||||
//@ts-ignore
|
||||
global.Response = Response;
|
||||
});
|
||||
afterEach(() => {
|
||||
//@ts-ignore
|
||||
delete global.Response;
|
||||
//@ts-ignore
|
||||
delete global._ENTRIES;
|
||||
});
|
||||
it('should build simple middleware', async () => {
|
||||
const fixture = join(__dirname, 'fixtures/simple');
|
||||
await build({
|
||||
workPath: fixture,
|
||||
});
|
||||
|
||||
const middlewareManifest = JSON.parse(
|
||||
await fsp.readFile(
|
||||
join(fixture, '.output/server/middleware-manifest.json'),
|
||||
'utf8'
|
||||
)
|
||||
);
|
||||
expect(middlewareManifest).toMatchSnapshot();
|
||||
|
||||
const outputFile = join(fixture, '.output/server/pages/_middleware.js');
|
||||
expect(await fsp.stat(outputFile)).toBeTruthy();
|
||||
|
||||
require(outputFile);
|
||||
//@ts-ignore
|
||||
const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
|
||||
expect(typeof middleware).toStrictEqual('function');
|
||||
const handledResponse = await middleware({
|
||||
request: {
|
||||
url: 'http://google.com',
|
||||
},
|
||||
});
|
||||
const unhandledResponse = await middleware({
|
||||
request: {
|
||||
url: 'literallyanythingelse',
|
||||
},
|
||||
});
|
||||
expect(String(handledResponse.response.body)).toEqual('Hi from the edge!');
|
||||
expect(
|
||||
(handledResponse.response as Response).headers.get('x-middleware-next')
|
||||
).toEqual(null);
|
||||
expect(unhandledResponse.response.body).toEqual(null);
|
||||
expect(
|
||||
(unhandledResponse.response as Response).headers.get('x-middleware-next')
|
||||
).toEqual('1');
|
||||
});
|
||||
});
5 packages/middleware/test/fixtures/simple/_middleware.js vendored Normal file
@@ -0,0 +1,5 @@
export default req => {
  if (req.url === 'http://google.com') {
    return new Response('Hi from the edge!');
  }
};
4 packages/middleware/test/tsconfig.json vendored Normal file
@@ -0,0 +1,4 @@
{
  "extends": "../tsconfig.json",
  "include": ["*.test.ts"]
}
16 packages/middleware/tsconfig.json Normal file
@@ -0,0 +1,16 @@
{
  "compilerOptions": {
    "strict": true,
    "esModuleInterop": true,
    "lib": ["esnext", "dom", "dom.iterable"],
    "target": "es2018",
    "module": "commonjs",
    "outDir": "dist",
    "sourceMap": false,
    "declaration": true,
    "moduleResolution": "node",
    "typeRoots": ["./@types", "./node_modules/@types"]
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules"]
}
@@ -1,5 +1,6 @@
|
||||
const { parse } = require('url');
|
||||
const { parse, pathToFileURL } = require('url');
|
||||
const { createServer, Server } = require('http');
|
||||
const { isAbsolute } = require('path');
|
||||
const { Bridge } = require('./bridge.js');
|
||||
|
||||
/**
|
||||
@@ -15,8 +16,9 @@ function makeVercelLauncher(config) {
|
||||
shouldAddSourcemapSupport = false,
|
||||
} = config;
|
||||
return `
|
||||
const { parse } = require('url');
|
||||
const { parse, pathToFileURL } = require('url');
|
||||
const { createServer, Server } = require('http');
|
||||
const { isAbsolute } = require('path');
|
||||
const { Bridge } = require(${JSON.stringify(bridgePath)});
|
||||
${
|
||||
shouldAddSourcemapSupport
|
||||
@@ -60,13 +62,15 @@ function getVercelLauncher({
|
||||
process.env.NODE_ENV = region === 'dev1' ? 'development' : 'production';
|
||||
}
|
||||
|
||||
async function getListener() {
|
||||
/**
|
||||
* @param {string} p - entrypointPath
|
||||
*/
|
||||
async function getListener(p) {
|
||||
let listener = useRequire
|
||||
? require(entrypointPath)
|
||||
: await import(entrypointPath);
|
||||
? require(p)
|
||||
: await import(isAbsolute(p) ? pathToFileURL(p).href : p);
|
||||
|
||||
// In some cases we might have nested default props
|
||||
// due to TS => JS
|
||||
// In some cases we might have nested default props due to TS => JS
|
||||
for (let i = 0; i < 5; i++) {
|
||||
if (listener.default) listener = listener.default;
|
||||
}
|
||||
@@ -74,7 +78,7 @@ function getVercelLauncher({
|
||||
return listener;
|
||||
}
|
||||
|
||||
getListener()
|
||||
getListener(entrypointPath)
|
||||
.then(listener => {
|
||||
if (typeof listener.listen === 'function') {
|
||||
Server.prototype.listen = originalListen;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/node-bridge",
|
||||
"version": "2.1.1-canary.1",
|
||||
"version": "2.1.1-canary.2",
|
||||
"license": "MIT",
|
||||
"main": "./index.js",
|
||||
"repository": {
|
||||
|
||||
@@ -33,14 +33,6 @@ async function main() {
|
||||
await fs.remove(symlinkTarget);
|
||||
await fs.symlink('symlinked-asset', symlinkTarget);
|
||||
|
||||
// Use types.d.ts as the main types export
|
||||
await Promise.all(
|
||||
(await fs.readdir(outDir))
|
||||
.filter(p => p.endsWith('.d.ts') && p !== 'types.d.ts')
|
||||
.map(p => fs.remove(join(outDir, p)))
|
||||
);
|
||||
await fs.rename(join(outDir, 'types.d.ts'), join(outDir, 'index.d.ts'));
|
||||
|
||||
// Bundle helpers.ts with ncc
|
||||
await fs.remove(join(outDir, 'helpers.js'));
|
||||
const helpersDir = join(outDir, 'helpers');
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@vercel/node",
|
||||
"version": "1.12.2-canary.4",
|
||||
"version": "1.12.2-canary.7",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
|
||||
@@ -34,7 +34,7 @@
|
||||
"@types/test-listen": "1.1.0",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"@vercel/nft": "0.14.0",
|
||||
"@vercel/node-bridge": "2.1.1-canary.1",
|
||||
"@vercel/node-bridge": "2.1.1-canary.2",
|
||||
"content-type": "1.0.4",
|
||||
"cookie": "0.4.0",
|
||||
"etag": "1.8.1",
|
||||
|
||||
@@ -339,6 +339,9 @@ function getAWSLambdaHandler(entrypoint: string, config: Config) {
|
||||
return '';
|
||||
}
|
||||
|
||||
// Ensures that everything from `types.ts` is exported in the final `index.d.ts` file.
|
||||
export * from './types';
|
||||
|
||||
export const version = 3;
|
||||
|
||||
export async function build({
|
||||
|
||||
@@ -16,7 +16,7 @@ const init = async () => {
|
||||
console.log('Hapi server running on %s', server.info.uri);
|
||||
};
|
||||
|
||||
process.on('unhandledRejection', (err) => {
|
||||
process.on('unhandledRejection', err => {
|
||||
console.log('Hapi failed in an unexpected way');
|
||||
console.log(err);
|
||||
process.exit(1);
|
||||
|
||||
@@ -3,10 +3,10 @@ const path = require('path');
|
||||
|
||||
module.exports = (req, resp) => {
|
||||
const asset1 = fs.readFileSync(
|
||||
path.join(__dirname, 'subdirectory1/asset.txt'),
|
||||
path.join(__dirname, 'subdirectory1/asset.txt')
|
||||
);
|
||||
const asset2 = fs.readFileSync(
|
||||
path.join(__dirname, 'subdirectory2/asset.txt'),
|
||||
path.join(__dirname, 'subdirectory2/asset.txt')
|
||||
);
|
||||
resp.end(`${asset1},${asset2}`);
|
||||
};
|
||||
|
||||
@@ -8,8 +8,8 @@ const typeDefs = `
|
||||
|
||||
const resolvers = {
|
||||
Query: {
|
||||
hello: (_, { name }) => `Hello ${name || "world"}`
|
||||
}
|
||||
hello: (_, { name }) => `Hello ${name || 'world'}`,
|
||||
},
|
||||
};
|
||||
|
||||
const lambda = new GraphQLServerLambda({
|
||||
27 packages/plugin-go/package.json Normal file
@@ -0,0 +1,27 @@
{
  "private": false,
  "name": "vercel-plugin-go",
  "version": "1.0.0-canary.5",
  "main": "dist/index.js",
  "license": "MIT",
  "files": [
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/vercel/vercel.git",
    "directory": "packages/vercel-plugin-go"
  },
  "scripts": {
    "build": "tsc",
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "@vercel/build-utils": "2.12.3-canary.20",
    "@vercel/go": "1.2.4-canary.4"
  },
  "devDependencies": {
    "@types/node": "*",
    "typescript": "4.3.4"
  }
}
6 packages/plugin-go/src/index.ts Normal file
@@ -0,0 +1,6 @@
import { convertRuntimeToPlugin } from '@vercel/build-utils';
import * as go from '@vercel/go';

export const build = convertRuntimeToPlugin(go.build, '.go');

export const startDevServer = go.startDevServer;
packages/plugin-go/tsconfig.json
Normal file
17
packages/plugin-go/tsconfig.json
Normal file
@@ -0,0 +1,17 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"outDir": "dist",
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||