mirror of
https://github.com/LukeHagar/vercel.git
synced 2025-12-11 12:57:46 +00:00
Compare commits
268 Commits
@now/pytho
...
@now/php-b
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d8a5da6a3e | ||
|
|
48f7b72bb2 | ||
|
|
8e2d5de446 | ||
|
|
2b3efb06be | ||
|
|
13e57bf68f | ||
|
|
dc3f112d4f | ||
|
|
da1c423033 | ||
|
|
5e1d58f0e8 | ||
|
|
8940f7fa33 | ||
|
|
0aeecd81d3 | ||
|
|
bd4cb3d2a0 | ||
|
|
b3d14f536d | ||
|
|
445d4d84cb | ||
|
|
30d4ec8cbd | ||
|
|
51355c263c | ||
|
|
ec6d695f24 | ||
|
|
da910dc097 | ||
|
|
00cb55f953 | ||
|
|
54ee557187 | ||
|
|
e9a49f658b | ||
|
|
f3484bc7c9 | ||
|
|
f76d200fd2 | ||
|
|
24ba90bfeb | ||
|
|
9f0fdada18 | ||
|
|
a987c4b298 | ||
|
|
a746adda93 | ||
|
|
6a00f02137 | ||
|
|
65558f8197 | ||
|
|
a8bf77091f | ||
|
|
ee179b9b52 | ||
|
|
b480b07cce | ||
|
|
fc8452abfd | ||
|
|
48b6d0ebfc | ||
|
|
a3d6cea3c6 | ||
|
|
8a61b1b513 | ||
|
|
50e648d28a | ||
|
|
52994bfe26 | ||
|
|
1339f17585 | ||
|
|
9dd12cf1a7 | ||
|
|
6dab09f38e | ||
|
|
c79d7be591 | ||
|
|
9af3425d6d | ||
|
|
0700c16504 | ||
|
|
4e55d9f709 | ||
|
|
945eb24bdc | ||
|
|
c884102401 | ||
|
|
36e79efd7f | ||
|
|
21ee0f3707 | ||
|
|
ea5d3b8e80 | ||
|
|
301e0f216b | ||
|
|
7a6fbd8c3d | ||
|
|
77e7a0f502 | ||
|
|
6bc42bbce9 | ||
|
|
de88969c46 | ||
|
|
e86cd38787 | ||
|
|
dc1badc931 | ||
|
|
ed3c176f5c | ||
|
|
749ee5264c | ||
|
|
9808ea1d8f | ||
|
|
a77e7109c7 | ||
|
|
3b87c7ca83 | ||
|
|
1887df779a | ||
|
|
daccd0d8fc | ||
|
|
fc9bbd2578 | ||
|
|
f23f6ca643 | ||
|
|
c8d90fbcd1 | ||
|
|
f4247da49a | ||
|
|
9d781403ef | ||
|
|
ca188cf8e2 | ||
|
|
207d895c0c | ||
|
|
685821976d | ||
|
|
fef5638cb9 | ||
|
|
073ed247ad | ||
|
|
f071788ce6 | ||
|
|
16f24bc3c8 | ||
|
|
97fe3d489d | ||
|
|
522d3a530c | ||
|
|
bafb49c464 | ||
|
|
7d5bd91e23 | ||
|
|
213614881c | ||
|
|
a225a4f855 | ||
|
|
ed2fd1dd29 | ||
|
|
bd33528fc7 | ||
|
|
16969803f8 | ||
|
|
03cc4c0b01 | ||
|
|
0b9699da75 | ||
|
|
6737011a63 | ||
|
|
6d2b0e014c | ||
|
|
409359bfec | ||
|
|
2151812596 | ||
|
|
22860be6d0 | ||
|
|
78c3cbd7b4 | ||
|
|
a458a55e99 | ||
|
|
911d85be39 | ||
|
|
98b5a4b0e9 | ||
|
|
5f80e451b8 | ||
|
|
0288f2d1a3 | ||
|
|
e39a5eca04 | ||
|
|
d4493f7d39 | ||
|
|
145e5a10c2 | ||
|
|
bd2d289252 | ||
|
|
a673e5f752 | ||
|
|
b2dc31a6b4 | ||
|
|
62a308bed7 | ||
|
|
ac08bfd26f | ||
|
|
d7f1371799 | ||
|
|
c97ad02aca | ||
|
|
c0460b734d | ||
|
|
3b0ed55b57 | ||
|
|
402153f076 | ||
|
|
6ec823e292 | ||
|
|
a9af9ebb5a | ||
|
|
ce88a64693 | ||
|
|
490cd8363e | ||
|
|
71d1651797 | ||
|
|
0da7197c3e | ||
|
|
950a4e98e9 | ||
|
|
8258ede23f | ||
|
|
77f84fe2aa | ||
|
|
5c4b946864 | ||
|
|
dfc51ad97f | ||
|
|
d32afc8332 | ||
|
|
9d1263ccc2 | ||
|
|
7bf2cfb3dc | ||
|
|
9b37460c4f | ||
|
|
b7f8b37ca6 | ||
|
|
13aa1b2d1c | ||
|
|
92437c075e | ||
|
|
331c263587 | ||
|
|
7d4f6f636b | ||
|
|
5e90ef8e34 | ||
|
|
4885d680a7 | ||
|
|
97cbe0b894 | ||
|
|
301eea90ee | ||
|
|
ea4f9dd930 | ||
|
|
38928ab942 | ||
|
|
bfb67d10ec | ||
|
|
616bad8a3d | ||
|
|
e026ddf805 | ||
|
|
bec9ea101f | ||
|
|
54f3f755fb | ||
|
|
5b03109ba7 | ||
|
|
7ff9e810ff | ||
|
|
3036aff45e | ||
|
|
c366aa69a4 | ||
|
|
c8d225522d | ||
|
|
8ee5063669 | ||
|
|
9372e70747 | ||
|
|
4a4bd550a1 | ||
|
|
f53343d547 | ||
|
|
e4ed811b53 | ||
|
|
e9935dee31 | ||
|
|
2e1e6bb131 | ||
|
|
4a01ac4bd0 | ||
|
|
bd1a7c428f | ||
|
|
9a4a3dac47 | ||
|
|
4f2c35a0ee | ||
|
|
672df5d026 | ||
|
|
8cb648abc4 | ||
|
|
74f658c634 | ||
|
|
efbb54a232 | ||
|
|
3e2bd03e01 | ||
|
|
8dc92b70b9 | ||
|
|
4267be4e5a | ||
|
|
43ba6459eb | ||
|
|
8c5638915d | ||
|
|
3fab247c15 | ||
|
|
6ab0e2e9ab | ||
|
|
34369148d7 | ||
|
|
662ad1ed3a | ||
|
|
890cd74ee5 | ||
|
|
7ef616b31e | ||
|
|
bebcfa4bb5 | ||
|
|
25100c53aa | ||
|
|
fe20da87e7 | ||
|
|
18cb147c86 | ||
|
|
9c9e18586f | ||
|
|
0cd7192740 | ||
|
|
a2d9c4fb4b | ||
|
|
02fafd2ebc | ||
|
|
42577c915c | ||
|
|
73db9e11dd | ||
|
|
3125125c16 | ||
|
|
5335291408 | ||
|
|
36620559f9 | ||
|
|
360ea3a609 | ||
|
|
1cd362126c | ||
|
|
ae19fe95f6 | ||
|
|
3e34d402a2 | ||
|
|
cc7b97fbbb | ||
|
|
c1049985af | ||
|
|
214388ccf3 | ||
|
|
b1d6b7bfc0 | ||
|
|
ece3564dfd | ||
|
|
a88af1f077 | ||
|
|
d92f7b26c0 | ||
|
|
52198af750 | ||
|
|
d58bff2453 | ||
|
|
8c0a144ae4 | ||
|
|
106e4d5f36 | ||
|
|
66c28bd695 | ||
|
|
55e75296ff | ||
|
|
36cbb36737 | ||
|
|
978ca328ef | ||
|
|
7b383e0f7c | ||
|
|
faa5ab36aa | ||
|
|
c0a21969dd | ||
|
|
73d0a1723f | ||
|
|
7c515544ae | ||
|
|
b53c9a6299 | ||
|
|
35ff11e6e4 | ||
|
|
64ee4905cd | ||
|
|
e50dd7e50a | ||
|
|
6101ba9d95 | ||
|
|
8dc0c92c58 | ||
|
|
44c9f3765a | ||
|
|
92c05ca338 | ||
|
|
069b557906 | ||
|
|
692a0df909 | ||
|
|
aeafeb5441 | ||
|
|
a09d5fb355 | ||
|
|
d8017aa9aa | ||
|
|
702f56b9b5 | ||
|
|
183b117152 | ||
|
|
75b3fb4981 | ||
|
|
49e63de5fe | ||
|
|
4742cd32f2 | ||
|
|
377b73105d | ||
|
|
a5577efb3d | ||
|
|
2ec46dc5c9 | ||
|
|
42708ed93c | ||
|
|
2fabe95f6e | ||
|
|
ac1a3dab22 | ||
|
|
ad4011512d | ||
|
|
9ff1a25c8f | ||
|
|
8039b3d377 | ||
|
|
dd9017475c | ||
|
|
031499014f | ||
|
|
2a68d2a2ad | ||
|
|
31299fae6e | ||
|
|
4bac0db379 | ||
|
|
95e7d459d3 | ||
|
|
dd120b8d20 | ||
|
|
b6975676e5 | ||
|
|
a7951dae81 | ||
|
|
b0c918f7fb | ||
|
|
df54dc7dc9 | ||
|
|
0dd801ff6c | ||
|
|
398743ef95 | ||
|
|
337c74b81b | ||
|
|
680bb82ec3 | ||
|
|
17ed5411e3 | ||
|
|
d9bbcb6939 | ||
|
|
800e4de76f | ||
|
|
864dd468d9 | ||
|
|
ba833871bb | ||
|
|
e732bac78e | ||
|
|
28ea4015b4 | ||
|
|
a93d97cabd | ||
|
|
67f39f7c9b | ||
|
|
acd793b9e9 | ||
|
|
f74d61279d | ||
|
|
fcb8eacec0 | ||
|
|
c8fca2ba72 | ||
|
|
4feffa13eb | ||
|
|
3e330b25f4 | ||
|
|
9b2cae33af | ||
|
|
4b6371530c |
12
.circleci/build.sh
Executable file
12
.circleci/build.sh
Executable file
@@ -0,0 +1,12 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
circleci_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||||
|
needs_build="$(grep -rn '"build"' packages/*/package.json | cut -d: -f1)"
|
||||||
|
|
||||||
|
for pkg in $needs_build; do
|
||||||
|
dir="$(dirname "$pkg")"
|
||||||
|
cd "$circleci_dir/../$dir"
|
||||||
|
echo "Building \`$dir\`"
|
||||||
|
yarn build
|
||||||
|
done
|
||||||
@@ -20,21 +20,21 @@ jobs:
|
|||||||
- run:
|
- run:
|
||||||
name: Bootstrapping
|
name: Bootstrapping
|
||||||
command: yarn bootstrap
|
command: yarn bootstrap
|
||||||
|
- run:
|
||||||
|
name: Building
|
||||||
|
command: ./.circleci/build.sh
|
||||||
- run:
|
- run:
|
||||||
name: Linting
|
name: Linting
|
||||||
command: yarn lint
|
command: yarn lint
|
||||||
- run:
|
- run:
|
||||||
name: Tests
|
name: Tests and Coverage
|
||||||
command: yarn test
|
command: yarn test-coverage
|
||||||
- run:
|
- run:
|
||||||
name: Potentially save npm token
|
name: Potentially save npm token
|
||||||
command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
|
command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
|
||||||
- run:
|
- run:
|
||||||
name: Potentially publish canary release
|
name: Potentially publish releases to npm
|
||||||
command: "if ls ~/.npmrc >/dev/null 2>&1 && [[ $(git describe --exact-match 2> /dev/null || :) =~ -canary ]]; then yarn run lerna publish from-git --npm-tag canary --yes; else echo \"Did not publish\"; fi"
|
command: ./.circleci/publish.sh
|
||||||
- run:
|
|
||||||
name: Potentially publish stable release
|
|
||||||
command: "if ls ~/.npmrc >/dev/null 2>&1 && [[ ! $(git describe --exact-match 2> /dev/null || :) =~ -canary ]]; then yarn run lerna publish from-git --yes; else echo \"Did not publish\"; fi"
|
|
||||||
workflows:
|
workflows:
|
||||||
version: 2
|
version: 2
|
||||||
build-and-deploy:
|
build-and-deploy:
|
||||||
|
|||||||
24
.circleci/publish.sh
Executable file
24
.circleci/publish.sh
Executable file
@@ -0,0 +1,24 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
if [ ! -e ~/.npmrc ]; then
|
||||||
|
echo "~/.npmrc file does not exist, skipping publish"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
npm_tag=""
|
||||||
|
tag="$(git describe --tags --exact-match 2> /dev/null || :)"
|
||||||
|
|
||||||
|
if [ -z "$tag" ]; then
|
||||||
|
echo "Not a tagged commit, skipping publish"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$tag" =~ -canary ]]; then
|
||||||
|
echo "Publishing canary release"
|
||||||
|
npm_tag="--npm-tag canary"
|
||||||
|
else
|
||||||
|
echo "Publishing stable release"
|
||||||
|
fi
|
||||||
|
|
||||||
|
yarn run lerna publish from-git $npm_tag --yes
|
||||||
@@ -1,3 +1,10 @@
|
|||||||
/tmp/*
|
/tmp/*
|
||||||
/node_modules/*
|
/node_modules/*
|
||||||
/**/node_modules/*
|
/**/node_modules/*
|
||||||
|
/packages/now-go/go/*
|
||||||
|
/packages/now-build-utils/dist/*
|
||||||
|
/packages/now-node/dist/*
|
||||||
|
/packages/now-next/dist/*
|
||||||
|
/packages/now-node-bridge/*
|
||||||
|
/packages/now-python/*
|
||||||
|
/packages/now-optipng/dist/*
|
||||||
|
|||||||
@@ -8,7 +8,7 @@
|
|||||||
},
|
},
|
||||||
"overrides": [
|
"overrides": [
|
||||||
{
|
{
|
||||||
"files": ["test/**"],
|
"files": ["**/test/**"],
|
||||||
"rules": {
|
"rules": {
|
||||||
"import/no-extraneous-dependencies": 0
|
"import/no-extraneous-dependencies": 0
|
||||||
},
|
},
|
||||||
|
|||||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -1,3 +1,4 @@
|
|||||||
node_modules
|
node_modules
|
||||||
tmp
|
tmp
|
||||||
target/
|
target/
|
||||||
|
.next
|
||||||
4
.prettierrc.json
Normal file
4
.prettierrc.json
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "es5"
|
||||||
|
}
|
||||||
3
.vscode/settings.json
vendored
Normal file
3
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"eslint.enable": false
|
||||||
|
}
|
||||||
@@ -34,10 +34,10 @@ Serverless:
|
|||||||
|
|
||||||
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
||||||
|
|
||||||
1. Serverless Next.js requires Next.js 8 or later, currently this version is out on the `canary` release channel:
|
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
||||||
|
|
||||||
```
|
```
|
||||||
npm install next@canary
|
npm install next --save
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Add the `now-build` script to your `package.json`
|
2. Add the `now-build` script to your `package.json`
|
||||||
|
|||||||
@@ -8,10 +8,10 @@ This error occurs when you have your application is not configured for Serverles
|
|||||||
|
|
||||||
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
||||||
|
|
||||||
1. Serverless Next.js requires Next.js 8 or later, currently this version is out on the `canary` release channel:
|
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
||||||
|
|
||||||
```
|
```
|
||||||
npm install next@canary
|
npm install next --save
|
||||||
```
|
```
|
||||||
|
|
||||||
2. Add the `now-build` script to your `package.json`
|
2. Add the `now-build` script to your `package.json`
|
||||||
@@ -29,11 +29,13 @@ npm install next@canary
|
|||||||
```js
|
```js
|
||||||
module.exports = {
|
module.exports = {
|
||||||
target: 'serverless'
|
target: 'serverless'
|
||||||
// Other options are still valid
|
// Other options
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
|
4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`
|
||||||
|
|
||||||
|
5. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
|
||||||
|
|
||||||
```js
|
```js
|
||||||
{
|
{
|
||||||
|
|||||||
@@ -1,4 +1,8 @@
|
|||||||
module.exports = {
|
module.exports = {
|
||||||
testEnvironment: 'node',
|
testEnvironment: 'node',
|
||||||
collectCoverageFrom: ['packages/**/*.{js,jsx}', '!**/node_modules/**'],
|
collectCoverageFrom: [
|
||||||
|
'packages/(!test)/**/*.{js,jsx}',
|
||||||
|
'!**/node_modules/**',
|
||||||
|
'!**/test/**',
|
||||||
|
],
|
||||||
};
|
};
|
||||||
|
|||||||
17
package.json
17
package.json
@@ -14,16 +14,23 @@
|
|||||||
"bootstrap": "lerna bootstrap",
|
"bootstrap": "lerna bootstrap",
|
||||||
"publish-stable": "lerna version",
|
"publish-stable": "lerna version",
|
||||||
"publish-canary": "lerna version prerelease --preid canary",
|
"publish-canary": "lerna version prerelease --preid canary",
|
||||||
"lint": "tsc && eslint .",
|
"build": "./.circleci/build.sh",
|
||||||
"test": "jest --runInBand",
|
"lint": "eslint .",
|
||||||
|
"codecov": "codecov",
|
||||||
|
"test": "jest --runInBand --verbose",
|
||||||
|
"test-coverage": "jest --runInBand --verbose --coverage --globals \"{\\\"coverage\\\":true}\" && codecov",
|
||||||
"lint-staged": "lint-staged"
|
"lint-staged": "lint-staged"
|
||||||
},
|
},
|
||||||
"pre-commit": "lint-staged",
|
"pre-commit": "lint-staged",
|
||||||
"lint-staged": {
|
"lint-staged": {
|
||||||
"*.js": [
|
"*.js": [
|
||||||
"prettier --write --single-quote",
|
"prettier --write",
|
||||||
"eslint --fix",
|
"eslint --fix",
|
||||||
"git add"
|
"git add"
|
||||||
|
],
|
||||||
|
"*.ts": [
|
||||||
|
"prettier --write",
|
||||||
|
"git add"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@@ -33,6 +40,7 @@
|
|||||||
"@types/node": "^10.12.8",
|
"@types/node": "^10.12.8",
|
||||||
"async-retry": "1.2.3",
|
"async-retry": "1.2.3",
|
||||||
"buffer-replace": "^1.0.0",
|
"buffer-replace": "^1.0.0",
|
||||||
|
"codecov": "^3.2.0",
|
||||||
"eslint": "^5.9.0",
|
"eslint": "^5.9.0",
|
||||||
"eslint-config-airbnb-base": "^13.1.0",
|
"eslint-config-airbnb-base": "^13.1.0",
|
||||||
"eslint-config-prettier": "^3.1.0",
|
"eslint-config-prettier": "^3.1.0",
|
||||||
@@ -43,7 +51,6 @@
|
|||||||
"lint-staged": "^8.0.4",
|
"lint-staged": "^8.0.4",
|
||||||
"node-fetch": "^2.3.0",
|
"node-fetch": "^2.3.0",
|
||||||
"pre-commit": "^1.2.2",
|
"pre-commit": "^1.2.2",
|
||||||
"prettier": "^1.15.2",
|
"prettier": "^1.15.2"
|
||||||
"typescript": "^3.1.6"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,7 +7,9 @@ export IMPORT_CACHE="$LAMBDA_TASK_ROOT/.import-cache"
|
|||||||
export PATH="$IMPORT_CACHE/bin:$PATH"
|
export PATH="$IMPORT_CACHE/bin:$PATH"
|
||||||
|
|
||||||
# Load `import` and runtime
|
# Load `import` and runtime
|
||||||
|
# shellcheck disable=SC1090
|
||||||
. "$(which import)"
|
. "$(which import)"
|
||||||
|
# shellcheck disable=SC1090
|
||||||
. "$IMPORT_CACHE/runtime.sh"
|
. "$IMPORT_CACHE/runtime.sh"
|
||||||
|
|
||||||
# Load user code and process events in a loop forever
|
# Load user code and process events in a loop forever
|
||||||
|
|||||||
@@ -12,9 +12,10 @@ exports.config = {
|
|||||||
|
|
||||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||||
|
|
||||||
exports.build = async ({ files, entrypoint, config }) => {
|
exports.build = async ({
|
||||||
|
workPath, files, entrypoint, config,
|
||||||
|
}) => {
|
||||||
const srcDir = await getWritableDirectory();
|
const srcDir = await getWritableDirectory();
|
||||||
const workDir = await getWritableDirectory();
|
|
||||||
|
|
||||||
console.log('downloading files...');
|
console.log('downloading files...');
|
||||||
await download(files, srcDir);
|
await download(files, srcDir);
|
||||||
@@ -24,7 +25,7 @@ exports.build = async ({ files, entrypoint, config }) => {
|
|||||||
return o;
|
return o;
|
||||||
}, {});
|
}, {});
|
||||||
|
|
||||||
const IMPORT_CACHE = `${workDir}/.import-cache`;
|
const IMPORT_CACHE = `${workPath}/.import-cache`;
|
||||||
const env = Object.assign({}, process.env, configEnv, {
|
const env = Object.assign({}, process.env, configEnv, {
|
||||||
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
|
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
|
||||||
IMPORT_CACHE,
|
IMPORT_CACHE,
|
||||||
@@ -37,12 +38,12 @@ exports.build = async ({ files, entrypoint, config }) => {
|
|||||||
|
|
||||||
await execa(builderPath, [entrypoint], {
|
await execa(builderPath, [entrypoint], {
|
||||||
env,
|
env,
|
||||||
cwd: workDir,
|
cwd: workPath,
|
||||||
stdio: 'inherit',
|
stdio: 'inherit',
|
||||||
});
|
});
|
||||||
|
|
||||||
const lambda = await createLambda({
|
const lambda = await createLambda({
|
||||||
files: await glob('**', workDir),
|
files: await glob('**', workPath),
|
||||||
handler: entrypoint, // not actually used in `bootstrap`
|
handler: entrypoint, // not actually used in `bootstrap`
|
||||||
runtime: 'provided',
|
runtime: 'provided',
|
||||||
environment: Object.assign({}, configEnv, {
|
environment: Object.assign({}, configEnv, {
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/bash",
|
"name": "@now/bash",
|
||||||
"version": "0.1.2-canary.0",
|
"version": "0.2.0",
|
||||||
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
|
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
|
||||||
"main": "index.js",
|
"main": "index.js",
|
||||||
"author": "Nathan Rajlich <nate@zeit.co>",
|
"author": "Nathan Rajlich <nate@zeit.co>",
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
#!/bin/bash
|
||||||
import "static-binaries@1.0.0"
|
import "static-binaries@1.0.0"
|
||||||
static_binaries jq
|
static_binaries jq
|
||||||
|
|
||||||
@@ -13,12 +14,14 @@ _lambda_runtime_api() {
|
|||||||
|
|
||||||
_lambda_runtime_init() {
|
_lambda_runtime_init() {
|
||||||
# Initialize user code
|
# Initialize user code
|
||||||
|
# shellcheck disable=SC1090
|
||||||
. "$SCRIPT_FILENAME" || {
|
. "$SCRIPT_FILENAME" || {
|
||||||
local exit_code="$?"
|
local exit_code="$?"
|
||||||
local error
|
local error_message="Initialization failed for '$SCRIPT_FILENAME' (exit code $exit_code)"
|
||||||
error='{"exitCode":'"$exit_code"'}'
|
echo "$error_message" >&2
|
||||||
|
local error='{"errorMessage":"'"$error_message"'"}'
|
||||||
_lambda_runtime_api "init/error" -X POST -d "$error"
|
_lambda_runtime_api "init/error" -X POST -d "$error"
|
||||||
exit "$EXIT_CODE"
|
exit "$exit_code"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Process events
|
# Process events
|
||||||
@@ -45,18 +48,17 @@ _lambda_runtime_next() {
|
|||||||
local body
|
local body
|
||||||
body="$(mktemp)"
|
body="$(mktemp)"
|
||||||
|
|
||||||
local exit_code=0
|
|
||||||
REQUEST="$event"
|
|
||||||
|
|
||||||
# Stdin of the `handler` function is the HTTP request body.
|
# Stdin of the `handler` function is the HTTP request body.
|
||||||
# Need to use a fifo here instead of bash <() because Lambda
|
# Need to use a fifo here instead of bash <() because Lambda
|
||||||
# errors with "/dev/fd/63 not found" for some reason :/
|
# errors with "/dev/fd/63 not found" for some reason :/
|
||||||
local stdin
|
local stdin
|
||||||
stdin="$(mktemp --dry-run)"
|
stdin="$(mktemp -u)"
|
||||||
mkfifo "$stdin"
|
mkfifo "$stdin"
|
||||||
_lambda_runtime_body "$event" > "$stdin" &
|
_lambda_runtime_body < "$event" > "$stdin" &
|
||||||
|
|
||||||
|
local exit_code=0
|
||||||
handler "$event" < "$stdin" > "$body" || exit_code="$?"
|
handler "$event" < "$stdin" > "$body" || exit_code="$?"
|
||||||
|
|
||||||
rm -f "$event" "$stdin"
|
rm -f "$event" "$stdin"
|
||||||
|
|
||||||
if [ "$exit_code" -eq 0 ]; then
|
if [ "$exit_code" -eq 0 ]; then
|
||||||
@@ -68,18 +70,21 @@ _lambda_runtime_next() {
|
|||||||
| _lambda_runtime_api "invocation/$request_id/response" -X POST -d @- > /dev/null
|
| _lambda_runtime_api "invocation/$request_id/response" -X POST -d @- > /dev/null
|
||||||
rm -f "$body" "$_HEADERS"
|
rm -f "$body" "$_HEADERS"
|
||||||
else
|
else
|
||||||
echo "\`handler\` function return code: $exit_code"
|
local error_message="Invocation failed for 'handler' function in '$SCRIPT_FILENAME' (exit code $exit_code)"
|
||||||
_lambda_runtime_api "invocation/$request_id/error" -X POST -d @- > /dev/null <<< '{"exitCode":'"$exit_code"'}'
|
echo "$error_message" >&2
|
||||||
|
_lambda_runtime_api "invocation/$request_id/error" -X POST -d '{"errorMessage":"'"$error_message"'"}' > /dev/null
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
|
|
||||||
_lambda_runtime_body() {
|
_lambda_runtime_body() {
|
||||||
if [ "$(jq --raw-output '.body | type' < "$1")" = "string" ]; then
|
local event
|
||||||
if [ "$(jq --raw-output '.encoding' < "$1")" = "base64" ]; then
|
event="$(cat)"
|
||||||
jq --raw-output '.body' < "$1" | base64 -d
|
if [ "$(jq --raw-output '.body | type' <<< "$event")" = "string" ]; then
|
||||||
|
if [ "$(jq --raw-output '.encoding' <<< "$event")" = "base64" ]; then
|
||||||
|
jq --raw-output '.body' <<< "$event" | base64 --decode
|
||||||
else
|
else
|
||||||
# assume plain-text body
|
# assume plain-text body
|
||||||
jq --raw-output '.body' < "$1"
|
jq --raw-output '.body' <<< "$event"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
}
|
}
|
||||||
@@ -97,7 +102,10 @@ http_response_header() {
|
|||||||
local value="$2"
|
local value="$2"
|
||||||
local tmp
|
local tmp
|
||||||
tmp="$(mktemp)"
|
tmp="$(mktemp)"
|
||||||
jq --arg name "$name" --arg value "$value" '.[$name] = $value' < "$_HEADERS" > "$tmp"
|
jq \
|
||||||
|
--arg name "$name" \
|
||||||
|
--arg value "$value" \
|
||||||
|
'.[$name] = $value' < "$_HEADERS" > "$tmp"
|
||||||
mv -f "$tmp" "$_HEADERS"
|
mv -f "$tmp" "$_HEADERS"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
3
packages/now-build-utils/.gitignore
vendored
Normal file
3
packages/now-build-utils/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
dist
|
||||||
|
test/symlinks-out
|
||||||
|
test/symlinks.zip
|
||||||
@@ -1 +1,3 @@
|
|||||||
|
/src
|
||||||
/test
|
/test
|
||||||
|
tmp
|
||||||
@@ -1,33 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index').FileBlob;
|
||||||
const intoStream = require('into-stream');
|
|
||||||
|
|
||||||
class FileBlob {
|
|
||||||
constructor({ mode = 0o100644, data }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof data === 'string' || Buffer.isBuffer(data));
|
|
||||||
this.type = 'FileBlob';
|
|
||||||
this.mode = mode;
|
|
||||||
this.data = data;
|
|
||||||
}
|
|
||||||
|
|
||||||
static async fromStream({ mode = 0o100644, stream }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof stream.pipe === 'function'); // is-stream
|
|
||||||
const chunks = [];
|
|
||||||
|
|
||||||
await new Promise((resolve, reject) => {
|
|
||||||
stream.on('data', chunk => chunks.push(Buffer.from(chunk)));
|
|
||||||
stream.on('error', error => reject(error));
|
|
||||||
stream.on('end', () => resolve());
|
|
||||||
});
|
|
||||||
|
|
||||||
const data = Buffer.concat(chunks);
|
|
||||||
return new FileBlob({ mode, data });
|
|
||||||
}
|
|
||||||
|
|
||||||
toStream() {
|
|
||||||
return intoStream(this.data);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = FileBlob;
|
|
||||||
|
|||||||
@@ -1,88 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index').FileFsRef;
|
||||||
const fs = require('fs-extra');
|
|
||||||
const multiStream = require('multistream');
|
|
||||||
const path = require('path');
|
|
||||||
const Sema = require('async-sema');
|
|
||||||
|
|
||||||
/** @typedef {{[filePath: string]: FileFsRef}} FsFiles */
|
|
||||||
|
|
||||||
const semaToPreventEMFILE = new Sema(30);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @constructor
|
|
||||||
* @argument {Object} options
|
|
||||||
* @argument {number} [options.mode=0o100644]
|
|
||||||
* @argument {string} options.fsPath
|
|
||||||
*/
|
|
||||||
class FileFsRef {
|
|
||||||
constructor({ mode = 0o100644, fsPath }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof fsPath === 'string');
|
|
||||||
/** @type {string} */
|
|
||||||
this.type = 'FileFsRef';
|
|
||||||
/** @type {number} */
|
|
||||||
this.mode = mode;
|
|
||||||
/** @type {string} */
|
|
||||||
this.fsPath = fsPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @argument {Object} options
|
|
||||||
* @argument {number} [options.mode=0o100644]
|
|
||||||
* @argument {NodeJS.ReadableStream} options.stream
|
|
||||||
* @argument {string} options.fsPath
|
|
||||||
* @returns {Promise<FileFsRef>}
|
|
||||||
*/
|
|
||||||
static async fromStream({ mode = 0o100644, stream, fsPath }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof stream.pipe === 'function'); // is-stream
|
|
||||||
assert(typeof fsPath === 'string');
|
|
||||||
await fs.mkdirp(path.dirname(fsPath));
|
|
||||||
|
|
||||||
await new Promise((resolve, reject) => {
|
|
||||||
const dest = fs.createWriteStream(fsPath);
|
|
||||||
stream.pipe(dest);
|
|
||||||
stream.on('error', reject);
|
|
||||||
dest.on('finish', resolve);
|
|
||||||
dest.on('error', reject);
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.chmod(fsPath, mode.toString(8).slice(-3));
|
|
||||||
return new FileFsRef({ mode, fsPath });
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {Promise<NodeJS.ReadableStream>}
|
|
||||||
*/
|
|
||||||
async toStreamAsync() {
|
|
||||||
await semaToPreventEMFILE.acquire();
|
|
||||||
const release = () => semaToPreventEMFILE.release();
|
|
||||||
const stream = fs.createReadStream(this.fsPath);
|
|
||||||
stream.on('close', release);
|
|
||||||
stream.on('error', release);
|
|
||||||
return stream;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {NodeJS.ReadableStream}
|
|
||||||
*/
|
|
||||||
toStream() {
|
|
||||||
let flag;
|
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
|
||||||
return multiStream((cb) => {
|
|
||||||
if (flag) return cb(null, null);
|
|
||||||
flag = true;
|
|
||||||
|
|
||||||
this.toStreamAsync()
|
|
||||||
.then((stream) => {
|
|
||||||
cb(null, stream);
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
cb(error, null);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = FileFsRef;
|
|
||||||
|
|||||||
@@ -1,96 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index').FileRef;
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const multiStream = require('multistream');
|
|
||||||
const retry = require('async-retry');
|
|
||||||
const Sema = require('async-sema');
|
|
||||||
|
|
||||||
/** @typedef {{[filePath: string]: FileRef}} Files */
|
|
||||||
|
|
||||||
const semaToDownloadFromS3 = new Sema(10);
|
|
||||||
|
|
||||||
class BailableError extends Error {
|
|
||||||
constructor(...args) {
|
|
||||||
super(...args);
|
|
||||||
/** @type {boolean} */
|
|
||||||
this.bail = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @constructor
|
|
||||||
* @argument {Object} options
|
|
||||||
* @argument {number} [options.mode=0o100644]
|
|
||||||
* @argument {string} options.digest
|
|
||||||
*/
|
|
||||||
class FileRef {
|
|
||||||
constructor({ mode = 0o100644, digest }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof digest === 'string');
|
|
||||||
/** @type {string} */
|
|
||||||
this.type = 'FileRef';
|
|
||||||
/** @type {number} */
|
|
||||||
this.mode = mode;
|
|
||||||
/** @type {string} */
|
|
||||||
this.digest = digest;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {Promise<NodeJS.ReadableStream>}
|
|
||||||
*/
|
|
||||||
async toStreamAsync() {
|
|
||||||
let url;
|
|
||||||
// sha:24be087eef9fac01d61b30a725c1a10d7b45a256
|
|
||||||
const digestParts = this.digest.split(':');
|
|
||||||
if (digestParts[0] === 'sha') {
|
|
||||||
// url = `https://s3.amazonaws.com/now-files/${digestParts[1]}`;
|
|
||||||
url = `https://dmmcy0pwk6bqi.cloudfront.net/${digestParts[1]}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
assert(url);
|
|
||||||
|
|
||||||
await semaToDownloadFromS3.acquire();
|
|
||||||
// console.time(`downloading ${url}`);
|
|
||||||
try {
|
|
||||||
return await retry(
|
|
||||||
async () => {
|
|
||||||
const resp = await fetch(url);
|
|
||||||
if (!resp.ok) {
|
|
||||||
const error = new BailableError(
|
|
||||||
`download: ${resp.status} ${resp.statusText} for ${url}`,
|
|
||||||
);
|
|
||||||
if (resp.status === 403) error.bail = true;
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
return resp.body;
|
|
||||||
},
|
|
||||||
{ factor: 1, retries: 3 },
|
|
||||||
);
|
|
||||||
} finally {
|
|
||||||
// console.timeEnd(`downloading ${url}`);
|
|
||||||
semaToDownloadFromS3.release();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {NodeJS.ReadableStream}
|
|
||||||
*/
|
|
||||||
toStream() {
|
|
||||||
let flag;
|
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
|
||||||
return multiStream((cb) => {
|
|
||||||
if (flag) return cb(null, null);
|
|
||||||
flag = true;
|
|
||||||
|
|
||||||
this.toStreamAsync()
|
|
||||||
.then((stream) => {
|
|
||||||
cb(null, stream);
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
cb(error, null);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = FileRef;
|
|
||||||
|
|||||||
242
packages/now-build-utils/fs/bootstrap-yarn.js
vendored
242
packages/now-build-utils/fs/bootstrap-yarn.js
vendored
@@ -1,242 +0,0 @@
|
|||||||
/* eslint-disable arrow-body-style,no-multi-assign,no-param-reassign */
|
|
||||||
|
|
||||||
const MemoryFileSystem = require('memory-fs');
|
|
||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
|
||||||
const { spawnSync } = require('child_process');
|
|
||||||
|
|
||||||
const yarnPath = spawnSync('which', ['yarn'])
|
|
||||||
.stdout.toString()
|
|
||||||
.trim();
|
|
||||||
|
|
||||||
const cachePath = spawnSync(yarnPath, ['cache', 'dir'])
|
|
||||||
.stdout.toString()
|
|
||||||
.trim();
|
|
||||||
|
|
||||||
spawnSync(yarnPath, ['cache', 'clean']);
|
|
||||||
const vfs = new MemoryFileSystem();
|
|
||||||
|
|
||||||
function isInsideCachePath(filename) {
|
|
||||||
const relative = path.relative(cachePath, filename);
|
|
||||||
return !relative.startsWith('..');
|
|
||||||
}
|
|
||||||
|
|
||||||
function replaceFn(name, newFnFactory) {
|
|
||||||
const prevFn = fs[name];
|
|
||||||
fs[name] = newFnFactory(prevFn);
|
|
||||||
}
|
|
||||||
|
|
||||||
replaceFn('createWriteStream', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const stream = vfs.createWriteStream(...args);
|
|
||||||
|
|
||||||
stream.on('finish', () => {
|
|
||||||
setTimeout(() => {
|
|
||||||
stream.emit('close');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
setTimeout(() => {
|
|
||||||
stream.emit('open');
|
|
||||||
});
|
|
||||||
|
|
||||||
return stream;
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('readFile', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return vfs.readFile(...args.slice(0, -1), (error, result) => {
|
|
||||||
if (error) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
return callback(error, result);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('readdir', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const dirname = args[0];
|
|
||||||
if (!isInsideCachePath(dirname)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return prevFn.call(fs, dirname, (error, results) => {
|
|
||||||
if (error) {
|
|
||||||
results = [];
|
|
||||||
}
|
|
||||||
|
|
||||||
return vfs.readdir(dirname, (error2, results2) => {
|
|
||||||
if (error2) {
|
|
||||||
return callback(error2);
|
|
||||||
}
|
|
||||||
|
|
||||||
// eslint-disable-next-line no-restricted-syntax
|
|
||||||
for (const result2 of results2) {
|
|
||||||
if (!results.includes(result2)) {
|
|
||||||
results.push(result2);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return callback(error2, results);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('stat', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return vfs.stat(...args.slice(0, -1), (error, result) => {
|
|
||||||
if (error) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
result.atime = result.mtime = new Date();
|
|
||||||
return callback(error, result);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('lstat', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return vfs.stat(...args.slice(0, -1), (error, result) => {
|
|
||||||
if (error) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
result.atime = result.mtime = new Date();
|
|
||||||
return callback(error, result);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('exists', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return vfs.exists(...args.slice(0, -1), (result) => {
|
|
||||||
if (!result) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
return callback(result);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('copyFile', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const src = args[0];
|
|
||||||
const dest = args[1];
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
|
|
||||||
if (isInsideCachePath(src) && !isInsideCachePath(dest)) {
|
|
||||||
const buffer = vfs.readFileSync(src);
|
|
||||||
return fs.writeFile(dest, buffer, callback);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isInsideCachePath(src) && isInsideCachePath(dest)) {
|
|
||||||
const buffer = fs.readFileSync(src);
|
|
||||||
return vfs.writeFile(dest, buffer, callback);
|
|
||||||
}
|
|
||||||
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('writeFile', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
return vfs.writeFile(...args);
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('mkdir', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const dirname = args[0];
|
|
||||||
if (!isInsideCachePath(dirname)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return prevFn.call(fs, dirname, (error) => {
|
|
||||||
if (error) {
|
|
||||||
return callback(error);
|
|
||||||
}
|
|
||||||
|
|
||||||
return vfs.mkdirp(dirname, callback);
|
|
||||||
});
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('utimes', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return setTimeout(callback, 0);
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('chmod', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return setTimeout(callback, 0);
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
replaceFn('chown', (prevFn) => {
|
|
||||||
return (...args) => {
|
|
||||||
const filename = args[0];
|
|
||||||
if (!isInsideCachePath(filename)) {
|
|
||||||
return prevFn.call(fs, ...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
const callback = args[args.length - 1];
|
|
||||||
return setTimeout(callback, 0);
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
require(yarnPath);
|
|
||||||
@@ -1,38 +1 @@
|
|||||||
const path = require('path');
|
module.exports = require('../dist/fs/download').default;
|
||||||
const FileFsRef = require('../file-fs-ref.js');
|
|
||||||
|
|
||||||
/** @typedef {import('../file-ref')} FileRef */
|
|
||||||
/** @typedef {import('../file-fs-ref')} FileFsRef */
|
|
||||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
|
||||||
/** @typedef {{[filePath: string]: FileFsRef}|{}} DownloadedFiles */
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {FileRef|FileFsRef} file
|
|
||||||
* @param {string} fsPath
|
|
||||||
* @returns {Promise<FileFsRef>}
|
|
||||||
*/
|
|
||||||
async function downloadFile(file, fsPath) {
|
|
||||||
const { mode } = file;
|
|
||||||
const stream = file.toStream();
|
|
||||||
return FileFsRef.fromStream({ mode, stream, fsPath });
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Download files to disk
|
|
||||||
* @argument {Files} files
|
|
||||||
* @argument {string} basePath
|
|
||||||
* @returns {Promise<DownloadedFiles>}
|
|
||||||
*/
|
|
||||||
module.exports = async function download(files, basePath) {
|
|
||||||
const files2 = {};
|
|
||||||
|
|
||||||
await Promise.all(
|
|
||||||
Object.keys(files).map(async (name) => {
|
|
||||||
const file = files[name];
|
|
||||||
const fsPath = path.join(basePath, name);
|
|
||||||
files2[name] = await downloadFile(file, fsPath);
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
return files2;
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,12 +1 @@
|
|||||||
const path = require('path');
|
module.exports = require('../dist/fs/get-writable-directory').default;
|
||||||
const fs = require('fs-extra');
|
|
||||||
|
|
||||||
const prod = process.env.AWS_EXECUTION_ENV || process.env.X_GOOGLE_CODE_LOCATION;
|
|
||||||
const TMP_PATH = prod ? '/tmp' : path.join(__dirname, 'tmp');
|
|
||||||
|
|
||||||
module.exports = async function getWritableDirectory() {
|
|
||||||
const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
|
|
||||||
const directory = path.join(TMP_PATH, name);
|
|
||||||
await fs.mkdirp(directory);
|
|
||||||
return directory;
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,67 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('../dist/fs/glob').default;
|
||||||
const path = require('path');
|
|
||||||
const vanillaGlob = require('glob');
|
|
||||||
const FileFsRef = require('../file-fs-ref.js');
|
|
||||||
|
|
||||||
/** @typedef {import('fs').Stats} Stats */
|
|
||||||
/** @typedef {import('glob').IOptions} GlobOptions */
|
|
||||||
/** @typedef {import('../file-fs-ref').FsFiles|{}} GlobFiles */
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @argument {string} pattern
|
|
||||||
* @argument {GlobOptions|string} opts
|
|
||||||
* @argument {string} [mountpoint]
|
|
||||||
* @returns {Promise<GlobFiles>}
|
|
||||||
*/
|
|
||||||
module.exports = function glob(pattern, opts = {}, mountpoint) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
/** @type {GlobOptions} */
|
|
||||||
let options;
|
|
||||||
if (typeof opts === 'string') {
|
|
||||||
options = { cwd: opts };
|
|
||||||
} else {
|
|
||||||
options = opts;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!options.cwd) {
|
|
||||||
throw new Error(
|
|
||||||
'Second argument (basePath) must be specified for names of resulting files',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!path.isAbsolute(options.cwd)) {
|
|
||||||
throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
options.statCache = {};
|
|
||||||
options.stat = true;
|
|
||||||
options.dot = true;
|
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
|
||||||
vanillaGlob(pattern, options, (error, files) => {
|
|
||||||
if (error) return reject(error);
|
|
||||||
|
|
||||||
resolve(
|
|
||||||
files.reduce((files2, relativePath) => {
|
|
||||||
const fsPath = path.join(options.cwd, relativePath);
|
|
||||||
/** @type {Stats|any} */
|
|
||||||
const stat = options.statCache[fsPath];
|
|
||||||
assert(
|
|
||||||
stat,
|
|
||||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
|
||||||
);
|
|
||||||
if (stat && stat.isFile()) {
|
|
||||||
let finalPath = relativePath;
|
|
||||||
if (mountpoint) finalPath = path.join(mountpoint, finalPath);
|
|
||||||
return {
|
|
||||||
...files2,
|
|
||||||
[finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return files2;
|
|
||||||
}, {}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,25 +1 @@
|
|||||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
module.exports = require('../dist/fs/rename').default;
|
||||||
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
|
|
||||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @callback delegate
|
|
||||||
* @argument {string} name
|
|
||||||
* @returns {string}
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Rename files using delegate function
|
|
||||||
* @argument {Files} files
|
|
||||||
* @argument {delegate} delegate
|
|
||||||
* @returns {Files}
|
|
||||||
*/
|
|
||||||
module.exports = function rename(files, delegate) {
|
|
||||||
return Object.keys(files).reduce(
|
|
||||||
(newFiles, name) => ({
|
|
||||||
...newFiles,
|
|
||||||
[delegate(name)]: files[name],
|
|
||||||
}),
|
|
||||||
{},
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,108 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('../dist/fs/run-user-scripts');
|
||||||
const fs = require('fs-extra');
|
|
||||||
const path = require('path');
|
|
||||||
const { spawn } = require('child_process');
|
|
||||||
|
|
||||||
const prod = process.env.AWS_EXECUTION_ENV
|
|
||||||
|| process.env.X_GOOGLE_CODE_LOCATION;
|
|
||||||
|
|
||||||
function spawnAsync(command, args, cwd) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const child = spawn(command, args, { stdio: 'inherit', cwd });
|
|
||||||
child.on('error', reject);
|
|
||||||
child.on('close', (code, signal) => (code !== 0
|
|
||||||
? reject(new Error(`Exited with ${code || signal}`))
|
|
||||||
: resolve()));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runShellScript(fsPath) {
|
|
||||||
assert(path.isAbsolute(fsPath));
|
|
||||||
const destPath = path.dirname(fsPath);
|
|
||||||
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function scanParentDirs(destPath, scriptName) {
|
|
||||||
assert(path.isAbsolute(destPath));
|
|
||||||
|
|
||||||
let hasScript = false;
|
|
||||||
let hasPackageLockJson = false;
|
|
||||||
let currentDestPath = destPath;
|
|
||||||
|
|
||||||
// eslint-disable-next-line no-constant-condition
|
|
||||||
while (true) {
|
|
||||||
const packageJsonPath = path.join(currentDestPath, 'package.json');
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
if (await fs.exists(packageJsonPath)) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
const packageJson = JSON.parse(await fs.readFile(packageJsonPath));
|
|
||||||
hasScript = Boolean(
|
|
||||||
packageJson.scripts && scriptName && packageJson.scripts[scriptName],
|
|
||||||
);
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
hasPackageLockJson = await fs.exists(
|
|
||||||
path.join(currentDestPath, 'package-lock.json'),
|
|
||||||
);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
|
|
||||||
const newDestPath = path.dirname(currentDestPath);
|
|
||||||
if (currentDestPath === newDestPath) break;
|
|
||||||
currentDestPath = newDestPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
return { hasScript, hasPackageLockJson };
|
|
||||||
}
|
|
||||||
|
|
||||||
async function installDependencies(destPath, args = []) {
|
|
||||||
assert(path.isAbsolute(destPath));
|
|
||||||
|
|
||||||
let commandArgs = args;
|
|
||||||
console.log(`installing to ${destPath}`);
|
|
||||||
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
|
||||||
|
|
||||||
if (hasPackageLockJson) {
|
|
||||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
|
||||||
await spawnAsync('npm', ['install'].concat(commandArgs), destPath);
|
|
||||||
await spawnAsync('npm', ['cache', 'clean', '--force'], destPath);
|
|
||||||
} else if (prod) {
|
|
||||||
console.log('using memory-fs for yarn cache');
|
|
||||||
await spawnAsync(
|
|
||||||
'node',
|
|
||||||
[path.join(__dirname, 'bootstrap-yarn.js'), '--cwd', destPath].concat(
|
|
||||||
commandArgs,
|
|
||||||
),
|
|
||||||
destPath,
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
await spawnAsync('yarn', ['--cwd', destPath].concat(commandArgs), destPath);
|
|
||||||
await spawnAsync('yarn', ['cache', 'clean'], destPath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runPackageJsonScript(destPath, scriptName) {
|
|
||||||
assert(path.isAbsolute(destPath));
|
|
||||||
const { hasScript, hasPackageLockJson } = await scanParentDirs(
|
|
||||||
destPath,
|
|
||||||
scriptName,
|
|
||||||
);
|
|
||||||
if (!hasScript) return false;
|
|
||||||
|
|
||||||
if (hasPackageLockJson) {
|
|
||||||
console.log(`running "npm run ${scriptName}"`);
|
|
||||||
await spawnAsync('npm', ['run', scriptName], destPath);
|
|
||||||
} else {
|
|
||||||
console.log(`running "yarn run ${scriptName}"`);
|
|
||||||
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath);
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
runShellScript,
|
|
||||||
installDependencies,
|
|
||||||
runNpmInstall: installDependencies,
|
|
||||||
runPackageJsonScript,
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,4 +1 @@
|
|||||||
const fastStreamToBuffer = require('fast-stream-to-buffer');
|
module.exports = require('../dist/fs/stream-to-buffer').default;
|
||||||
const { promisify } = require('util');
|
|
||||||
|
|
||||||
module.exports = promisify(fastStreamToBuffer);
|
|
||||||
|
|||||||
@@ -1,60 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index');
|
||||||
const Sema = require('async-sema');
|
|
||||||
const { ZipFile } = require('yazl');
|
|
||||||
const streamToBuffer = require('./fs/stream-to-buffer.js');
|
|
||||||
|
|
||||||
class Lambda {
|
|
||||||
constructor({
|
|
||||||
zipBuffer, handler, runtime, environment,
|
|
||||||
}) {
|
|
||||||
this.type = 'Lambda';
|
|
||||||
this.zipBuffer = zipBuffer;
|
|
||||||
this.handler = handler;
|
|
||||||
this.runtime = runtime;
|
|
||||||
this.environment = environment;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const sema = new Sema(10);
|
|
||||||
const mtime = new Date(1540000000000);
|
|
||||||
|
|
||||||
async function createLambda({
|
|
||||||
files, handler, runtime, environment = {},
|
|
||||||
}) {
|
|
||||||
assert(typeof files === 'object', '"files" must be an object');
|
|
||||||
assert(typeof handler === 'string', '"handler" is not a string');
|
|
||||||
assert(typeof runtime === 'string', '"runtime" is not a string');
|
|
||||||
assert(typeof environment === 'object', '"environment" is not an object');
|
|
||||||
|
|
||||||
await sema.acquire();
|
|
||||||
try {
|
|
||||||
const zipFile = new ZipFile();
|
|
||||||
const zipBuffer = await new Promise((resolve, reject) => {
|
|
||||||
Object.keys(files)
|
|
||||||
.sort()
|
|
||||||
.forEach((name) => {
|
|
||||||
const file = files[name];
|
|
||||||
const stream = file.toStream();
|
|
||||||
stream.on('error', reject);
|
|
||||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
|
||||||
});
|
|
||||||
|
|
||||||
zipFile.end();
|
|
||||||
streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
|
|
||||||
});
|
|
||||||
|
|
||||||
return new Lambda({
|
|
||||||
zipBuffer,
|
|
||||||
handler,
|
|
||||||
runtime,
|
|
||||||
environment,
|
|
||||||
});
|
|
||||||
} finally {
|
|
||||||
sema.release();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
Lambda,
|
|
||||||
createLambda,
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,7 +1,9 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/build-utils",
|
"name": "@now/build-utils",
|
||||||
"version": "0.4.33-canary.2",
|
"version": "0.5.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
|
"main": "./dist/index.js",
|
||||||
|
"types": "./dist/index.d.js",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/zeit/now-builders.git",
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
@@ -10,16 +12,28 @@
|
|||||||
"dependencies": {
|
"dependencies": {
|
||||||
"async-retry": "1.2.3",
|
"async-retry": "1.2.3",
|
||||||
"async-sema": "2.1.4",
|
"async-sema": "2.1.4",
|
||||||
"fast-stream-to-buffer": "1.0.0",
|
"end-of-stream": "1.4.1",
|
||||||
"fs-extra": "7.0.0",
|
"fs-extra": "7.0.0",
|
||||||
"glob": "7.1.3",
|
"glob": "7.1.3",
|
||||||
"into-stream": "4.0.0",
|
"into-stream": "5.0.0",
|
||||||
"memory-fs": "0.4.1",
|
"memory-fs": "0.4.1",
|
||||||
"multistream": "2.1.1",
|
"multistream": "2.1.1",
|
||||||
"node-fetch": "2.2.0",
|
"node-fetch": "2.2.0",
|
||||||
"yazl": "2.4.3"
|
"yazl": "2.4.3"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "jest"
|
"build": "tsc",
|
||||||
|
"test": "tsc && jest",
|
||||||
|
"prepublish": "tsc"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/async-retry": "^1.2.1",
|
||||||
|
"@types/end-of-stream": "^1.4.0",
|
||||||
|
"@types/fs-extra": "^5.0.5",
|
||||||
|
"@types/glob": "^7.1.1",
|
||||||
|
"@types/node-fetch": "^2.1.6",
|
||||||
|
"@types/yazl": "^2.4.1",
|
||||||
|
"execa": "^1.0.0",
|
||||||
|
"typescript": "3.3.4000"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
46
packages/now-build-utils/src/file-blob.ts
Normal file
46
packages/now-build-utils/src/file-blob.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import intoStream from 'into-stream';
|
||||||
|
import { File } from './types';
|
||||||
|
|
||||||
|
interface FileBlobOptions {
|
||||||
|
mode?: number;
|
||||||
|
data: string | Buffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FromStreamOptions {
|
||||||
|
mode?: number;
|
||||||
|
stream: NodeJS.ReadableStream;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default class FileBlob implements File {
|
||||||
|
public type: 'FileBlob';
|
||||||
|
public mode: number;
|
||||||
|
public data: string | Buffer;
|
||||||
|
|
||||||
|
constructor({ mode = 0o100644, data }: FileBlobOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof data === 'string' || Buffer.isBuffer(data));
|
||||||
|
this.type = 'FileBlob';
|
||||||
|
this.mode = mode;
|
||||||
|
this.data = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fromStream({ mode = 0o100644, stream }: FromStreamOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof stream.pipe === 'function'); // is-stream
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
stream.on('data', chunk => chunks.push(Buffer.from(chunk)));
|
||||||
|
stream.on('error', error => reject(error));
|
||||||
|
stream.on('end', () => resolve());
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = Buffer.concat(chunks);
|
||||||
|
return new FileBlob({ mode, data });
|
||||||
|
}
|
||||||
|
|
||||||
|
toStream(): NodeJS.ReadableStream {
|
||||||
|
return intoStream(this.data);
|
||||||
|
}
|
||||||
|
}
|
||||||
90
packages/now-build-utils/src/file-fs-ref.ts
Normal file
90
packages/now-build-utils/src/file-fs-ref.ts
Normal file
@@ -0,0 +1,90 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import multiStream from 'multistream';
|
||||||
|
import path from 'path';
|
||||||
|
import Sema from 'async-sema';
|
||||||
|
import { File } from './types';
|
||||||
|
|
||||||
|
const semaToPreventEMFILE = new Sema(20);
|
||||||
|
|
||||||
|
interface FileFsRefOptions {
|
||||||
|
mode?: number;
|
||||||
|
fsPath: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FromStreamOptions {
|
||||||
|
mode: number;
|
||||||
|
stream: NodeJS.ReadableStream;
|
||||||
|
fsPath: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
class FileFsRef implements File {
|
||||||
|
public type: 'FileFsRef';
|
||||||
|
public mode: number;
|
||||||
|
public fsPath: string;
|
||||||
|
|
||||||
|
constructor({ mode = 0o100644, fsPath }: FileFsRefOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof fsPath === 'string');
|
||||||
|
this.type = 'FileFsRef';
|
||||||
|
this.mode = mode;
|
||||||
|
this.fsPath = fsPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fromFsPath({ mode, fsPath }: FileFsRefOptions): Promise<FileFsRef> {
|
||||||
|
let m = mode;
|
||||||
|
if (!m) {
|
||||||
|
const stat = await fs.lstat(fsPath);
|
||||||
|
m = stat.mode;
|
||||||
|
}
|
||||||
|
return new FileFsRef({ mode: m, fsPath });
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fromStream({ mode = 0o100644, stream, fsPath }: FromStreamOptions): Promise<FileFsRef> {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof stream.pipe === 'function'); // is-stream
|
||||||
|
assert(typeof fsPath === 'string');
|
||||||
|
await fs.mkdirp(path.dirname(fsPath));
|
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
const dest = fs.createWriteStream(fsPath, {
|
||||||
|
mode: mode & 0o777
|
||||||
|
});
|
||||||
|
stream.pipe(dest);
|
||||||
|
stream.on('error', reject);
|
||||||
|
dest.on('finish', resolve);
|
||||||
|
dest.on('error', reject);
|
||||||
|
});
|
||||||
|
|
||||||
|
return new FileFsRef({ mode, fsPath });
|
||||||
|
}
|
||||||
|
|
||||||
|
async toStreamAsync(): Promise<NodeJS.ReadableStream> {
|
||||||
|
await semaToPreventEMFILE.acquire();
|
||||||
|
const release = () => semaToPreventEMFILE.release();
|
||||||
|
const stream = fs.createReadStream(this.fsPath);
|
||||||
|
stream.on('close', release);
|
||||||
|
stream.on('error', release);
|
||||||
|
return stream;
|
||||||
|
}
|
||||||
|
|
||||||
|
toStream(): NodeJS.ReadableStream {
|
||||||
|
let flag = false;
|
||||||
|
|
||||||
|
// eslint-disable-next-line consistent-return
|
||||||
|
return multiStream((cb) => {
|
||||||
|
if (flag) return cb(null, null);
|
||||||
|
flag = true;
|
||||||
|
|
||||||
|
this.toStreamAsync()
|
||||||
|
.then((stream) => {
|
||||||
|
cb(null, stream);
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
cb(error, null);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export = FileFsRef;
|
||||||
93
packages/now-build-utils/src/file-ref.ts
Normal file
93
packages/now-build-utils/src/file-ref.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import fetch from 'node-fetch';
|
||||||
|
import multiStream from 'multistream';
|
||||||
|
import retry from 'async-retry';
|
||||||
|
import Sema from 'async-sema';
|
||||||
|
import { File } from './types';
|
||||||
|
|
||||||
|
interface FileRefOptions {
|
||||||
|
mode?: number;
|
||||||
|
digest: string;
|
||||||
|
mutable?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
const semaToDownloadFromS3 = new Sema(5);
|
||||||
|
|
||||||
|
class BailableError extends Error {
|
||||||
|
public bail: boolean;
|
||||||
|
|
||||||
|
constructor(...args: string[]) {
|
||||||
|
super(...args);
|
||||||
|
this.bail = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default class FileRef implements File {
|
||||||
|
public type: 'FileRef';
|
||||||
|
public mode: number;
|
||||||
|
public digest: string;
|
||||||
|
public mutable: boolean;
|
||||||
|
|
||||||
|
constructor({ mode = 0o100644, digest, mutable = false }: FileRefOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof digest === 'string');
|
||||||
|
assert(typeof mutable === 'boolean');
|
||||||
|
this.type = 'FileRef';
|
||||||
|
this.mode = mode;
|
||||||
|
this.digest = digest;
|
||||||
|
this.mutable = mutable;
|
||||||
|
}
|
||||||
|
|
||||||
|
async toStreamAsync(): Promise<NodeJS.ReadableStream> {
|
||||||
|
let url = '';
|
||||||
|
// sha:24be087eef9fac01d61b30a725c1a10d7b45a256
|
||||||
|
const digestParts = this.digest.split(':');
|
||||||
|
if (digestParts[0] === 'sha') {
|
||||||
|
url = this.mutable
|
||||||
|
? `https://s3.amazonaws.com/now-files/${digestParts[1]}`
|
||||||
|
: `https://dmmcy0pwk6bqi.cloudfront.net/${digestParts[1]}`;
|
||||||
|
} else {
|
||||||
|
throw new Error('Expected digest to be sha');
|
||||||
|
}
|
||||||
|
|
||||||
|
await semaToDownloadFromS3.acquire();
|
||||||
|
// console.time(`downloading ${url}`);
|
||||||
|
try {
|
||||||
|
return await retry(
|
||||||
|
async () => {
|
||||||
|
const resp = await fetch(url);
|
||||||
|
if (!resp.ok) {
|
||||||
|
const error = new BailableError(
|
||||||
|
`download: ${resp.status} ${resp.statusText} for ${url}`,
|
||||||
|
);
|
||||||
|
if (resp.status === 403) error.bail = true;
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
return resp.body;
|
||||||
|
},
|
||||||
|
{ factor: 1, retries: 3 },
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
// console.timeEnd(`downloading ${url}`);
|
||||||
|
semaToDownloadFromS3.release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
toStream(): NodeJS.ReadableStream {
|
||||||
|
let flag = false;
|
||||||
|
|
||||||
|
// eslint-disable-next-line consistent-return
|
||||||
|
return multiStream((cb) => {
|
||||||
|
if (flag) return cb(null, null);
|
||||||
|
flag = true;
|
||||||
|
|
||||||
|
this.toStreamAsync()
|
||||||
|
.then((stream) => {
|
||||||
|
cb(null, stream);
|
||||||
|
})
|
||||||
|
.catch((error) => {
|
||||||
|
cb(error, null);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
62
packages/now-build-utils/src/fs/download.ts
Normal file
62
packages/now-build-utils/src/fs/download.ts
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
import path from 'path';
|
||||||
|
import FileFsRef from '../file-fs-ref';
|
||||||
|
import { File, Files, Meta } from '../types';
|
||||||
|
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
|
||||||
|
|
||||||
|
export interface DownloadedFiles {
|
||||||
|
[filePath: string]: FileFsRef
|
||||||
|
}
|
||||||
|
|
||||||
|
const S_IFMT = 61440; /* 0170000 type of file */
|
||||||
|
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
||||||
|
|
||||||
|
export function isSymbolicLink(mode: number): boolean {
|
||||||
|
return (mode & S_IFMT) === S_IFLNK;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
|
||||||
|
const { mode } = file;
|
||||||
|
if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
|
||||||
|
const [ target ] = await Promise.all([
|
||||||
|
readlink((file as FileFsRef).fsPath),
|
||||||
|
mkdirp(path.dirname(fsPath))
|
||||||
|
]);
|
||||||
|
await symlink(target, fsPath);
|
||||||
|
return FileFsRef.fromFsPath({ mode, fsPath });
|
||||||
|
} else {
|
||||||
|
const stream = file.toStream();
|
||||||
|
return FileFsRef.fromStream({ mode, stream, fsPath });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function removeFile(basePath: string, fileMatched: string) {
|
||||||
|
const file = path.join(basePath, fileMatched);
|
||||||
|
await remove(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default async function download(files: Files, basePath: string, meta?: Meta): Promise<DownloadedFiles> {
|
||||||
|
const files2: DownloadedFiles = {};
|
||||||
|
const { filesChanged = null, filesRemoved = null } = meta || {};
|
||||||
|
|
||||||
|
await Promise.all(
|
||||||
|
Object.keys(files).map(async (name) => {
|
||||||
|
// If the file does not exist anymore, remove it.
|
||||||
|
if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) {
|
||||||
|
await removeFile(basePath, name);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If a file didn't change, do not re-download it.
|
||||||
|
if (Array.isArray(filesChanged) && !filesChanged.includes(name)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const file = files[name];
|
||||||
|
const fsPath = path.join(basePath, name);
|
||||||
|
|
||||||
|
files2[name] = await downloadFile(file, fsPath);
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
return files2;
|
||||||
|
}
|
||||||
10
packages/now-build-utils/src/fs/get-writable-directory.ts
Normal file
10
packages/now-build-utils/src/fs/get-writable-directory.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { join } from 'path';
|
||||||
|
import { tmpdir } from 'os';
|
||||||
|
import { mkdirp } from 'fs-extra';
|
||||||
|
|
||||||
|
export default async function getWritableDirectory() {
|
||||||
|
const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
|
||||||
|
const directory = join(tmpdir(), name);
|
||||||
|
await mkdirp(directory);
|
||||||
|
return directory;
|
||||||
|
}
|
||||||
66
packages/now-build-utils/src/fs/glob.ts
Normal file
66
packages/now-build-utils/src/fs/glob.ts
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import path from 'path';
|
||||||
|
import assert from 'assert';
|
||||||
|
import vanillaGlob_ from 'glob';
|
||||||
|
import { promisify } from 'util';
|
||||||
|
import { lstat, Stats } from 'fs-extra';
|
||||||
|
import FileFsRef from '../file-fs-ref';
|
||||||
|
|
||||||
|
type GlobOptions = vanillaGlob_.IOptions;
|
||||||
|
|
||||||
|
interface FsFiles {
|
||||||
|
[filePath: string]: FileFsRef
|
||||||
|
}
|
||||||
|
|
||||||
|
const vanillaGlob = promisify(vanillaGlob_);
|
||||||
|
|
||||||
|
export default async function glob(pattern: string, opts: GlobOptions | string, mountpoint?: string): Promise<FsFiles> {
|
||||||
|
let options: GlobOptions;
|
||||||
|
if (typeof opts === 'string') {
|
||||||
|
options = { cwd: opts };
|
||||||
|
} else {
|
||||||
|
options = opts;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.cwd) {
|
||||||
|
throw new Error(
|
||||||
|
'Second argument (basePath) must be specified for names of resulting files',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!path.isAbsolute(options.cwd)) {
|
||||||
|
throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const results: FsFiles = {};
|
||||||
|
|
||||||
|
options.symlinks = {};
|
||||||
|
options.statCache = {};
|
||||||
|
options.stat = true;
|
||||||
|
options.dot = true;
|
||||||
|
|
||||||
|
const files = await vanillaGlob(pattern, options);
|
||||||
|
|
||||||
|
for (const relativePath of files) {
|
||||||
|
const fsPath = path.join(options.cwd!, relativePath);
|
||||||
|
let stat: Stats = options.statCache![fsPath] as Stats;
|
||||||
|
assert(
|
||||||
|
stat,
|
||||||
|
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
||||||
|
);
|
||||||
|
if (stat.isFile()) {
|
||||||
|
const isSymlink = options.symlinks![fsPath];
|
||||||
|
if (isSymlink) {
|
||||||
|
stat = await lstat(fsPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
let finalPath = relativePath;
|
||||||
|
if (mountpoint) {
|
||||||
|
finalPath = path.join(mountpoint, finalPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
results[finalPath] = new FileFsRef({ mode: stat.mode, fsPath });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
12
packages/now-build-utils/src/fs/rename.ts
Normal file
12
packages/now-build-utils/src/fs/rename.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import { Files } from '../types';
|
||||||
|
type Delegate = (name: string) => string;
|
||||||
|
|
||||||
|
export default function rename(files: Files, delegate: Delegate): Files {
|
||||||
|
return Object.keys(files).reduce(
|
||||||
|
(newFiles, name) => ({
|
||||||
|
...newFiles,
|
||||||
|
[delegate(name)]: files[name],
|
||||||
|
}),
|
||||||
|
{},
|
||||||
|
);
|
||||||
|
}
|
||||||
138
packages/now-build-utils/src/fs/run-user-scripts.ts
Normal file
138
packages/now-build-utils/src/fs/run-user-scripts.ts
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'path';
|
||||||
|
import { spawn, SpawnOptions } from 'child_process';
|
||||||
|
|
||||||
|
function spawnAsync(command: string, args: string[], cwd: string, opts: SpawnOptions = {}) {
|
||||||
|
return new Promise<void>((resolve, reject) => {
|
||||||
|
const stderrLogs: Buffer[] = []
|
||||||
|
opts = { stdio: 'inherit', cwd, ...opts };
|
||||||
|
const child = spawn(command, args, opts);
|
||||||
|
|
||||||
|
if (opts.stdio === 'pipe'){
|
||||||
|
child.stderr.on('data', data => stderrLogs.push(data));
|
||||||
|
}
|
||||||
|
|
||||||
|
child.on('error', reject);
|
||||||
|
child.on('close', (code, signal) => {
|
||||||
|
if (code === 0) {
|
||||||
|
return resolve()
|
||||||
|
}
|
||||||
|
|
||||||
|
const errorLogs = stderrLogs.map(line => line.toString()).join('');
|
||||||
|
if (opts.stdio !== 'inherit') {
|
||||||
|
reject(new Error(`Exited with ${code || signal}\n${errorLogs}`));
|
||||||
|
} else {
|
||||||
|
reject(new Error(`Exited with ${code || signal}`));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function chmodPlusX(fsPath: string) {
|
||||||
|
const s = await fs.stat(fsPath);
|
||||||
|
const newMode = s.mode | 64 | 8 | 1; // eslint-disable-line no-bitwise
|
||||||
|
if (s.mode === newMode) return;
|
||||||
|
const base8 = newMode.toString(8).slice(-3);
|
||||||
|
await fs.chmod(fsPath, base8);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runShellScript(fsPath: string) {
|
||||||
|
assert(path.isAbsolute(fsPath));
|
||||||
|
const destPath = path.dirname(fsPath);
|
||||||
|
await chmodPlusX(fsPath);
|
||||||
|
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function scanParentDirs(destPath: string, scriptName?: string) {
|
||||||
|
assert(path.isAbsolute(destPath));
|
||||||
|
|
||||||
|
let hasScript = false;
|
||||||
|
let hasPackageLockJson = false;
|
||||||
|
let currentDestPath = destPath;
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-constant-condition
|
||||||
|
while (true) {
|
||||||
|
const packageJsonPath = path.join(currentDestPath, 'package.json');
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
if (await fs.pathExists(packageJsonPath)) {
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
|
||||||
|
hasScript = Boolean(
|
||||||
|
packageJson.scripts && scriptName && packageJson.scripts[scriptName],
|
||||||
|
);
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
hasPackageLockJson = await fs.pathExists(
|
||||||
|
path.join(currentDestPath, 'package-lock.json'),
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
const newDestPath = path.dirname(currentDestPath);
|
||||||
|
if (currentDestPath === newDestPath) break;
|
||||||
|
currentDestPath = newDestPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
return { hasScript, hasPackageLockJson };
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function installDependencies(destPath: string, args: string[] = []) {
|
||||||
|
assert(path.isAbsolute(destPath));
|
||||||
|
|
||||||
|
let commandArgs = args;
|
||||||
|
console.log(`installing to ${destPath}`);
|
||||||
|
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
||||||
|
|
||||||
|
const opts = {
|
||||||
|
env: {
|
||||||
|
...process.env,
|
||||||
|
// This is a little hack to force `node-gyp` to build for the
|
||||||
|
// Node.js version that `@now/node` and `@now/node-server` use
|
||||||
|
npm_config_target: '8.10.0',
|
||||||
|
},
|
||||||
|
stdio: 'pipe'
|
||||||
|
};
|
||||||
|
|
||||||
|
if (hasPackageLockJson) {
|
||||||
|
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||||
|
await spawnAsync(
|
||||||
|
'npm',
|
||||||
|
['install'].concat(commandArgs),
|
||||||
|
destPath,
|
||||||
|
opts as SpawnOptions
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
await spawnAsync(
|
||||||
|
'yarn',
|
||||||
|
['--cwd', destPath].concat(commandArgs),
|
||||||
|
destPath,
|
||||||
|
opts as SpawnOptions,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runPackageJsonScript(
|
||||||
|
destPath: string,
|
||||||
|
scriptName: string,
|
||||||
|
opts?: SpawnOptions
|
||||||
|
) {
|
||||||
|
assert(path.isAbsolute(destPath));
|
||||||
|
const { hasScript, hasPackageLockJson } = await scanParentDirs(
|
||||||
|
destPath,
|
||||||
|
scriptName,
|
||||||
|
);
|
||||||
|
if (!hasScript) return false;
|
||||||
|
|
||||||
|
if (hasPackageLockJson) {
|
||||||
|
console.log(`running "npm run ${scriptName}"`);
|
||||||
|
await spawnAsync('npm', ['run', scriptName], destPath, opts);
|
||||||
|
} else {
|
||||||
|
console.log(`running "yarn run ${scriptName}"`);
|
||||||
|
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath, opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const runNpmInstall = installDependencies;
|
||||||
26
packages/now-build-utils/src/fs/stream-to-buffer.ts
Normal file
26
packages/now-build-utils/src/fs/stream-to-buffer.ts
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
import eos from 'end-of-stream';
|
||||||
|
|
||||||
|
export default function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
|
||||||
|
return new Promise<Buffer>((resolve, reject) => {
|
||||||
|
const buffers: Buffer[] = [];
|
||||||
|
|
||||||
|
stream.on('data', buffers.push.bind(buffers))
|
||||||
|
|
||||||
|
eos(stream, (err) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
switch (buffers.length) {
|
||||||
|
case 0:
|
||||||
|
resolve(Buffer.allocUnsafe(0));
|
||||||
|
break;
|
||||||
|
case 1:
|
||||||
|
resolve(buffers[0]);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
resolve(Buffer.concat(buffers));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
34
packages/now-build-utils/src/index.ts
Normal file
34
packages/now-build-utils/src/index.ts
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import FileBlob from './file-blob';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
import FileRef from './file-ref';
|
||||||
|
import { File, Files, AnalyzeOptions, BuildOptions, PrepareCacheOptions, ShouldServeOptions, Meta } from './types';
|
||||||
|
import { Lambda, createLambda } from './lambda';
|
||||||
|
import download from './fs/download';
|
||||||
|
import getWriteableDirectory from './fs/get-writable-directory'
|
||||||
|
import glob from './fs/glob';
|
||||||
|
import rename from './fs/rename';
|
||||||
|
import { installDependencies, runPackageJsonScript, runNpmInstall, runShellScript } from './fs/run-user-scripts';
|
||||||
|
import streamToBuffer from './fs/stream-to-buffer';
|
||||||
|
import shouldServe from './should-serve';
|
||||||
|
|
||||||
|
export {
|
||||||
|
FileBlob,
|
||||||
|
FileFsRef,
|
||||||
|
FileRef,
|
||||||
|
Files,
|
||||||
|
File,
|
||||||
|
Meta,
|
||||||
|
Lambda,
|
||||||
|
createLambda,
|
||||||
|
download,
|
||||||
|
getWriteableDirectory,
|
||||||
|
glob,
|
||||||
|
rename,
|
||||||
|
installDependencies, runPackageJsonScript, runNpmInstall, runShellScript,
|
||||||
|
streamToBuffer,
|
||||||
|
AnalyzeOptions,
|
||||||
|
BuildOptions,
|
||||||
|
PrepareCacheOptions,
|
||||||
|
ShouldServeOptions,
|
||||||
|
shouldServe,
|
||||||
|
};
|
||||||
104
packages/now-build-utils/src/lambda.ts
Normal file
104
packages/now-build-utils/src/lambda.ts
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import Sema from 'async-sema';
|
||||||
|
import { ZipFile } from 'yazl';
|
||||||
|
import { readlink } from 'fs-extra';
|
||||||
|
import { Files } from './types';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
import { isSymbolicLink } from './fs/download';
|
||||||
|
import streamToBuffer from './fs/stream-to-buffer';
|
||||||
|
|
||||||
|
interface Environment {
|
||||||
|
[key: string]: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface LambdaOptions {
|
||||||
|
zipBuffer: Buffer;
|
||||||
|
handler: string;
|
||||||
|
runtime: string;
|
||||||
|
environment: Environment;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface CreateLambdaOptions {
|
||||||
|
files: Files;
|
||||||
|
handler: string;
|
||||||
|
runtime: string;
|
||||||
|
environment?: Environment;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Lambda {
|
||||||
|
public type: 'Lambda';
|
||||||
|
public zipBuffer: Buffer;
|
||||||
|
public handler: string;
|
||||||
|
public runtime: string;
|
||||||
|
public environment: Environment;
|
||||||
|
|
||||||
|
constructor({
|
||||||
|
zipBuffer, handler, runtime, environment,
|
||||||
|
}: LambdaOptions) {
|
||||||
|
this.type = 'Lambda';
|
||||||
|
this.zipBuffer = zipBuffer;
|
||||||
|
this.handler = handler;
|
||||||
|
this.runtime = runtime;
|
||||||
|
this.environment = environment;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const sema = new Sema(10);
|
||||||
|
const mtime = new Date(1540000000000);
|
||||||
|
|
||||||
|
export async function createLambda({
|
||||||
|
files, handler, runtime, environment = {},
|
||||||
|
}: CreateLambdaOptions): Promise<Lambda> {
|
||||||
|
assert(typeof files === 'object', '"files" must be an object');
|
||||||
|
assert(typeof handler === 'string', '"handler" is not a string');
|
||||||
|
assert(typeof runtime === 'string', '"runtime" is not a string');
|
||||||
|
assert(typeof environment === 'object', '"environment" is not an object');
|
||||||
|
|
||||||
|
await sema.acquire();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const zipBuffer = await createZip(files);
|
||||||
|
return new Lambda({
|
||||||
|
zipBuffer,
|
||||||
|
handler,
|
||||||
|
runtime,
|
||||||
|
environment,
|
||||||
|
});
|
||||||
|
} finally {
|
||||||
|
sema.release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createZip(files: Files): Promise<Buffer> {
|
||||||
|
const names = Object.keys(files).sort();
|
||||||
|
|
||||||
|
const symlinkTargets = new Map<string, string>();
|
||||||
|
for (const name of names) {
|
||||||
|
const file = files[name];
|
||||||
|
if (file.mode && isSymbolicLink(file.mode) && file.type === 'FileFsRef') {
|
||||||
|
const symlinkTarget = await readlink((file as FileFsRef).fsPath);
|
||||||
|
symlinkTargets.set(name, symlinkTarget);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const zipFile = new ZipFile();
|
||||||
|
const zipBuffer = await new Promise<Buffer>((resolve, reject) => {
|
||||||
|
for (const name of names) {
|
||||||
|
const file = files[name];
|
||||||
|
const opts = { mode: file.mode, mtime };
|
||||||
|
const symlinkTarget = symlinkTargets.get(name);
|
||||||
|
if (typeof symlinkTarget === 'string') {
|
||||||
|
zipFile.addBuffer(Buffer.from(symlinkTarget, 'utf8'), name, opts);
|
||||||
|
} else {
|
||||||
|
const stream = file.toStream() as import('stream').Readable;
|
||||||
|
stream.on('error', reject);
|
||||||
|
zipFile.addReadStream(stream, name, opts);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
zipFile.end();
|
||||||
|
streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
|
||||||
|
});
|
||||||
|
|
||||||
|
return zipBuffer;
|
||||||
|
}
|
||||||
27
packages/now-build-utils/src/should-serve.ts
Normal file
27
packages/now-build-utils/src/should-serve.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import { parse } from 'path';
|
||||||
|
import { ShouldServeOptions } from './types';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
|
||||||
|
export default function shouldServe({
|
||||||
|
entrypoint,
|
||||||
|
files,
|
||||||
|
requestPath
|
||||||
|
}: ShouldServeOptions): boolean {
|
||||||
|
requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/'
|
||||||
|
entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility
|
||||||
|
|
||||||
|
if (entrypoint === requestPath && hasProp(files, entrypoint)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { dir, name } = parse(entrypoint);
|
||||||
|
if (name === 'index' && dir === requestPath && hasProp(files, entrypoint)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasProp(obj: { [path: string]: FileFsRef }, key: string): boolean {
|
||||||
|
return Object.hasOwnProperty.call(obj, key)
|
||||||
|
}
|
||||||
154
packages/now-build-utils/src/types.ts
Normal file
154
packages/now-build-utils/src/types.ts
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
import FileRef from './file-ref';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
|
||||||
|
export interface File {
|
||||||
|
type: string;
|
||||||
|
mode: number;
|
||||||
|
toStream: () => NodeJS.ReadableStream;
|
||||||
|
fsPath?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Files {
|
||||||
|
[filePath: string]: File;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Config {
|
||||||
|
[key: string]: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Meta {
|
||||||
|
isDev?: boolean;
|
||||||
|
requestPath?: string;
|
||||||
|
filesChanged?: string[];
|
||||||
|
filesRemoved?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AnalyzeOptions {
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: {
|
||||||
|
[filePath: string]: FileRef;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process. This directory will be populated with the restored cache.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BuildOptions {
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: Files;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process. This directory will be populated with the restored cache.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Metadata related to the invoker of the builder, used by `now dev`.
|
||||||
|
* Builders may use the properties on this object to change behavior based
|
||||||
|
* on the build environment.
|
||||||
|
*/
|
||||||
|
meta?: Meta;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PrepareCacheOptions {
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: Files;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you can build a cache to use for
|
||||||
|
* the next run.
|
||||||
|
*/
|
||||||
|
cachePath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ShouldServeOptions {
|
||||||
|
/**
|
||||||
|
* A path string from a request.
|
||||||
|
*/
|
||||||
|
requestPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: {
|
||||||
|
[path: string]: FileFsRef;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process. This directory will be populated with the restored cache.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
}
|
||||||
1
packages/now-build-utils/test/symlinks/a.txt
Normal file
1
packages/now-build-utils/test/symlinks/a.txt
Normal file
@@ -0,0 +1 @@
|
|||||||
|
contents
|
||||||
1
packages/now-build-utils/test/symlinks/link.txt
Symbolic link
1
packages/now-build-utils/test/symlinks/link.txt
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
./a.txt
|
||||||
@@ -1,13 +1,18 @@
|
|||||||
/* global beforeAll, expect, it, jest */
|
/* global beforeAll, expect, it, jest */
|
||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
|
const fs = require('fs-extra');
|
||||||
|
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||||
|
const execa = require('execa');
|
||||||
|
const assert = require('assert');
|
||||||
|
const { glob, download } = require('../');
|
||||||
|
const { createZip } = require('../dist/lambda');
|
||||||
|
|
||||||
const {
|
const {
|
||||||
packAndDeploy,
|
packAndDeploy,
|
||||||
testDeployment,
|
testDeployment,
|
||||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||||
|
|
||||||
jest.setTimeout(2 * 60 * 1000);
|
jest.setTimeout(4 * 60 * 1000);
|
||||||
const builderUrl = '@canary';
|
const builderUrl = '@canary';
|
||||||
let buildUtilsUrl;
|
let buildUtilsUrl;
|
||||||
|
|
||||||
@@ -17,6 +22,48 @@ beforeAll(async () => {
|
|||||||
console.log('buildUtilsUrl', buildUtilsUrl);
|
console.log('buildUtilsUrl', buildUtilsUrl);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// unit tests
|
||||||
|
|
||||||
|
it('should re-create symlinks properly', async () => {
|
||||||
|
const files = await glob('**', path.join(__dirname, 'symlinks'));
|
||||||
|
assert.equal(Object.keys(files).length, 2);
|
||||||
|
|
||||||
|
const outDir = path.join(__dirname, 'symlinks-out');
|
||||||
|
await fs.remove(outDir);
|
||||||
|
|
||||||
|
const files2 = await download(files, outDir);
|
||||||
|
assert.equal(Object.keys(files2).length, 2);
|
||||||
|
|
||||||
|
const [linkStat, aStat] = await Promise.all([
|
||||||
|
fs.lstat(path.join(outDir, 'link.txt')),
|
||||||
|
fs.lstat(path.join(outDir, 'a.txt')),
|
||||||
|
]);
|
||||||
|
assert(linkStat.isSymbolicLink());
|
||||||
|
assert(aStat.isFile());
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create zip files with symlinks properly', async () => {
|
||||||
|
const files = await glob('**', path.join(__dirname, 'symlinks'));
|
||||||
|
assert.equal(Object.keys(files).length, 2);
|
||||||
|
|
||||||
|
const outFile = path.join(__dirname, 'symlinks.zip');
|
||||||
|
await fs.remove(outFile);
|
||||||
|
|
||||||
|
const outDir = path.join(__dirname, 'symlinks-out');
|
||||||
|
await fs.remove(outDir);
|
||||||
|
await fs.mkdirp(outDir);
|
||||||
|
|
||||||
|
await fs.writeFile(outFile, await createZip(files));
|
||||||
|
await execa('unzip', [outFile], { cwd: outDir });
|
||||||
|
|
||||||
|
const [linkStat, aStat] = await Promise.all([
|
||||||
|
fs.lstat(path.join(outDir, 'link.txt')),
|
||||||
|
fs.lstat(path.join(outDir, 'a.txt')),
|
||||||
|
]);
|
||||||
|
assert(linkStat.isSymbolicLink());
|
||||||
|
assert(aStat.isFile());
|
||||||
|
});
|
||||||
|
|
||||||
// own fixtures
|
// own fixtures
|
||||||
|
|
||||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||||
|
|||||||
24
packages/now-build-utils/tsconfig.json
Normal file
24
packages/now-build-utils/tsconfig.json
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"declaration": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"lib": ["esnext"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"moduleResolution": "node",
|
||||||
|
"noEmitOnError": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"noImplicitReturns": true,
|
||||||
|
"noUnusedLocals": true,
|
||||||
|
"noUnusedParameters": true,
|
||||||
|
"outDir": "./dist",
|
||||||
|
"types": ["node"],
|
||||||
|
"strict": true,
|
||||||
|
"target": "esnext"
|
||||||
|
},
|
||||||
|
"include": [
|
||||||
|
"src/**/*"
|
||||||
|
],
|
||||||
|
"exclude": [
|
||||||
|
"node_modules"
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/cgi",
|
"name": "@now/cgi",
|
||||||
"version": "0.0.16-canary.0",
|
"version": "0.1.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
5
packages/now-go/.gitignore
vendored
5
packages/now-go/.gitignore
vendored
@@ -1,4 +1,5 @@
|
|||||||
node_modules
|
node_modules
|
||||||
*.log
|
*.log
|
||||||
launcher
|
/?.js
|
||||||
bin
|
/go
|
||||||
|
/get-exported-function-name
|
||||||
|
|||||||
@@ -1,7 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
mkdir -p bin
|
|
||||||
cd util
|
|
||||||
GOOS=linux GOARCH=amd64 go build get-exported-function-name.go
|
|
||||||
mv get-exported-function-name ../bin/
|
|
||||||
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
const path = require('path');
|
|
||||||
|
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const tar = require('tar');
|
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
|
|
||||||
const url = 'https://dl.google.com/go/go1.11.1.linux-amd64.tar.gz';
|
|
||||||
|
|
||||||
module.exports = async () => {
|
|
||||||
const res = await fetch(url);
|
|
||||||
const dir = await getWritableDirectory();
|
|
||||||
|
|
||||||
if (!res.ok) {
|
|
||||||
throw new Error(`Failed to download: ${url}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
res.body
|
|
||||||
.on('error', reject)
|
|
||||||
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
|
||||||
.on('finish', () => resolve(path.join(dir, 'bin', 'go')));
|
|
||||||
});
|
|
||||||
};
|
|
||||||
129
packages/now-go/go-helpers.js
Normal file
129
packages/now-go/go-helpers.js
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
const tar = require('tar');
|
||||||
|
const execa = require('execa');
|
||||||
|
const fetch = require('node-fetch');
|
||||||
|
const { mkdirp } = require('fs-extra');
|
||||||
|
const { dirname, join } = require('path');
|
||||||
|
const debug = require('debug')('@now/go:go-helpers');
|
||||||
|
|
||||||
|
const archMap = new Map([['x64', 'amd64'], ['x86', '386']]);
|
||||||
|
const platformMap = new Map([['win32', 'windows']]);
|
||||||
|
|
||||||
|
// Location where the `go` binary will be installed after `postinstall`
|
||||||
|
const GO_DIR = join(__dirname, 'go');
|
||||||
|
const GO_BIN = join(GO_DIR, 'bin/go');
|
||||||
|
|
||||||
|
const getPlatform = p => platformMap.get(p) || p;
|
||||||
|
const getArch = a => archMap.get(a) || a;
|
||||||
|
const getGoUrl = (version, platform, arch) => {
|
||||||
|
const goArch = getArch(arch);
|
||||||
|
const goPlatform = getPlatform(platform);
|
||||||
|
const ext = platform === 'win32' ? 'zip' : 'tar.gz';
|
||||||
|
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
async function getExportedFunctionName(filePath) {
|
||||||
|
debug('Detecting handler name for %o', filePath);
|
||||||
|
const bin = join(__dirname, 'get-exported-function-name');
|
||||||
|
const args = [filePath];
|
||||||
|
const name = await execa.stdout(bin, args);
|
||||||
|
debug('Detected exported name %o', name);
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a `$GOPATH` directory tree, as per `go help gopath` instructions.
|
||||||
|
// Without this, `go` won't recognize the `$GOPATH`.
|
||||||
|
function createGoPathTree(goPath, platform, arch) {
|
||||||
|
const tuple = `${getPlatform(platform)}_${getArch(arch)}`;
|
||||||
|
debug('Creating GOPATH directory structure for %o (%s)', goPath, tuple);
|
||||||
|
return Promise.all([
|
||||||
|
mkdirp(join(goPath, 'bin')),
|
||||||
|
mkdirp(join(goPath, 'pkg', tuple)),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function get({ src } = {}) {
|
||||||
|
const args = ['get'];
|
||||||
|
if (src) {
|
||||||
|
debug('Fetching `go` dependencies for file %o', src);
|
||||||
|
args.push(src);
|
||||||
|
} else {
|
||||||
|
debug('Fetching `go` dependencies for cwd %o', this.cwd);
|
||||||
|
}
|
||||||
|
await this(...args);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function build({ src, dest }) {
|
||||||
|
debug('Building `go` binary %o -> %o', src, dest);
|
||||||
|
let sources;
|
||||||
|
if (Array.isArray(src)) {
|
||||||
|
sources = src;
|
||||||
|
} else {
|
||||||
|
sources = [src];
|
||||||
|
}
|
||||||
|
await this('build', '-o', dest, ...sources);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function createGo(
|
||||||
|
goPath,
|
||||||
|
platform = process.platform,
|
||||||
|
arch = process.arch,
|
||||||
|
opts = {},
|
||||||
|
goMod = false,
|
||||||
|
) {
|
||||||
|
const env = {
|
||||||
|
...process.env,
|
||||||
|
PATH: `${dirname(GO_BIN)}:${process.env.PATH}`,
|
||||||
|
GOPATH: goPath,
|
||||||
|
...opts.env,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (goMod) {
|
||||||
|
env.GO111MODULE = 'on';
|
||||||
|
}
|
||||||
|
|
||||||
|
function go(...args) {
|
||||||
|
debug('Exec %o', `go ${args.join(' ')}`);
|
||||||
|
return execa('go', args, { stdio: 'inherit', ...opts, env });
|
||||||
|
}
|
||||||
|
go.cwd = opts.cwd || process.cwd();
|
||||||
|
go.get = get;
|
||||||
|
go.build = build;
|
||||||
|
go.goPath = goPath;
|
||||||
|
await createGoPathTree(goPath, platform, arch);
|
||||||
|
return go;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function downloadGo(
|
||||||
|
dir = GO_DIR,
|
||||||
|
version = '1.12',
|
||||||
|
platform = process.platform,
|
||||||
|
arch = process.arch,
|
||||||
|
) {
|
||||||
|
debug('Installing `go` v%s to %o for %s %s', version, dir, platform, arch);
|
||||||
|
|
||||||
|
const url = getGoUrl(version, platform, arch);
|
||||||
|
debug('Downloading `go` URL: %o', url);
|
||||||
|
const res = await fetch(url);
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
throw new Error(`Failed to download: ${url} (${res.status})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: use a zip extractor when `ext === "zip"`
|
||||||
|
await mkdirp(dir);
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
res.body
|
||||||
|
.on('error', reject)
|
||||||
|
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
||||||
|
.on('error', reject)
|
||||||
|
.on('finish', resolve);
|
||||||
|
});
|
||||||
|
|
||||||
|
return createGo(dir, platform, arch);
|
||||||
|
}
|
||||||
|
|
||||||
|
module.exports = {
|
||||||
|
createGo,
|
||||||
|
downloadGo,
|
||||||
|
getExportedFunctionName,
|
||||||
|
};
|
||||||
@@ -1,126 +1,198 @@
|
|||||||
const path = require('path');
|
const { join, sep, dirname } = require('path');
|
||||||
const { mkdirp, readFile, writeFile } = require('fs-extra');
|
const {
|
||||||
|
readFile, writeFile, pathExists, move,
|
||||||
|
} = require('fs-extra');
|
||||||
|
|
||||||
const execa = require('execa');
|
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
|
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // eslint-disable-line import/no-extraneous-dependencies
|
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
const { createGo, getExportedFunctionName } = require('./go-helpers');
|
||||||
const downloadGit = require('lambda-git');
|
|
||||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const downloadGoBin = require('./download-go-bin');
|
|
||||||
|
|
||||||
// creates a `$GOPATH` directory tree, as per
|
const config = {
|
||||||
// `go help gopath`'s instructions.
|
|
||||||
// without this, Go won't recognize the `$GOPATH`
|
|
||||||
async function createGoPathTree(goPath) {
|
|
||||||
await mkdirp(path.join(goPath, 'bin'));
|
|
||||||
await mkdirp(path.join(goPath, 'pkg', 'linux_amd64'));
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.config = {
|
|
||||||
maxLambdaSize: '10mb',
|
maxLambdaSize: '10mb',
|
||||||
};
|
};
|
||||||
|
|
||||||
exports.build = async ({ files, entrypoint }) => {
|
async function build({ files, entrypoint }) {
|
||||||
console.log('downloading files...');
|
console.log('Downloading user files...');
|
||||||
|
|
||||||
const gitPath = await getWritableDirectory();
|
const [goPath, outDir] = await Promise.all([
|
||||||
const goPath = await getWritableDirectory();
|
getWritableDirectory(),
|
||||||
const srcPath = path.join(goPath, 'src', 'lambda');
|
getWritableDirectory(),
|
||||||
const outDir = await getWritableDirectory();
|
]);
|
||||||
|
|
||||||
await createGoPathTree(goPath);
|
|
||||||
|
|
||||||
|
const srcPath = join(goPath, 'src', 'lambda');
|
||||||
const downloadedFiles = await download(files, srcPath);
|
const downloadedFiles = await download(files, srcPath);
|
||||||
|
|
||||||
console.log('downloading go binary...');
|
console.log(`Parsing AST for "${entrypoint}"`);
|
||||||
const goBin = await downloadGoBin();
|
let parseFunctionName;
|
||||||
|
|
||||||
console.log('downloading git binary...');
|
|
||||||
// downloads a git binary that works on Amazon Linux and sets
|
|
||||||
// `process.env.GIT_EXEC_PATH` so `go(1)` can see it
|
|
||||||
await downloadGit({ targetDirectory: gitPath });
|
|
||||||
|
|
||||||
const goEnv = {
|
|
||||||
...process.env,
|
|
||||||
GOOS: 'linux',
|
|
||||||
GOARCH: 'amd64',
|
|
||||||
GOPATH: goPath,
|
|
||||||
};
|
|
||||||
|
|
||||||
console.log(`parsing AST for "${entrypoint}"`);
|
|
||||||
let handlerFunctionName = '';
|
|
||||||
try {
|
try {
|
||||||
handlerFunctionName = await execa.stdout(
|
parseFunctionName = await getExportedFunctionName(
|
||||||
path.join(__dirname, 'bin', 'get-exported-function-name'),
|
downloadedFiles[entrypoint].fsPath,
|
||||||
[downloadedFiles[entrypoint].fsPath],
|
|
||||||
);
|
);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
console.log(`failed to parse AST for "${entrypoint}"`);
|
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||||
throw err;
|
throw err;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (handlerFunctionName === '') {
|
if (!parseFunctionName) {
|
||||||
const e = new Error(
|
const err = new Error(
|
||||||
`Could not find an exported function on "${entrypoint}"`,
|
`Could not find an exported function in "${entrypoint}"`,
|
||||||
);
|
);
|
||||||
console.log(e.message);
|
console.log(err.message);
|
||||||
throw e;
|
throw err;
|
||||||
}
|
}
|
||||||
|
|
||||||
console.log(
|
const handlerFunctionName = parseFunctionName.split(',')[0];
|
||||||
`Found exported function "${handlerFunctionName}" on "${entrypoint}"`,
|
|
||||||
);
|
|
||||||
|
|
||||||
const origianlMainGoContents = await readFile(
|
console.log(
|
||||||
path.join(__dirname, 'main.go'),
|
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`,
|
||||||
'utf8',
|
|
||||||
);
|
);
|
||||||
const mainGoContents = origianlMainGoContents.replace(
|
|
||||||
'__NOW_HANDLER_FUNC_NAME',
|
|
||||||
handlerFunctionName,
|
|
||||||
);
|
|
||||||
// in order to allow the user to have `main.go`, we need our `main.go` to be called something else
|
|
||||||
const mainGoFileName = 'main__now__go__.go';
|
|
||||||
|
|
||||||
// we need `main.go` in the same dir as the entrypoint,
|
// we need `main.go` in the same dir as the entrypoint,
|
||||||
// otherwise `go build` will refuse to build
|
// otherwise `go build` will refuse to build
|
||||||
const entrypointDirname = path.dirname(downloadedFiles[entrypoint].fsPath);
|
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||||
|
|
||||||
// Go doesn't like to build files in different directories,
|
// check if package name other than main
|
||||||
// so now we place `main.go` together with the user code
|
const packageName = parseFunctionName.split(',')[1];
|
||||||
await writeFile(path.join(entrypointDirname, mainGoFileName), mainGoContents);
|
const isGoModExist = await pathExists(join(entrypointDirname, 'go.mod'));
|
||||||
|
if (packageName !== 'main') {
|
||||||
console.log('installing dependencies');
|
const go = await createGo(
|
||||||
// `go get` will look at `*.go` (note we set `cwd`), parse
|
goPath,
|
||||||
// the `import`s and download any packages that aren't part of the stdlib
|
process.platform,
|
||||||
try {
|
process.arch,
|
||||||
await execa(goBin, ['get'], {
|
{
|
||||||
env: goEnv,
|
cwd: entrypointDirname,
|
||||||
cwd: entrypointDirname,
|
},
|
||||||
stdio: 'inherit',
|
true,
|
||||||
});
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to `go get`');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('running go build...');
|
|
||||||
try {
|
|
||||||
await execa(
|
|
||||||
goBin,
|
|
||||||
[
|
|
||||||
'build',
|
|
||||||
'-o',
|
|
||||||
path.join(outDir, 'handler'),
|
|
||||||
path.join(entrypointDirname, mainGoFileName),
|
|
||||||
downloadedFiles[entrypoint].fsPath,
|
|
||||||
],
|
|
||||||
{ env: goEnv, cwd: entrypointDirname, stdio: 'inherit' },
|
|
||||||
);
|
);
|
||||||
} catch (err) {
|
if (!isGoModExist) {
|
||||||
console.log('failed to `go build`');
|
try {
|
||||||
throw err;
|
const defaultGoModContent = `module ${packageName}`;
|
||||||
|
|
||||||
|
await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
|
||||||
|
} catch (err) {
|
||||||
|
console.log(`failed to create default go.mod for ${packageName}`);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const mainModGoFileName = 'main__mod__.go';
|
||||||
|
const modMainGoContents = await readFile(
|
||||||
|
join(__dirname, mainModGoFileName),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
|
||||||
|
let goPackageName = `${packageName}/${packageName}`;
|
||||||
|
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
||||||
|
|
||||||
|
if (isGoModExist) {
|
||||||
|
const goModContents = await readFile(
|
||||||
|
join(entrypointDirname, 'go.mod'),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
goPackageName = `${
|
||||||
|
goModContents.split('\n')[0].split(' ')[1]
|
||||||
|
}/${packageName}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const mainModGoContents = modMainGoContents
|
||||||
|
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
||||||
|
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
||||||
|
|
||||||
|
// write main__mod__.go
|
||||||
|
await writeFile(
|
||||||
|
join(entrypointDirname, mainModGoFileName),
|
||||||
|
mainModGoContents,
|
||||||
|
);
|
||||||
|
|
||||||
|
// move user go file to folder
|
||||||
|
try {
|
||||||
|
// default path
|
||||||
|
let finalDestination = join(entrypointDirname, packageName, entrypoint);
|
||||||
|
const entrypointArr = entrypoint.split(sep);
|
||||||
|
|
||||||
|
// if `entrypoint` include folder, only use filename
|
||||||
|
if (entrypointArr.length > 1) {
|
||||||
|
finalDestination = join(
|
||||||
|
entrypointDirname,
|
||||||
|
packageName,
|
||||||
|
entrypointArr.pop(),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
await move(downloadedFiles[entrypoint].fsPath, finalDestination);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to move entry to package folder');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('tidy go.mod file');
|
||||||
|
try {
|
||||||
|
// ensure go.mod up-to-date
|
||||||
|
await go('mod', 'tidy');
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go mod tidy`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Running `go build`...');
|
||||||
|
const destPath = join(outDir, 'handler');
|
||||||
|
try {
|
||||||
|
const src = [join(entrypointDirname, mainModGoFileName)];
|
||||||
|
await go.build({ src, dest: destPath });
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go build`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const go = await createGo(
|
||||||
|
goPath,
|
||||||
|
process.platform,
|
||||||
|
process.arch,
|
||||||
|
{
|
||||||
|
cwd: entrypointDirname,
|
||||||
|
},
|
||||||
|
false,
|
||||||
|
);
|
||||||
|
const origianlMainGoContents = await readFile(
|
||||||
|
join(__dirname, 'main.go'),
|
||||||
|
'utf8',
|
||||||
|
);
|
||||||
|
const mainGoContents = origianlMainGoContents.replace(
|
||||||
|
'__NOW_HANDLER_FUNC_NAME',
|
||||||
|
handlerFunctionName,
|
||||||
|
);
|
||||||
|
|
||||||
|
// in order to allow the user to have `main.go`,
|
||||||
|
// we need our `main.go` to be called something else
|
||||||
|
const mainGoFileName = 'main__now__go__.go';
|
||||||
|
|
||||||
|
// Go doesn't like to build files in different directories,
|
||||||
|
// so now we place `main.go` together with the user code
|
||||||
|
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||||
|
|
||||||
|
// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
|
||||||
|
// and download any packages that aren't part of the stdlib
|
||||||
|
try {
|
||||||
|
await go.get();
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go get`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Running `go build`...');
|
||||||
|
const destPath = join(outDir, 'handler');
|
||||||
|
try {
|
||||||
|
const src = [
|
||||||
|
join(entrypointDirname, mainGoFileName),
|
||||||
|
downloadedFiles[entrypoint].fsPath,
|
||||||
|
];
|
||||||
|
await go.build({ src, dest: destPath });
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go build`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const lambda = await createLambda({
|
const lambda = await createLambda({
|
||||||
@@ -133,4 +205,6 @@ exports.build = async ({ files, entrypoint }) => {
|
|||||||
return {
|
return {
|
||||||
[entrypoint]: lambda,
|
[entrypoint]: lambda,
|
||||||
};
|
};
|
||||||
};
|
}
|
||||||
|
|
||||||
|
module.exports = { config, build };
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
now "../../utils/go/bridge"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
|
now "github.com/zeit/now-builders/utils/go/bridge"
|
||||||
)
|
)
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
|
|||||||
12
packages/now-go/main__mod__.go
Normal file
12
packages/now-go/main__mod__.go
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"__NOW_HANDLER_PACKAGE_NAME"
|
||||||
|
|
||||||
|
now "github.com/zeit/now-builders/utils/go/bridge"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))
|
||||||
|
}
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/go",
|
"name": "@now/go",
|
||||||
"version": "0.2.13-canary.0",
|
"version": "0.4.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
@@ -8,25 +8,20 @@
|
|||||||
"directory": "packages/now-go"
|
"directory": "packages/now-go"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "best -I test/*.js",
|
"postinstall": "node ./util/install"
|
||||||
"prepublish": "./build.sh"
|
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"bin",
|
"*.js",
|
||||||
"download-go-bin.js",
|
"main.go",
|
||||||
"index.js",
|
"main__mod__.go",
|
||||||
"main.go"
|
"util"
|
||||||
],
|
],
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"debug": "^4.1.1",
|
||||||
"execa": "^1.0.0",
|
"execa": "^1.0.0",
|
||||||
"fs-extra": "^7.0.0",
|
"fs-extra": "^7.0.0",
|
||||||
"lambda-git": "^0.1.2",
|
|
||||||
"mkdirp-promise": "5.0.1",
|
"mkdirp-promise": "5.0.1",
|
||||||
"node-fetch": "^2.2.1",
|
"node-fetch": "^2.2.1",
|
||||||
"tar": "4.4.6"
|
"tar": "4.4.6"
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@zeit/best": "0.4.3",
|
|
||||||
"rmfr": "2.0.0"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
13
packages/now-go/test/fixtures/01-cowsay/index.go
vendored
Normal file
13
packages/now-go/test/fixtures/01-cowsay/index.go
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package cowsay
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
say "github.com/dhruvbird/go-cowsay"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, say.Format("cow:RANDOMNESS_PLACEHOLDER"))
|
||||||
|
}
|
||||||
11
packages/now-go/test/fixtures/01-cowsay/now.json
vendored
Normal file
11
packages/now-go/test/fixtures/01-cowsay/now.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{ "src": "index.go", "use": "@now/go" },
|
||||||
|
{ "src": "subdirectory/index.go", "use": "@now/go" }
|
||||||
|
],
|
||||||
|
"probes": [
|
||||||
|
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
||||||
|
{ "path": "/subdirectory", "mustContain": "subcow:RANDOMNESS_PLACEHOLDER" }
|
||||||
|
]
|
||||||
|
}
|
||||||
13
packages/now-go/test/fixtures/01-cowsay/subdirectory/index.go
vendored
Normal file
13
packages/now-go/test/fixtures/01-cowsay/subdirectory/index.go
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package subcow
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
say "github.com/dhruvbird/go-cowsay"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, say.Format("subcow:RANDOMNESS_PLACEHOLDER"))
|
||||||
|
}
|
||||||
33
packages/now-go/test/test.js
Normal file
33
packages/now-go/test/test.js
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
/* global beforeAll, expect, it, jest */
|
||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const {
|
||||||
|
packAndDeploy,
|
||||||
|
testDeployment,
|
||||||
|
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||||
|
|
||||||
|
jest.setTimeout(4 * 60 * 1000);
|
||||||
|
const buildUtilsUrl = '@canary';
|
||||||
|
let builderUrl;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
const builderPath = path.resolve(__dirname, '..');
|
||||||
|
builderUrl = await packAndDeploy(builderPath);
|
||||||
|
console.log('builderUrl', builderUrl);
|
||||||
|
});
|
||||||
|
|
||||||
|
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-restricted-syntax
|
||||||
|
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||||
|
// eslint-disable-next-line no-loop-func
|
||||||
|
it(`should build ${fixture}`, async () => {
|
||||||
|
await expect(
|
||||||
|
testDeployment(
|
||||||
|
{ builderUrl, buildUtilsUrl },
|
||||||
|
path.join(fixturesPath, fixture),
|
||||||
|
),
|
||||||
|
).resolves.toBeDefined();
|
||||||
|
});
|
||||||
|
}
|
||||||
@@ -34,7 +34,7 @@ func main() {
|
|||||||
if fn.Name.IsExported() == true {
|
if fn.Name.IsExported() == true {
|
||||||
// we found the first exported function
|
// we found the first exported function
|
||||||
// we're done!
|
// we're done!
|
||||||
fmt.Print(fn.Name.Name)
|
fmt.Print(fn.Name.Name, ",", parsed.Name.Name)
|
||||||
os.Exit(0)
|
os.Exit(0)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
18
packages/now-go/util/install.js
Normal file
18
packages/now-go/util/install.js
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
const { join } = require('path');
|
||||||
|
const { downloadGo } = require('../go-helpers');
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
// First download the `go` binary for this platform/arch.
|
||||||
|
const go = await downloadGo();
|
||||||
|
|
||||||
|
// Build the `get-exported-function-name` helper program.
|
||||||
|
// `go get` is not necessary because the program has no external deps.
|
||||||
|
const src = join(__dirname, 'get-exported-function-name.go');
|
||||||
|
const dest = join(__dirname, '../get-exported-function-name');
|
||||||
|
await go.build({ src, dest });
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -11,7 +11,6 @@ const defaultOptions = {
|
|||||||
removeRedundantAttributes: true,
|
removeRedundantAttributes: true,
|
||||||
useShortDoctype: true,
|
useShortDoctype: true,
|
||||||
collapseWhitespace: true,
|
collapseWhitespace: true,
|
||||||
collapseInlineTagWhitespace: true,
|
|
||||||
collapseBooleanAttributes: true,
|
collapseBooleanAttributes: true,
|
||||||
caseSensitive: true,
|
caseSensitive: true,
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/html-minifier",
|
"name": "@now/html-minifier",
|
||||||
"version": "1.0.8-canary.0",
|
"version": "1.1.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/lambda",
|
"name": "@now/lambda",
|
||||||
"version": "0.4.10-canary.0",
|
"version": "0.5.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ const {
|
|||||||
testDeployment,
|
testDeployment,
|
||||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||||
|
|
||||||
jest.setTimeout(2 * 60 * 1000);
|
jest.setTimeout(4 * 60 * 1000);
|
||||||
const buildUtilsUrl = '@canary';
|
const buildUtilsUrl = '@canary';
|
||||||
let builderUrl;
|
let builderUrl;
|
||||||
|
|
||||||
|
|||||||
@@ -34,8 +34,6 @@ exports.build = async ({ files, entrypoint, config }) => {
|
|||||||
stream: stream.pipe(unifiedStream(processor)),
|
stream: stream.pipe(unifiedStream(processor)),
|
||||||
});
|
});
|
||||||
|
|
||||||
console.log(result.data.toString());
|
|
||||||
|
|
||||||
const replacedEntrypoint = entrypoint.replace(/\.[^.]+$/, '.html');
|
const replacedEntrypoint = entrypoint.replace(/\.[^.]+$/, '.html');
|
||||||
|
|
||||||
return { [replacedEntrypoint]: result };
|
return { [replacedEntrypoint]: result };
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/md",
|
"name": "@now/md",
|
||||||
"version": "0.4.10-canary.0",
|
"version": "0.5.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ const {
|
|||||||
testDeployment,
|
testDeployment,
|
||||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||||
|
|
||||||
jest.setTimeout(2 * 60 * 1000);
|
jest.setTimeout(4 * 60 * 1000);
|
||||||
const buildUtilsUrl = '@canary';
|
const buildUtilsUrl = '@canary';
|
||||||
let builderUrl;
|
let builderUrl;
|
||||||
|
|
||||||
|
|||||||
@@ -43,17 +43,8 @@ exports.build = async ({ files, entrypoint, workPath }) => {
|
|||||||
return glob('**', outDir, mountpoint);
|
return glob('**', outDir, mountpoint);
|
||||||
};
|
};
|
||||||
|
|
||||||
exports.prepareCache = async ({ cachePath }) => {
|
exports.prepareCache = async ({ workPath }) => ({
|
||||||
console.log('writing package.json...');
|
...(await glob('node_modules/**', workPath)),
|
||||||
const packageJson = { dependencies: { 'mdx-deck': '1.7.15' } };
|
...(await glob('package-lock.json', workPath)),
|
||||||
const packageJsonPath = path.join(cachePath, 'package.json');
|
...(await glob('yarn.lock', workPath)),
|
||||||
await writeFile(packageJsonPath, JSON.stringify(packageJson));
|
});
|
||||||
console.log('running npm install...');
|
|
||||||
await runNpmInstall(path.dirname(packageJsonPath), ['--prod']);
|
|
||||||
|
|
||||||
return {
|
|
||||||
...(await glob('node_modules/**', cachePath)),
|
|
||||||
...(await glob('package-lock.json', cachePath)),
|
|
||||||
...(await glob('yarn.lock', cachePath)),
|
|
||||||
};
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/mdx-deck",
|
"name": "@now/mdx-deck",
|
||||||
"version": "0.4.19-canary.0",
|
"version": "0.5.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ const {
|
|||||||
testDeployment,
|
testDeployment,
|
||||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||||
|
|
||||||
jest.setTimeout(2 * 60 * 1000);
|
jest.setTimeout(4 * 60 * 1000);
|
||||||
const buildUtilsUrl = '@canary';
|
const buildUtilsUrl = '@canary';
|
||||||
let builderUrl;
|
let builderUrl;
|
||||||
|
|
||||||
|
|||||||
2
packages/now-next/.gitignore
vendored
Normal file
2
packages/now-next/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
/dist
|
||||||
|
/src/now__bridge.d.ts
|
||||||
11
packages/now-next/getBridgeTypes.sh
Executable file
11
packages/now-next/getBridgeTypes.sh
Executable file
@@ -0,0 +1,11 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
bridge_entrypoint="$(node -p 'require.resolve("@now/node-bridge")')"
|
||||||
|
bridge_defs="$(dirname "$bridge_entrypoint")/bridge.d.ts"
|
||||||
|
|
||||||
|
if [ ! -e "$bridge_defs" ]; then
|
||||||
|
yarn install --cwd "$bridge_entrypoint"
|
||||||
|
fi
|
||||||
|
|
||||||
|
cp -v "$bridge_defs" src/now__bridge.d.ts
|
||||||
@@ -1,336 +0,0 @@
|
|||||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const FileBlob = require('@now/build-utils/file-blob'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const path = require('path');
|
|
||||||
const { readFile, writeFile, unlink } = require('fs.promised');
|
|
||||||
const {
|
|
||||||
runNpmInstall,
|
|
||||||
runPackageJsonScript,
|
|
||||||
} = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const semver = require('semver');
|
|
||||||
const nextLegacyVersions = require('./legacy-versions');
|
|
||||||
const {
|
|
||||||
excludeFiles,
|
|
||||||
validateEntrypoint,
|
|
||||||
includeOnlyEntryDirectory,
|
|
||||||
moveEntryDirectoryToRoot,
|
|
||||||
normalizePackageJson,
|
|
||||||
excludeStaticDirectory,
|
|
||||||
onlyStaticDirectory,
|
|
||||||
} = require('./utils');
|
|
||||||
|
|
||||||
/** @typedef { import('@now/build-utils/file-ref').Files } Files */
|
|
||||||
/** @typedef { import('@now/build-utils/fs/download').DownloadedFiles } DownloadedFiles */
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @typedef {Object} BuildParamsType
|
|
||||||
* @property {Files} files - Files object
|
|
||||||
* @property {string} entrypoint - Entrypoint specified for the builder
|
|
||||||
* @property {string} workPath - Working directory for this build
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Read package.json from files
|
|
||||||
* @param {DownloadedFiles} files
|
|
||||||
*/
|
|
||||||
async function readPackageJson(files) {
|
|
||||||
if (!files['package.json']) {
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
const packageJsonPath = files['package.json'].fsPath;
|
|
||||||
return JSON.parse(await readFile(packageJsonPath, 'utf8'));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write package.json
|
|
||||||
* @param {string} workPath
|
|
||||||
* @param {Object} packageJson
|
|
||||||
*/
|
|
||||||
async function writePackageJson(workPath, packageJson) {
|
|
||||||
await writeFile(
|
|
||||||
path.join(workPath, 'package.json'),
|
|
||||||
JSON.stringify(packageJson, null, 2),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Write .npmrc with npm auth token
|
|
||||||
* @param {string} workPath
|
|
||||||
* @param {string} token
|
|
||||||
*/
|
|
||||||
async function writeNpmRc(workPath, token) {
|
|
||||||
await writeFile(
|
|
||||||
path.join(workPath, '.npmrc'),
|
|
||||||
`//registry.npmjs.org/:_authToken=${token}`,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.config = {
|
|
||||||
maxLambdaSize: '5mb',
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {BuildParamsType} buildParams
|
|
||||||
* @returns {Promise<Files>}
|
|
||||||
*/
|
|
||||||
exports.build = async ({ files, workPath, entrypoint }) => {
|
|
||||||
validateEntrypoint(entrypoint);
|
|
||||||
|
|
||||||
console.log('downloading user files...');
|
|
||||||
const entryDirectory = path.dirname(entrypoint);
|
|
||||||
const filesOnlyEntryDirectory = includeOnlyEntryDirectory(
|
|
||||||
files,
|
|
||||||
entryDirectory,
|
|
||||||
);
|
|
||||||
const filesWithEntryDirectoryRoot = moveEntryDirectoryToRoot(
|
|
||||||
filesOnlyEntryDirectory,
|
|
||||||
entryDirectory,
|
|
||||||
);
|
|
||||||
const filesWithoutStaticDirectory = excludeStaticDirectory(
|
|
||||||
filesWithEntryDirectoryRoot,
|
|
||||||
);
|
|
||||||
const downloadedFiles = await download(filesWithoutStaticDirectory, workPath);
|
|
||||||
|
|
||||||
const pkg = await readPackageJson(downloadedFiles);
|
|
||||||
|
|
||||||
let nextVersion;
|
|
||||||
if (pkg.dependencies && pkg.dependencies.next) {
|
|
||||||
nextVersion = pkg.dependencies.next;
|
|
||||||
} else if (pkg.devDependencies && pkg.devDependencies.next) {
|
|
||||||
nextVersion = pkg.devDependencies.next;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!nextVersion) {
|
|
||||||
throw new Error(
|
|
||||||
'No Next.js version could be detected in "package.json". Make sure `"next"` is installed in "dependencies" or "devDependencies"',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const isLegacy = (() => {
|
|
||||||
// If version is using the dist-tag instead of a version range
|
|
||||||
if (nextVersion === 'canary' || nextVersion === 'latest') {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// If the version is an exact match with the legacy versions
|
|
||||||
if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
|
|
||||||
// When the version can't be matched with legacy versions, so it must be a newer version
|
|
||||||
if (maxSatisfying === null) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
})();
|
|
||||||
|
|
||||||
console.log(`MODE: ${isLegacy ? 'legacy' : 'serverless'}`);
|
|
||||||
|
|
||||||
if (isLegacy) {
|
|
||||||
try {
|
|
||||||
await unlink(path.join(workPath, 'yarn.lock'));
|
|
||||||
} catch (err) {
|
|
||||||
console.log('no yarn.lock removed');
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
await unlink(path.join(workPath, 'package-lock.json'));
|
|
||||||
} catch (err) {
|
|
||||||
console.log('no package-lock.json removed');
|
|
||||||
}
|
|
||||||
|
|
||||||
console.warn(
|
|
||||||
"WARNING: your application is being deployed in @now/next's legacy mode. http://err.sh/zeit/now-builders/now-next-legacy-mode",
|
|
||||||
);
|
|
||||||
|
|
||||||
console.log('normalizing package.json');
|
|
||||||
const packageJson = normalizePackageJson(pkg);
|
|
||||||
console.log('normalized package.json result: ', packageJson);
|
|
||||||
await writePackageJson(workPath, packageJson);
|
|
||||||
} else if (!pkg.scripts || !pkg.scripts['now-build']) {
|
|
||||||
console.warn(
|
|
||||||
'WARNING: "now-build" script not found. Adding \'"now-build": "next build"\' to "package.json" automatically',
|
|
||||||
);
|
|
||||||
pkg.scripts = {
|
|
||||||
'now-build': 'next build',
|
|
||||||
...(pkg.scripts || {}),
|
|
||||||
};
|
|
||||||
console.log('normalized package.json result: ', pkg);
|
|
||||||
await writePackageJson(workPath, pkg);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (process.env.NPM_AUTH_TOKEN) {
|
|
||||||
console.log('found NPM_AUTH_TOKEN in environment, creating .npmrc');
|
|
||||||
await writeNpmRc(workPath, process.env.NPM_AUTH_TOKEN);
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('installing dependencies...');
|
|
||||||
await runNpmInstall(workPath, ['--prefer-offline']);
|
|
||||||
console.log('running user script...');
|
|
||||||
await runPackageJsonScript(workPath, 'now-build');
|
|
||||||
|
|
||||||
if (isLegacy) {
|
|
||||||
console.log('running npm install --production...');
|
|
||||||
await runNpmInstall(workPath, ['--prefer-offline', '--production']);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (process.env.NPM_AUTH_TOKEN) {
|
|
||||||
await unlink(path.join(workPath, '.npmrc'));
|
|
||||||
}
|
|
||||||
|
|
||||||
const lambdas = {};
|
|
||||||
|
|
||||||
if (isLegacy) {
|
|
||||||
const filesAfterBuild = await glob('**', workPath);
|
|
||||||
|
|
||||||
console.log('preparing lambda files...');
|
|
||||||
let buildId;
|
|
||||||
try {
|
|
||||||
buildId = await readFile(
|
|
||||||
path.join(workPath, '.next', 'BUILD_ID'),
|
|
||||||
'utf8',
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
console.error(
|
|
||||||
'BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"',
|
|
||||||
);
|
|
||||||
throw new Error('Missing BUILD_ID');
|
|
||||||
}
|
|
||||||
const dotNextRootFiles = await glob('.next/*', workPath);
|
|
||||||
const dotNextServerRootFiles = await glob('.next/server/*', workPath);
|
|
||||||
const nodeModules = excludeFiles(
|
|
||||||
await glob('node_modules/**', workPath),
|
|
||||||
file => file.startsWith('node_modules/.cache'),
|
|
||||||
);
|
|
||||||
const launcherFiles = {
|
|
||||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
|
||||||
};
|
|
||||||
const nextFiles = {
|
|
||||||
...nodeModules,
|
|
||||||
...dotNextRootFiles,
|
|
||||||
...dotNextServerRootFiles,
|
|
||||||
...launcherFiles,
|
|
||||||
};
|
|
||||||
if (filesAfterBuild['next.config.js']) {
|
|
||||||
nextFiles['next.config.js'] = filesAfterBuild['next.config.js'];
|
|
||||||
}
|
|
||||||
const pages = await glob(
|
|
||||||
'**/*.js',
|
|
||||||
path.join(workPath, '.next', 'server', 'static', buildId, 'pages'),
|
|
||||||
);
|
|
||||||
const launcherPath = path.join(__dirname, 'legacy-launcher.js');
|
|
||||||
const launcherData = await readFile(launcherPath, 'utf8');
|
|
||||||
|
|
||||||
await Promise.all(
|
|
||||||
Object.keys(pages).map(async (page) => {
|
|
||||||
// These default pages don't have to be handled as they'd always 404
|
|
||||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const pathname = page.replace(/\.js$/, '');
|
|
||||||
const launcher = launcherData.replace(
|
|
||||||
'PATHNAME_PLACEHOLDER',
|
|
||||||
`/${pathname.replace(/(^|\/)index$/, '')}`,
|
|
||||||
);
|
|
||||||
|
|
||||||
const pageFiles = {
|
|
||||||
[`.next/server/static/${buildId}/pages/_document.js`]: filesAfterBuild[
|
|
||||||
`.next/server/static/${buildId}/pages/_document.js`
|
|
||||||
],
|
|
||||||
[`.next/server/static/${buildId}/pages/_app.js`]: filesAfterBuild[
|
|
||||||
`.next/server/static/${buildId}/pages/_app.js`
|
|
||||||
],
|
|
||||||
[`.next/server/static/${buildId}/pages/_error.js`]: filesAfterBuild[
|
|
||||||
`.next/server/static/${buildId}/pages/_error.js`
|
|
||||||
],
|
|
||||||
[`.next/server/static/${buildId}/pages/${page}`]: filesAfterBuild[
|
|
||||||
`.next/server/static/${buildId}/pages/${page}`
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
console.log(`Creating lambda for page: "${page}"...`);
|
|
||||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
|
||||||
files: {
|
|
||||||
...nextFiles,
|
|
||||||
...pageFiles,
|
|
||||||
'now__launcher.js': new FileBlob({ data: launcher }),
|
|
||||||
},
|
|
||||||
handler: 'now__launcher.launcher',
|
|
||||||
runtime: 'nodejs8.10',
|
|
||||||
});
|
|
||||||
console.log(`Created lambda for page: "${page}"`);
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
} else {
|
|
||||||
console.log('preparing lambda files...');
|
|
||||||
const launcherFiles = {
|
|
||||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
|
||||||
'now__launcher.js': new FileFsRef({
|
|
||||||
fsPath: path.join(__dirname, 'launcher.js'),
|
|
||||||
}),
|
|
||||||
};
|
|
||||||
const pages = await glob(
|
|
||||||
'**/*.js',
|
|
||||||
path.join(workPath, '.next', 'serverless', 'pages'),
|
|
||||||
);
|
|
||||||
|
|
||||||
const pageKeys = Object.keys(pages);
|
|
||||||
|
|
||||||
if (pageKeys.length === 0) {
|
|
||||||
throw new Error(
|
|
||||||
'No serverless pages were built. https://err.sh/zeit/now-builders/now-next-no-serverless-pages-built',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await Promise.all(
|
|
||||||
pageKeys.map(async (page) => {
|
|
||||||
// These default pages don't have to be handled as they'd always 404
|
|
||||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const pathname = page.replace(/\.js$/, '');
|
|
||||||
|
|
||||||
console.log(`Creating lambda for page: "${page}"...`);
|
|
||||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
|
||||||
files: {
|
|
||||||
...launcherFiles,
|
|
||||||
'page.js': pages[page],
|
|
||||||
},
|
|
||||||
handler: 'now__launcher.launcher',
|
|
||||||
runtime: 'nodejs8.10',
|
|
||||||
});
|
|
||||||
console.log(`Created lambda for page: "${page}"`);
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const nextStaticFiles = await glob(
|
|
||||||
'**',
|
|
||||||
path.join(workPath, '.next', 'static'),
|
|
||||||
);
|
|
||||||
const staticFiles = Object.keys(nextStaticFiles).reduce(
|
|
||||||
(mappedFiles, file) => ({
|
|
||||||
...mappedFiles,
|
|
||||||
[path.join(entryDirectory, `_next/static/${file}`)]: nextStaticFiles[file],
|
|
||||||
}),
|
|
||||||
{},
|
|
||||||
);
|
|
||||||
|
|
||||||
const nextStaticDirectory = onlyStaticDirectory(filesWithEntryDirectoryRoot);
|
|
||||||
const staticDirectoryFiles = Object.keys(nextStaticDirectory).reduce(
|
|
||||||
(mappedFiles, file) => ({
|
|
||||||
...mappedFiles,
|
|
||||||
[path.join(entryDirectory, file)]: nextStaticDirectory[file],
|
|
||||||
}),
|
|
||||||
{},
|
|
||||||
);
|
|
||||||
|
|
||||||
return { ...lambdas, ...staticFiles, ...staticDirectoryFiles };
|
|
||||||
};
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
process.env.NODE_ENV = 'production';
|
|
||||||
|
|
||||||
const { Server } = require('http');
|
|
||||||
const { Bridge } = require('./now__bridge.js');
|
|
||||||
const page = require('./page.js');
|
|
||||||
|
|
||||||
const bridge = new Bridge();
|
|
||||||
bridge.port = 3000;
|
|
||||||
|
|
||||||
const server = new Server(page.render);
|
|
||||||
server.listen(bridge.port);
|
|
||||||
|
|
||||||
exports.launcher = bridge.launcher;
|
|
||||||
@@ -1,19 +0,0 @@
|
|||||||
const { Server } = require('http');
|
|
||||||
const next = require('next-server');
|
|
||||||
const url = require('url');
|
|
||||||
const { Bridge } = require('./now__bridge.js');
|
|
||||||
|
|
||||||
const bridge = new Bridge();
|
|
||||||
bridge.port = 3000;
|
|
||||||
|
|
||||||
process.env.NODE_ENV = 'production';
|
|
||||||
|
|
||||||
const app = next({});
|
|
||||||
|
|
||||||
const server = new Server((req, res) => {
|
|
||||||
const parsedUrl = url.parse(req.url, true);
|
|
||||||
app.render(req, res, 'PATHNAME_PLACEHOLDER', parsedUrl.query, parsedUrl);
|
|
||||||
});
|
|
||||||
server.listen(bridge.port);
|
|
||||||
|
|
||||||
exports.launcher = bridge.launcher;
|
|
||||||
@@ -1,16 +1,33 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/next",
|
"name": "@now/next",
|
||||||
"version": "0.0.85-canary.1",
|
"version": "0.2.0",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
|
"main": "./dist/index",
|
||||||
|
"scripts": {
|
||||||
|
"build": "./getBridgeTypes.sh && tsc",
|
||||||
|
"test": "npm run build && jest",
|
||||||
|
"prepublish": "yarn run build"
|
||||||
|
},
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/zeit/now-builders.git",
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
"directory": "packages/now-next"
|
"directory": "packages/now-next"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@now/node-bridge": "0.1.4",
|
"@now/node-bridge": "^1.1.0",
|
||||||
"execa": "^1.0.0",
|
"fs-extra": "^7.0.0",
|
||||||
"fs.promised": "^3.0.0",
|
"get-port": "^5.0.0",
|
||||||
|
"resolve-from": "^5.0.0",
|
||||||
"semver": "^5.6.0"
|
"semver": "^5.6.0"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"dist"
|
||||||
|
],
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/next-server": "^8.0.0",
|
||||||
|
"@types/resolve-from": "^5.0.1",
|
||||||
|
"@types/semver": "^6.0.0",
|
||||||
|
"jest": "^24.7.1",
|
||||||
|
"typescript": "^3.4.3"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
41
packages/now-next/src/dev-server.ts
Normal file
41
packages/now-next/src/dev-server.ts
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import resolveFrom from 'resolve-from';
|
||||||
|
import { parse } from 'url';
|
||||||
|
import getPort from 'get-port';
|
||||||
|
import { createServer } from 'http';
|
||||||
|
|
||||||
|
export interface ProcessEnv {
|
||||||
|
[key: string]: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main(env: ProcessEnv, cwd: string) {
|
||||||
|
const next = require(resolveFrom(cwd, 'next'));
|
||||||
|
const app = next({ dev: true, dir: cwd });
|
||||||
|
const handler = app.getRequestHandler();
|
||||||
|
|
||||||
|
const openPort = await getPort({
|
||||||
|
port: [ 5000, 4000 ]
|
||||||
|
});
|
||||||
|
|
||||||
|
const url = `http://localhost:${openPort}`;
|
||||||
|
|
||||||
|
// Prepare for incoming requests
|
||||||
|
await app.prepare();
|
||||||
|
|
||||||
|
createServer((req, res) => {
|
||||||
|
const parsedUrl = parse(req.url || '', true);
|
||||||
|
handler(req, res, parsedUrl);
|
||||||
|
}).listen(openPort, (error: NodeJS.ErrnoException) => {
|
||||||
|
if (error) {
|
||||||
|
console.error(error);
|
||||||
|
process.exit(1);
|
||||||
|
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.send) {
|
||||||
|
process.send(url);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
main(process.env as ProcessEnv, process.cwd());
|
||||||
478
packages/now-next/src/index.ts
Normal file
478
packages/now-next/src/index.ts
Normal file
@@ -0,0 +1,478 @@
|
|||||||
|
import { ChildProcess, fork, SpawnOptions } from 'child_process';
|
||||||
|
import {
|
||||||
|
pathExists,
|
||||||
|
readFile,
|
||||||
|
unlink as unlinkFile,
|
||||||
|
writeFile,
|
||||||
|
} from 'fs-extra';
|
||||||
|
import os from 'os';
|
||||||
|
import path from 'path';
|
||||||
|
import semver from 'semver';
|
||||||
|
|
||||||
|
import {
|
||||||
|
BuildOptions,
|
||||||
|
createLambda,
|
||||||
|
download,
|
||||||
|
FileBlob,
|
||||||
|
FileFsRef,
|
||||||
|
Files,
|
||||||
|
glob,
|
||||||
|
Lambda,
|
||||||
|
PrepareCacheOptions,
|
||||||
|
runNpmInstall,
|
||||||
|
runPackageJsonScript,
|
||||||
|
} from '@now/build-utils';
|
||||||
|
|
||||||
|
import nextLegacyVersions from './legacy-versions';
|
||||||
|
import {
|
||||||
|
excludeFiles,
|
||||||
|
getNextConfig,
|
||||||
|
getPathsInside,
|
||||||
|
getRoutes,
|
||||||
|
includeOnlyEntryDirectory,
|
||||||
|
normalizePackageJson,
|
||||||
|
onlyStaticDirectory,
|
||||||
|
stringMap,
|
||||||
|
validateEntrypoint,
|
||||||
|
} from './utils';
|
||||||
|
|
||||||
|
interface BuildParamsMeta {
|
||||||
|
isDev: boolean | undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface BuildParamsType extends BuildOptions {
|
||||||
|
files: Files;
|
||||||
|
entrypoint: string;
|
||||||
|
workPath: string;
|
||||||
|
meta: BuildParamsMeta;
|
||||||
|
}
|
||||||
|
|
||||||
|
export const version = 2;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read package.json from files
|
||||||
|
*/
|
||||||
|
async function readPackageJson(entryPath: string) {
|
||||||
|
const packagePath = path.join(entryPath, 'package.json');
|
||||||
|
|
||||||
|
try {
|
||||||
|
return JSON.parse(await readFile(packagePath, 'utf8'));
|
||||||
|
} catch (err) {
|
||||||
|
console.log('package.json not found in entry');
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write package.json
|
||||||
|
*/
|
||||||
|
async function writePackageJson(workPath: string, packageJson: Object) {
|
||||||
|
await writeFile(
|
||||||
|
path.join(workPath, 'package.json'),
|
||||||
|
JSON.stringify(packageJson, null, 2)
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write .npmrc with npm auth token
|
||||||
|
*/
|
||||||
|
async function writeNpmRc(workPath: string, token: string) {
|
||||||
|
await writeFile(
|
||||||
|
path.join(workPath, '.npmrc'),
|
||||||
|
`//registry.npmjs.org/:_authToken=${token}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getNextVersion(packageJson: {
|
||||||
|
dependencies?: { [key: string]: string };
|
||||||
|
devDependencies?: { [key: string]: string };
|
||||||
|
}) {
|
||||||
|
let nextVersion;
|
||||||
|
if (packageJson.dependencies && packageJson.dependencies.next) {
|
||||||
|
nextVersion = packageJson.dependencies.next;
|
||||||
|
} else if (packageJson.devDependencies && packageJson.devDependencies.next) {
|
||||||
|
nextVersion = packageJson.devDependencies.next;
|
||||||
|
}
|
||||||
|
return nextVersion;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isLegacyNext(nextVersion: string) {
|
||||||
|
// If version is using the dist-tag instead of a version range
|
||||||
|
if (nextVersion === 'canary' || nextVersion === 'latest') {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the version is an exact match with the legacy versions
|
||||||
|
if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
|
||||||
|
// When the version can't be matched with legacy versions, so it must be a newer version
|
||||||
|
if (maxSatisfying === null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const name = '[@now/next]';
|
||||||
|
const urls: stringMap = {};
|
||||||
|
|
||||||
|
function startDevServer(entryPath: string) {
|
||||||
|
const forked = fork(path.join(__dirname, 'dev-server.js'), [], {
|
||||||
|
cwd: entryPath,
|
||||||
|
execArgv: [],
|
||||||
|
env: {
|
||||||
|
NOW_REGION: 'dev1',
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
const getUrl = () =>
|
||||||
|
new Promise<string>((resolve, reject) => {
|
||||||
|
forked.on('message', resolve);
|
||||||
|
forked.on('error', reject);
|
||||||
|
});
|
||||||
|
|
||||||
|
return { forked, getUrl };
|
||||||
|
}
|
||||||
|
|
||||||
|
export const config = {
|
||||||
|
maxLambdaSize: '5mb',
|
||||||
|
};
|
||||||
|
|
||||||
|
export const build = async ({
|
||||||
|
files,
|
||||||
|
workPath,
|
||||||
|
entrypoint,
|
||||||
|
meta = {} as BuildParamsMeta,
|
||||||
|
}: BuildParamsType): Promise<{
|
||||||
|
routes?: any[];
|
||||||
|
output: Files;
|
||||||
|
watch?: string[];
|
||||||
|
childProcesses: ChildProcess[];
|
||||||
|
}> => {
|
||||||
|
validateEntrypoint(entrypoint);
|
||||||
|
|
||||||
|
const routes: any[] = [];
|
||||||
|
const entryDirectory = path.dirname(entrypoint);
|
||||||
|
const entryPath = path.join(workPath, entryDirectory);
|
||||||
|
const dotNext = path.join(entryPath, '.next');
|
||||||
|
|
||||||
|
console.log(`${name} Downloading user files...`);
|
||||||
|
await download(files, workPath, meta);
|
||||||
|
|
||||||
|
const pkg = await readPackageJson(entryPath);
|
||||||
|
const nextVersion = getNextVersion(pkg);
|
||||||
|
|
||||||
|
if (!nextVersion) {
|
||||||
|
throw new Error(
|
||||||
|
'No Next.js version could be detected in "package.json". Make sure `"next"` is installed in "dependencies" or "devDependencies"'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
process.env.__NEXT_BUILDER_EXPERIMENTAL_TARGET = 'serverless';
|
||||||
|
|
||||||
|
if (meta.isDev) {
|
||||||
|
// eslint-disable-next-line no-underscore-dangle
|
||||||
|
process.env.__NEXT_BUILDER_EXPERIMENTAL_DEBUG = 'true';
|
||||||
|
let childProcess: ChildProcess | undefined;
|
||||||
|
|
||||||
|
// If this is the initial build, we want to start the server
|
||||||
|
if (!urls[entrypoint]) {
|
||||||
|
console.log(`${name} Installing dependencies...`);
|
||||||
|
await runNpmInstall(entryPath, ['--prefer-offline']);
|
||||||
|
const { forked, getUrl } = startDevServer(entryPath);
|
||||||
|
urls[entrypoint] = await getUrl();
|
||||||
|
childProcess = forked;
|
||||||
|
console.log(
|
||||||
|
`${name} Development server for ${entrypoint} running at ${
|
||||||
|
urls[entrypoint]
|
||||||
|
}`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const pathsInside = getPathsInside(entryDirectory, files);
|
||||||
|
|
||||||
|
return {
|
||||||
|
output: {},
|
||||||
|
routes: getRoutes(entryDirectory, pathsInside, files, urls[entrypoint]),
|
||||||
|
watch: pathsInside,
|
||||||
|
childProcesses: childProcess ? [childProcess] : [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await pathExists(dotNext)) {
|
||||||
|
console.warn(
|
||||||
|
'WARNING: You should not upload the `.next` directory. See https://zeit.co/docs/v2/deployments/official-builders/next-js-now-next/ for more details.'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const isLegacy = isLegacyNext(nextVersion);
|
||||||
|
|
||||||
|
console.log(`MODE: ${isLegacy ? 'legacy' : 'serverless'}`);
|
||||||
|
|
||||||
|
if (isLegacy) {
|
||||||
|
try {
|
||||||
|
await unlinkFile(path.join(entryPath, 'yarn.lock'));
|
||||||
|
} catch (err) {
|
||||||
|
console.log('no yarn.lock removed');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
await unlinkFile(path.join(entryPath, 'package-lock.json'));
|
||||||
|
} catch (err) {
|
||||||
|
console.log('no package-lock.json removed');
|
||||||
|
}
|
||||||
|
|
||||||
|
console.warn(
|
||||||
|
"WARNING: your application is being deployed in @now/next's legacy mode. http://err.sh/zeit/now-builders/now-next-legacy-mode"
|
||||||
|
);
|
||||||
|
|
||||||
|
console.log('normalizing package.json');
|
||||||
|
const packageJson = normalizePackageJson(pkg);
|
||||||
|
console.log('normalized package.json result: ', packageJson);
|
||||||
|
await writePackageJson(entryPath, packageJson);
|
||||||
|
} else if (!pkg.scripts || !pkg.scripts['now-build']) {
|
||||||
|
console.warn(
|
||||||
|
'WARNING: "now-build" script not found. Adding \'"now-build": "next build"\' to "package.json" automatically'
|
||||||
|
);
|
||||||
|
pkg.scripts = {
|
||||||
|
'now-build': 'next build',
|
||||||
|
...(pkg.scripts || {}),
|
||||||
|
};
|
||||||
|
console.log('normalized package.json result: ', pkg);
|
||||||
|
await writePackageJson(entryPath, pkg);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.NPM_AUTH_TOKEN) {
|
||||||
|
console.log('found NPM_AUTH_TOKEN in environment, creating .npmrc');
|
||||||
|
await writeNpmRc(entryPath, process.env.NPM_AUTH_TOKEN);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('installing dependencies...');
|
||||||
|
await runNpmInstall(entryPath, ['--prefer-offline']);
|
||||||
|
|
||||||
|
console.log('running user script...');
|
||||||
|
const memoryToConsume = Math.floor(os.totalmem() / 1024 ** 2) - 128;
|
||||||
|
await runPackageJsonScript(entryPath, 'now-build', {
|
||||||
|
env: {
|
||||||
|
...process.env,
|
||||||
|
NODE_OPTIONS: `--max_old_space_size=${memoryToConsume}`,
|
||||||
|
},
|
||||||
|
} as SpawnOptions);
|
||||||
|
|
||||||
|
if (isLegacy) {
|
||||||
|
console.log('running npm install --production...');
|
||||||
|
await runNpmInstall(entryPath, ['--prefer-offline', '--production']);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.NPM_AUTH_TOKEN) {
|
||||||
|
await unlinkFile(path.join(entryPath, '.npmrc'));
|
||||||
|
}
|
||||||
|
|
||||||
|
const lambdas: { [key: string]: Lambda } = {};
|
||||||
|
|
||||||
|
if (isLegacy) {
|
||||||
|
const filesAfterBuild = await glob('**', entryPath);
|
||||||
|
|
||||||
|
console.log('preparing lambda files...');
|
||||||
|
let buildId: string;
|
||||||
|
try {
|
||||||
|
buildId = await readFile(
|
||||||
|
path.join(entryPath, '.next', 'BUILD_ID'),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
console.error(
|
||||||
|
'BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"'
|
||||||
|
);
|
||||||
|
throw new Error('Missing BUILD_ID');
|
||||||
|
}
|
||||||
|
const dotNextRootFiles = await glob('.next/*', entryPath);
|
||||||
|
const dotNextServerRootFiles = await glob('.next/server/*', entryPath);
|
||||||
|
const nodeModules = excludeFiles(
|
||||||
|
await glob('node_modules/**', entryPath),
|
||||||
|
file => file.startsWith('node_modules/.cache')
|
||||||
|
);
|
||||||
|
const launcherFiles = {
|
||||||
|
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||||
|
};
|
||||||
|
const nextFiles: { [key: string]: FileFsRef } = {
|
||||||
|
...nodeModules,
|
||||||
|
...dotNextRootFiles,
|
||||||
|
...dotNextServerRootFiles,
|
||||||
|
...launcherFiles,
|
||||||
|
};
|
||||||
|
if (filesAfterBuild['next.config.js']) {
|
||||||
|
nextFiles['next.config.js'] = filesAfterBuild['next.config.js'];
|
||||||
|
}
|
||||||
|
const pages = await glob(
|
||||||
|
'**/*.js',
|
||||||
|
path.join(entryPath, '.next', 'server', 'static', buildId, 'pages')
|
||||||
|
);
|
||||||
|
const launcherPath = path.join(__dirname, 'legacy-launcher.js');
|
||||||
|
const launcherData = await readFile(launcherPath, 'utf8');
|
||||||
|
|
||||||
|
await Promise.all(
|
||||||
|
Object.keys(pages).map(async page => {
|
||||||
|
// These default pages don't have to be handled as they'd always 404
|
||||||
|
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const pathname = page.replace(/\.js$/, '');
|
||||||
|
const launcher = launcherData.replace(
|
||||||
|
'PATHNAME_PLACEHOLDER',
|
||||||
|
`/${pathname.replace(/(^|\/)index$/, '')}`
|
||||||
|
);
|
||||||
|
|
||||||
|
const pageFiles = {
|
||||||
|
[`.next/server/static/${buildId}/pages/_document.js`]: filesAfterBuild[
|
||||||
|
`.next/server/static/${buildId}/pages/_document.js`
|
||||||
|
],
|
||||||
|
[`.next/server/static/${buildId}/pages/_app.js`]: filesAfterBuild[
|
||||||
|
`.next/server/static/${buildId}/pages/_app.js`
|
||||||
|
],
|
||||||
|
[`.next/server/static/${buildId}/pages/_error.js`]: filesAfterBuild[
|
||||||
|
`.next/server/static/${buildId}/pages/_error.js`
|
||||||
|
],
|
||||||
|
[`.next/server/static/${buildId}/pages/${page}`]: filesAfterBuild[
|
||||||
|
`.next/server/static/${buildId}/pages/${page}`
|
||||||
|
],
|
||||||
|
};
|
||||||
|
|
||||||
|
console.log(`Creating lambda for page: "${page}"...`);
|
||||||
|
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||||
|
files: {
|
||||||
|
...nextFiles,
|
||||||
|
...pageFiles,
|
||||||
|
'now__launcher.js': new FileBlob({ data: launcher }),
|
||||||
|
},
|
||||||
|
handler: 'now__launcher.launcher',
|
||||||
|
runtime: 'nodejs8.10',
|
||||||
|
});
|
||||||
|
console.log(`Created lambda for page: "${page}"`);
|
||||||
|
})
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
console.log('preparing lambda files...');
|
||||||
|
const launcherFiles = {
|
||||||
|
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||||
|
'now__launcher.js': new FileFsRef({
|
||||||
|
fsPath: path.join(__dirname, 'launcher.js'),
|
||||||
|
}),
|
||||||
|
};
|
||||||
|
const pages = await glob(
|
||||||
|
'**/*.js',
|
||||||
|
path.join(entryPath, '.next', 'serverless', 'pages')
|
||||||
|
);
|
||||||
|
|
||||||
|
const pageKeys = Object.keys(pages);
|
||||||
|
|
||||||
|
if (pageKeys.length === 0) {
|
||||||
|
const nextConfig = await getNextConfig(workPath, entryPath);
|
||||||
|
|
||||||
|
if (nextConfig != null) {
|
||||||
|
console.info('Found next.config.js:');
|
||||||
|
console.info(nextConfig);
|
||||||
|
console.info();
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(
|
||||||
|
'No serverless pages were built. https://err.sh/zeit/now-builders/now-next-no-serverless-pages-built'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// An optional assets folder that is placed alongside every page entrypoint
|
||||||
|
const assets = await glob(
|
||||||
|
'assets/**',
|
||||||
|
path.join(entryPath, '.next', 'serverless')
|
||||||
|
);
|
||||||
|
|
||||||
|
const assetKeys = Object.keys(assets);
|
||||||
|
if (assetKeys.length > 0) {
|
||||||
|
console.log('detected assets to be bundled with lambda:');
|
||||||
|
assetKeys.forEach(assetFile => console.log(`\t${assetFile}`));
|
||||||
|
}
|
||||||
|
|
||||||
|
await Promise.all(
|
||||||
|
pageKeys.map(async page => {
|
||||||
|
// These default pages don't have to be handled as they'd always 404
|
||||||
|
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const pathname = page.replace(/\.js$/, '');
|
||||||
|
|
||||||
|
console.log(`Creating lambda for page: "${page}"...`);
|
||||||
|
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||||
|
files: {
|
||||||
|
...launcherFiles,
|
||||||
|
...assets,
|
||||||
|
'page.js': pages[page],
|
||||||
|
},
|
||||||
|
handler: 'now__launcher.launcher',
|
||||||
|
runtime: 'nodejs8.10',
|
||||||
|
});
|
||||||
|
console.log(`Created lambda for page: "${page}"`);
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const nextStaticFiles = await glob(
|
||||||
|
'**',
|
||||||
|
path.join(entryPath, '.next', 'static')
|
||||||
|
);
|
||||||
|
const staticFiles = Object.keys(nextStaticFiles).reduce(
|
||||||
|
(mappedFiles, file) => ({
|
||||||
|
...mappedFiles,
|
||||||
|
[path.join(entryDirectory, `_next/static/${file}`)]: nextStaticFiles[
|
||||||
|
file
|
||||||
|
],
|
||||||
|
}),
|
||||||
|
{}
|
||||||
|
);
|
||||||
|
|
||||||
|
const staticDirectoryFiles = onlyStaticDirectory(
|
||||||
|
includeOnlyEntryDirectory(files, entryDirectory),
|
||||||
|
entryDirectory
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
output: { ...lambdas, ...staticFiles, ...staticDirectoryFiles },
|
||||||
|
routes: [],
|
||||||
|
watch: [],
|
||||||
|
childProcesses: [],
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export const prepareCache = async ({
|
||||||
|
workPath,
|
||||||
|
entrypoint,
|
||||||
|
}: PrepareCacheOptions) => {
|
||||||
|
console.log('preparing cache ...');
|
||||||
|
const entryDirectory = path.dirname(entrypoint);
|
||||||
|
const entryPath = path.join(workPath, entryDirectory);
|
||||||
|
|
||||||
|
const pkg = await readPackageJson(entryPath);
|
||||||
|
const nextVersion = getNextVersion(pkg);
|
||||||
|
if (!nextVersion) throw new Error('Could not parse Next.js version');
|
||||||
|
const isLegacy = isLegacyNext(nextVersion);
|
||||||
|
|
||||||
|
if (isLegacy) {
|
||||||
|
// skip caching legacy mode (swapping deps between all and production can get bug-prone)
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('producing cache file manifest ...');
|
||||||
|
const cacheEntrypoint = path.relative(workPath, entryPath);
|
||||||
|
const cache = {
|
||||||
|
...(await glob(path.join(cacheEntrypoint, 'node_modules/**'), workPath)),
|
||||||
|
...(await glob(path.join(cacheEntrypoint, '.next/cache/**'), workPath)),
|
||||||
|
...(await glob(path.join(cacheEntrypoint, 'package-lock.json'), workPath)),
|
||||||
|
...(await glob(path.join(cacheEntrypoint, 'yarn.lock'), workPath)),
|
||||||
|
};
|
||||||
|
console.log('cache file manifest produced');
|
||||||
|
return cache;
|
||||||
|
};
|
||||||
13
packages/now-next/src/launcher.ts
Normal file
13
packages/now-next/src/launcher.ts
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
if (!process.env.NODE_ENV) {
|
||||||
|
process.env.NODE_ENV = process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
|
||||||
|
}
|
||||||
|
|
||||||
|
const { Server } = require('http');
|
||||||
|
const { Bridge } = require('./now__bridge');
|
||||||
|
const page = require('./page');
|
||||||
|
|
||||||
|
const server = new Server(page.render);
|
||||||
|
const bridge = new Bridge(server);
|
||||||
|
bridge.listen();
|
||||||
|
|
||||||
|
exports.launcher = bridge.launcher;
|
||||||
20
packages/now-next/src/legacy-launcher.ts
Normal file
20
packages/now-next/src/legacy-launcher.ts
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
import { Server } from 'http';
|
||||||
|
import next from 'next-server';
|
||||||
|
import url from 'url';
|
||||||
|
import { Bridge } from './now__bridge';
|
||||||
|
|
||||||
|
if (!process.env.NODE_ENV) {
|
||||||
|
process.env.NODE_ENV = process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
|
||||||
|
}
|
||||||
|
|
||||||
|
const app = next({});
|
||||||
|
|
||||||
|
const server = new Server((req, res) => {
|
||||||
|
const parsedUrl = url.parse(req.url || '', true);
|
||||||
|
app.render(req, res, 'PATHNAME_PLACEHOLDER', parsedUrl.query, parsedUrl);
|
||||||
|
});
|
||||||
|
|
||||||
|
const bridge = new Bridge(server);
|
||||||
|
bridge.listen();
|
||||||
|
|
||||||
|
exports.launcher = bridge.launcher;
|
||||||
@@ -1,4 +1,4 @@
|
|||||||
module.exports = [
|
export default [
|
||||||
'0.1.0',
|
'0.1.0',
|
||||||
'0.1.1',
|
'0.1.1',
|
||||||
'0.2.0',
|
'0.2.0',
|
||||||
222
packages/now-next/src/utils.ts
Normal file
222
packages/now-next/src/utils.ts
Normal file
@@ -0,0 +1,222 @@
|
|||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'path';
|
||||||
|
import { Files } from '@now/build-utils';
|
||||||
|
|
||||||
|
type stringMap = {[key: string]: string};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate if the entrypoint is allowed to be used
|
||||||
|
*/
|
||||||
|
function validateEntrypoint(entrypoint: string) {
|
||||||
|
if (
|
||||||
|
!/package\.json$/.exec(entrypoint)
|
||||||
|
&& !/next\.config\.js$/.exec(entrypoint)
|
||||||
|
) {
|
||||||
|
throw new Error(
|
||||||
|
'Specified "src" for "@now/next" has to be "package.json" or "next.config.js"',
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Exclude certain files from the files object
|
||||||
|
*/
|
||||||
|
function excludeFiles(files: Files, matcher: (filePath: string) => boolean): Files {
|
||||||
|
return Object.keys(files).reduce((newFiles, filePath) => {
|
||||||
|
if (matcher(filePath)) {
|
||||||
|
return newFiles;
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
...newFiles,
|
||||||
|
[filePath]: files[filePath],
|
||||||
|
};
|
||||||
|
}, {});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Creates a new Files object holding only the entrypoint files
|
||||||
|
*/
|
||||||
|
function includeOnlyEntryDirectory(files: Files, entryDirectory: string): Files {
|
||||||
|
if (entryDirectory === '.') {
|
||||||
|
return files;
|
||||||
|
}
|
||||||
|
|
||||||
|
function matcher(filePath: string) {
|
||||||
|
return !filePath.startsWith(entryDirectory);
|
||||||
|
}
|
||||||
|
|
||||||
|
return excludeFiles(files, matcher);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Exclude package manager lockfiles from files
|
||||||
|
*/
|
||||||
|
function excludeLockFiles(files: Files): Files {
|
||||||
|
const newFiles = files;
|
||||||
|
if (newFiles['package-lock.json']) {
|
||||||
|
delete newFiles['package-lock.json'];
|
||||||
|
}
|
||||||
|
if (newFiles['yarn.lock']) {
|
||||||
|
delete newFiles['yarn.lock'];
|
||||||
|
}
|
||||||
|
return files;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Include the static directory from files
|
||||||
|
*/
|
||||||
|
function onlyStaticDirectory(files: Files, entryDir: string): Files {
|
||||||
|
function matcher(filePath: string) {
|
||||||
|
return !filePath.startsWith(path.join(entryDir, 'static'));
|
||||||
|
}
|
||||||
|
|
||||||
|
return excludeFiles(files, matcher);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Enforce specific package.json configuration for smallest possible lambda
|
||||||
|
*/
|
||||||
|
function normalizePackageJson(defaultPackageJson: {dependencies?: stringMap, devDependencies?: stringMap, scripts?: stringMap} = {}) {
|
||||||
|
const dependencies: stringMap = {};
|
||||||
|
const devDependencies: stringMap = {
|
||||||
|
...defaultPackageJson.dependencies,
|
||||||
|
...defaultPackageJson.devDependencies,
|
||||||
|
};
|
||||||
|
|
||||||
|
if (devDependencies.react) {
|
||||||
|
dependencies.react = devDependencies.react;
|
||||||
|
delete devDependencies.react;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (devDependencies['react-dom']) {
|
||||||
|
dependencies['react-dom'] = devDependencies['react-dom'];
|
||||||
|
delete devDependencies['react-dom'];
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
...defaultPackageJson,
|
||||||
|
dependencies: {
|
||||||
|
// react and react-dom can be overwritten
|
||||||
|
react: 'latest',
|
||||||
|
'react-dom': 'latest',
|
||||||
|
...dependencies, // override react if user provided it
|
||||||
|
// next-server is forced to canary
|
||||||
|
'next-server': 'v7.0.2-canary.49',
|
||||||
|
},
|
||||||
|
devDependencies: {
|
||||||
|
...devDependencies,
|
||||||
|
// next is forced to canary
|
||||||
|
next: 'v7.0.2-canary.49',
|
||||||
|
// next-server is a dependency here
|
||||||
|
'next-server': undefined,
|
||||||
|
},
|
||||||
|
scripts: {
|
||||||
|
...defaultPackageJson.scripts,
|
||||||
|
'now-build': 'NODE_OPTIONS=--max_old_space_size=3000 next build --lambdas',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getNextConfig(workPath: string, entryPath: string) {
|
||||||
|
const entryConfig = path.join(entryPath, './next.config.js');
|
||||||
|
if (await fs.pathExists(entryConfig)) {
|
||||||
|
return fs.readFile(entryConfig, 'utf8');
|
||||||
|
}
|
||||||
|
|
||||||
|
const workConfig = path.join(workPath, './next.config.js');
|
||||||
|
if (await fs.pathExists(workConfig)) {
|
||||||
|
return fs.readFile(workConfig, 'utf8');
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function pathIsInside(firstPath: string, secondPath: string) {
|
||||||
|
return !path.relative(firstPath, secondPath).startsWith('..');
|
||||||
|
}
|
||||||
|
|
||||||
|
function getPathsInside(entryDirectory: string, files: Files) {
|
||||||
|
const watch: string[] = [];
|
||||||
|
|
||||||
|
for (const file of Object.keys(files)) {
|
||||||
|
// If the file is outside of the entrypoint directory, we do
|
||||||
|
// not want to monitor it for changes.
|
||||||
|
if (!pathIsInside(entryDirectory, file)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
watch.push(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
return watch;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getRoutes(entryDirectory: string, pathsInside: string[], files: Files, url: string): any[] {
|
||||||
|
const filesInside: Files = {};
|
||||||
|
const prefix = entryDirectory === `.` ? `/` : `/${entryDirectory}/`;
|
||||||
|
|
||||||
|
for (const file of Object.keys(files)) {
|
||||||
|
if (!pathsInside.includes(file)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
filesInside[file] = files[file];
|
||||||
|
}
|
||||||
|
|
||||||
|
const routes: any[] = [
|
||||||
|
{
|
||||||
|
src: `${prefix}_next/(.*)`,
|
||||||
|
dest: `${url}/_next/$1`
|
||||||
|
},
|
||||||
|
{
|
||||||
|
src: `${prefix}static/(.*)`,
|
||||||
|
dest: `${url}/static/$1`
|
||||||
|
}
|
||||||
|
];
|
||||||
|
|
||||||
|
for (const file of Object.keys(filesInside)) {
|
||||||
|
const relativePath = path.relative(entryDirectory, file);
|
||||||
|
const isPage = pathIsInside('pages', relativePath);
|
||||||
|
|
||||||
|
if (!isPage) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const relativeToPages = path.relative('pages', relativePath);
|
||||||
|
const extension = path.extname(relativeToPages);
|
||||||
|
const pageName = relativeToPages.replace(extension, '');
|
||||||
|
|
||||||
|
if (pageName.startsWith('_')) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
routes.push({
|
||||||
|
src: `${prefix}${pageName}`,
|
||||||
|
dest: `${url}/${pageName}`
|
||||||
|
});
|
||||||
|
|
||||||
|
if (pageName.endsWith('index')) {
|
||||||
|
const resolvedIndex = pageName.replace('/index', '').replace('index', '');
|
||||||
|
|
||||||
|
routes.push({
|
||||||
|
src: `${prefix}${resolvedIndex}`,
|
||||||
|
dest: `${url}/${resolvedIndex}`
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return routes;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Named exports of this module's helpers.
export {
  excludeFiles,
  validateEntrypoint,
  includeOnlyEntryDirectory,
  excludeLockFiles,
  normalizePackageJson,
  onlyStaticDirectory,
  getNextConfig,
  getPathsInside,
  getRoutes,
  stringMap,
};
|
||||||
@@ -1,17 +1,17 @@
|
|||||||
/* global it, expect */
|
/* global it, expect */
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const runBuildLambda = require('../../lib/run-build-lambda');
|
const runBuildLambda = require('../../../../test/lib/run-build-lambda');
|
||||||
|
|
||||||
const FOUR_MINUTES = 240000;
|
const FOUR_MINUTES = 240000;
|
||||||
|
|
||||||
it(
|
it(
|
||||||
'Should build the standard example',
|
'Should build the standard example',
|
||||||
async () => {
|
async () => {
|
||||||
const { buildResult } = await runBuildLambda(
|
const {
|
||||||
path.join(__dirname, 'standard'),
|
buildResult: { output },
|
||||||
);
|
} = await runBuildLambda(path.join(__dirname, 'standard'));
|
||||||
expect(buildResult.index).toBeDefined();
|
expect(output.index).toBeDefined();
|
||||||
const filePaths = Object.keys(buildResult);
|
const filePaths = Object.keys(output);
|
||||||
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
||||||
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
||||||
expect(hasUnderScoreAppStaticFile).toBeTruthy();
|
expect(hasUnderScoreAppStaticFile).toBeTruthy();
|
||||||
@@ -23,11 +23,12 @@ it(
|
|||||||
it(
|
it(
|
||||||
'Should build the monorepo example',
|
'Should build the monorepo example',
|
||||||
async () => {
|
async () => {
|
||||||
const { buildResult } = await runBuildLambda(
|
const {
|
||||||
path.join(__dirname, 'monorepo'),
|
buildResult: { output },
|
||||||
);
|
} = await runBuildLambda(path.join(__dirname, 'monorepo'));
|
||||||
expect(buildResult['www/index']).toBeDefined();
|
expect(output['www/index']).toBeDefined();
|
||||||
const filePaths = Object.keys(buildResult);
|
expect(output['www/static/test.txt']).toBeDefined();
|
||||||
|
const filePaths = Object.keys(output);
|
||||||
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
||||||
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
||||||
expect(hasUnderScoreAppStaticFile).toBeTruthy();
|
expect(hasUnderScoreAppStaticFile).toBeTruthy();
|
||||||
@@ -39,11 +40,11 @@ it(
|
|||||||
it(
|
it(
|
||||||
'Should build the legacy standard example',
|
'Should build the legacy standard example',
|
||||||
async () => {
|
async () => {
|
||||||
const { buildResult } = await runBuildLambda(
|
const {
|
||||||
path.join(__dirname, 'legacy-standard'),
|
buildResult: { output },
|
||||||
);
|
} = await runBuildLambda(path.join(__dirname, 'legacy-standard'));
|
||||||
expect(buildResult.index).toBeDefined();
|
expect(output.index).toBeDefined();
|
||||||
const filePaths = Object.keys(buildResult);
|
const filePaths = Object.keys(output);
|
||||||
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
||||||
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
||||||
expect(hasUnderScoreAppStaticFile).toBeTruthy();
|
expect(hasUnderScoreAppStaticFile).toBeTruthy();
|
||||||
@@ -55,10 +56,10 @@ it(
|
|||||||
it(
|
it(
|
||||||
'Should build the legacy custom dependency test',
|
'Should build the legacy custom dependency test',
|
||||||
async () => {
|
async () => {
|
||||||
const { buildResult } = await runBuildLambda(
|
const {
|
||||||
path.join(__dirname, 'legacy-custom-dependency'),
|
buildResult: { output },
|
||||||
);
|
} = await runBuildLambda(path.join(__dirname, 'legacy-custom-dependency'));
|
||||||
expect(buildResult.index).toBeDefined();
|
expect(output.index).toBeDefined();
|
||||||
},
|
},
|
||||||
FOUR_MINUTES,
|
FOUR_MINUTES,
|
||||||
);
|
);
|
||||||
@@ -75,12 +76,23 @@ it('Should throw when package.json or next.config.js is not the "src"', async ()
|
|||||||
});
|
});
|
||||||
|
|
||||||
it(
|
it(
|
||||||
'Should build the static-files test',
|
'Should build the static-files test on legacy',
|
||||||
async () => {
|
async () => {
|
||||||
const { buildResult } = await runBuildLambda(
|
const {
|
||||||
path.join(__dirname, 'legacy-static-files'),
|
buildResult: { output },
|
||||||
);
|
} = await runBuildLambda(path.join(__dirname, 'legacy-static-files'));
|
||||||
expect(buildResult['static/test.txt']).toBeDefined();
|
expect(output['static/test.txt']).toBeDefined();
|
||||||
|
},
|
||||||
|
FOUR_MINUTES,
|
||||||
|
);
|
||||||
|
|
||||||
|
it(
|
||||||
|
'Should build the static-files test',
|
||||||
|
async () => {
|
||||||
|
const {
|
||||||
|
buildResult: { output },
|
||||||
|
} = await runBuildLambda(path.join(__dirname, 'static-files'));
|
||||||
|
expect(output['static/test.txt']).toBeDefined();
|
||||||
},
|
},
|
||||||
FOUR_MINUTES,
|
FOUR_MINUTES,
|
||||||
);
|
);
|
||||||
@@ -0,0 +1 @@
|
|||||||
|
module.exports = () => 'Hello!';
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user