Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: 220 commits, @now/php@0 ... @now/pytho
| SHA1 |
|---|
| bec9ea101f |
| 54f3f755fb |
| 5b03109ba7 |
| 7ff9e810ff |
| 3036aff45e |
| c366aa69a4 |
| c8d225522d |
| 8ee5063669 |
| 9372e70747 |
| 4a4bd550a1 |
| f53343d547 |
| e4ed811b53 |
| e9935dee31 |
| 2e1e6bb131 |
| 4a01ac4bd0 |
| bd1a7c428f |
| 9a4a3dac47 |
| 4f2c35a0ee |
| 672df5d026 |
| 8cb648abc4 |
| 74f658c634 |
| efbb54a232 |
| 3e2bd03e01 |
| 8dc92b70b9 |
| 4267be4e5a |
| 43ba6459eb |
| 8c5638915d |
| 3fab247c15 |
| 6ab0e2e9ab |
| 34369148d7 |
| 662ad1ed3a |
| 890cd74ee5 |
| 7ef616b31e |
| bebcfa4bb5 |
| 25100c53aa |
| fe20da87e7 |
| 18cb147c86 |
| 9c9e18586f |
| 0cd7192740 |
| a2d9c4fb4b |
| 02fafd2ebc |
| 42577c915c |
| 73db9e11dd |
| 3125125c16 |
| 5335291408 |
| 36620559f9 |
| 360ea3a609 |
| 1cd362126c |
| ae19fe95f6 |
| 3e34d402a2 |
| cc7b97fbbb |
| c1049985af |
| 214388ccf3 |
| b1d6b7bfc0 |
| ece3564dfd |
| a88af1f077 |
| d92f7b26c0 |
| 52198af750 |
| d58bff2453 |
| 8c0a144ae4 |
| 106e4d5f36 |
| 66c28bd695 |
| 55e75296ff |
| 36cbb36737 |
| 978ca328ef |
| 7b383e0f7c |
| faa5ab36aa |
| c0a21969dd |
| 73d0a1723f |
| 7c515544ae |
| b53c9a6299 |
| 35ff11e6e4 |
| 64ee4905cd |
| e50dd7e50a |
| 6101ba9d95 |
| 8dc0c92c58 |
| 44c9f3765a |
| 92c05ca338 |
| 069b557906 |
| 692a0df909 |
| aeafeb5441 |
| a09d5fb355 |
| d8017aa9aa |
| 702f56b9b5 |
| 183b117152 |
| 75b3fb4981 |
| 49e63de5fe |
| 4742cd32f2 |
| 377b73105d |
| a5577efb3d |
| 2ec46dc5c9 |
| 42708ed93c |
| 2fabe95f6e |
| ac1a3dab22 |
| ad4011512d |
| 9ff1a25c8f |
| 8039b3d377 |
| dd9017475c |
| 031499014f |
| 2a68d2a2ad |
| 31299fae6e |
| 4bac0db379 |
| 95e7d459d3 |
| dd120b8d20 |
| b6975676e5 |
| a7951dae81 |
| b0c918f7fb |
| df54dc7dc9 |
| 0dd801ff6c |
| 398743ef95 |
| 337c74b81b |
| 680bb82ec3 |
| 17ed5411e3 |
| d9bbcb6939 |
| 800e4de76f |
| 864dd468d9 |
| ba833871bb |
| e732bac78e |
| 28ea4015b4 |
| a93d97cabd |
| 67f39f7c9b |
| acd793b9e9 |
| f74d61279d |
| fcb8eacec0 |
| c8fca2ba72 |
| 4feffa13eb |
| 3e330b25f4 |
| 9b2cae33af |
| 4b6371530c |
| 9e1d577fc0 |
| cf2f542c71 |
| e608861e4e |
| a99b999209 |
| fd9c6e7847 |
| b2ad3a6147 |
| 997d3c2a30 |
| ca575bf0a6 |
| 4c2e93ccef |
| 4d6437d235 |
| 0d8058d062 |
| 2b5cdfc0a7 |
| 69a41f78fb |
| a013d59d62 |
| 173a29cfdb |
| 3f73451311 |
| 2fc706be43 |
| 0fb7eb6093 |
| aa43c0bc87 |
| 3c5925a6e3 |
| 9fc7b047f5 |
| ecae29457f |
| 77d445af71 |
| 79251ad180 |
| a215dc9103 |
| ea7d8f0f6c |
| 2232efc984 |
| b64ce0f3c0 |
| 74233d50ad |
| 8aebec9fc3 |
| 54584b7763 |
| d163fcbd71 |
| 04c2996c76 |
| 2b69b898ed |
| 846aa11d6a |
| a314a74479 |
| 40f029a72c |
| 493d8a778f |
| cb5dcb658f |
| d77287d07b |
| 61d66bd957 |
| ae73df9e3c |
| cb09ae5bbf |
| a28eeacdaa |
| dd9d46d555 |
| 4472331ee0 |
| ac69836b44 |
| 15949a4ab4 |
| 697ada9d73 |
| cafbe30fa3 |
| 583ebcc526 |
| 52d1bd410c |
| 11d0753bc1 |
| 538710fe56 |
| 2828c89e8d |
| e40b45a939 |
| 38ba8a36fc |
| 0323c505a3 |
| 17ee07f4f6 |
| 0a6ada77ac |
| 4d817dd67d |
| 9682a7cc0b |
| 3456f23b3e |
| 800ca2cb0e |
| ba54b4d706 |
| e9482d66a9 |
| 401b669363 |
| a2a0ede1f6 |
| 3c9fcff743 |
| e5aa526583 |
| 822b0ee3de |
| d612e46233 |
| 77ee10cead |
| fb2029c464 |
| 3b15755054 |
| 4f65cc3aa8 |
| 9936e35280 |
| a04fd242b8 |
| 17bc6174a9 |
| a7c2d9648a |
| faa5ea9e21 |
| c52f30c898 |
| d675edf1dc |
| f85c4f496f |
| d52d7904c2 |
| 79232024bd |
| 660b787bc3 |
| 2dbf983ddb |
| 0866ba9391 |
| d259a722a0 |
| bf77c51f64 |
.circleci/build.sh (Executable file, 12 lines)
@@ -0,0 +1,12 @@
#!/bin/bash
set -euo pipefail

circleci_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
needs_build="$(grep -rn '"build"' packages/*/package.json | cut -d: -f1)"

for pkg in $needs_build; do
  dir="$(dirname "$pkg")"
  cd "$circleci_dir/../$dir"
  echo "Building \`$dir\`"
  yarn build
done
@@ -23,6 +23,9 @@ jobs:
      - run:
          name: Linting
          command: yarn lint
      - run:
          name: Building
          command: ./.circleci/build.sh
      - run:
          name: Tests
          command: yarn test
@@ -30,11 +33,8 @@ jobs:
          name: Potentially save npm token
          command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
      - run:
          name: Potentially publish canary release
          command: "if ls ~/.npmrc >/dev/null 2>&1 && [[ $(git describe --exact-match 2> /dev/null || :) =~ -canary ]]; then yarn run lerna publish from-git --npm-tag canary --yes; else echo \"Did not publish\"; fi"
      - run:
          name: Potentially publish stable release
          command: "if ls ~/.npmrc >/dev/null 2>&1 && [[ ! $(git describe --exact-match 2> /dev/null || :) =~ -canary ]]; then yarn run lerna publish from-git --yes; else echo \"Did not publish\"; fi"
          name: Potentially publish releases to npm
          command: ./.circleci/publish.sh
workflows:
  version: 2
  build-and-deploy:
.circleci/publish.sh (Executable file, 24 lines)
@@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail

if [ ! -e ~/.npmrc ]; then
  echo "~/.npmrc file does not exist, skipping publish"
  exit 0
fi

npm_tag=""
tag="$(git describe --tags --exact-match 2> /dev/null || :)"

if [ -z "$tag" ]; then
  echo "Not a tagged commit, skipping publish"
  exit 0
fi

if [[ "$tag" =~ -canary ]]; then
  echo "Publishing canary release"
  npm_tag="--npm-tag canary"
else
  echo "Publishing stable release"
fi

yarn run lerna publish from-git $npm_tag --yes
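For reference, the publish decision above reduces to a single branch on the `-canary` suffix of the git tag. A throwaway sketch of that logic in Node; the function name and dist-tag values are illustrative and not part of the repository:

```js
// Mirrors the branch in .circleci/publish.sh (sketch only).
function decideNpmTag(gitTag) {
  if (!gitTag) return null; // not a tagged commit: skip publishing
  return /-canary/.test(gitTag) ? 'canary' : 'latest';
}

console.log(decideNpmTag('v0.4.37-canary.1')); // 'canary'
console.log(decideNpmTag('v0.4.37')); // 'latest'
console.log(decideNpmTag('')); // null
```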
@@ -1,3 +1,4 @@
/tmp/*
/node_modules/*
/**/node_modules/*
/packages/now-go/go/*
.gitignore (vendored, 2 lines)
@@ -1,2 +1,4 @@
node_modules
tmp
target/
.next
.prettierrc.json (Normal file, 3 lines)
@@ -0,0 +1,3 @@
{
  "singleQuote": true
}
@@ -2,7 +2,7 @@

This is the full list of official Builders provided by the ZEIT team.

More details here: http://zeit.co/docs
More details here: https://zeit.co/docs/v2/deployments/builders/overview/

### Publishing to npm

errors/now-next-legacy-mode.md (Normal file, 73 lines)
@@ -0,0 +1,73 @@
# `@now/next` Legacy Mode

#### Why This Warning Occurred

`@now/next` has two modes: `legacy` and `serverless`. You will always want to use the `serverless` mode. `legacy` exists only to provide backwards compatibility with previous `@now/next` versions.

The differences:

Legacy:

- Minimal lambda size of `2.2 MB` (approximately)
- Forces `next@v7.0.2-canary.49` and `next-server@v7.0.2-canary.49`
- Forces all `dependencies` to be `devDependencies`
- Loads `next.config.js` on bootup, which sometimes breaks when users don't use `phases` to load files
- Uses `next-server`, which is the full Next.js server with routing, etc.
- Runs `npm install`
- Runs `npm run now-build`
- Runs `npm install --production` after the build

Serverless:

- Minimal lambda size of `49 KB` (approximately)
- Uses the Next.js build target `target: 'serverless'` in `next.config.js` ([documentation](https://github.com/zeit/next.js#summary))
- Does not make changes to your application dependencies
- Does not load `next.config.js` ([as per the serverless target documentation](https://github.com/zeit/next.js#summary))
- Runs `npm install`
- Runs `npm run now-build`
- Does not run `npm install --production`, as the output from the build is all that's needed to bundle lambdas
- No runtime dependencies, meaning smaller lambda functions
- Optimized for fast [cold starts](https://zeit.co/blog/serverless-ssr#cold-start)

#### Possible Ways to Fix It

In order to create the smallest possible lambdas, Next.js has to be configured to build for the `serverless` target.

1. Serverless Next.js requires Next.js 8 or later. To upgrade, install the `latest` version:

```
npm install next --save
```

2. Add the `now-build` script to your `package.json`:

```json
{
  "scripts": {
    "now-build": "next build"
  }
}
```

3. Add `target: 'serverless'` to `next.config.js`:

```js
module.exports = {
  target: 'serverless'
  // Other options are still valid
}
```

4. Optionally, make sure the `"src"` in `"builds"` points to your application's `package.json`:

```json
{
  "version": 2,
  "builds": [{ "src": "package.json", "use": "@now/next" }]
}
```

### Useful Links

- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
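The legacy-mode notes above mention that serverless mode no longer loads `next.config.js` at runtime, and that phase-dependent configuration should go through `phases`. A minimal sketch of a `next.config.js` that combines the `serverless` target with the phase-aware function form; it assumes `next/constants` exports `PHASE_PRODUCTION_BUILD` (true for recent Next.js releases), and `ASSET_PREFIX` is an illustrative environment variable, not part of the repository:

```js
// next.config.js (sketch only; ASSET_PREFIX is illustrative)
const { PHASE_PRODUCTION_BUILD } = require('next/constants');

module.exports = (phase, { defaultConfig }) => {
  // Only set what we change; defaultConfig is available if needed.
  const config = { target: 'serverless' };

  if (phase === PHASE_PRODUCTION_BUILD) {
    // Phase-specific options live here instead of running unconditionally,
    // which is the pattern that broke some deployments in legacy mode.
    config.assetPrefix = process.env.ASSET_PREFIX || '';
  }

  return config;
};
```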
errors/now-next-no-serverless-pages-built.md (Normal file, 45 lines)
@@ -0,0 +1,45 @@
# `@now/next` No Serverless Pages Built

#### Why This Error Occurred

This error occurs when your application is not configured for Serverless Next.js build output.

#### Possible Ways to Fix It

In order to create the smallest possible lambdas, Next.js has to be configured to build for the `serverless` target.

1. Serverless Next.js requires Next.js 8 or later. To upgrade, install the `latest` version:

```
npm install next --save
```

2. Add the `now-build` script to your `package.json`:

```json
{
  "scripts": {
    "now-build": "next build"
  }
}
```

3. Add `target: 'serverless'` to `next.config.js`:

```js
module.exports = {
  target: 'serverless'
  // Other options
}
```

4. Remove `distDir` from `next.config.js`, as `@now/next` can't parse this file and expects your build output at `/.next`.

5. Optionally, make sure the `"src"` in `"builds"` points to your application's `package.json`:

```json
{
  "version": 2,
  "builds": [{ "src": "package.json", "use": "@now/next" }]
}
```
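A quick way to confirm that the `serverless` target actually took effect is to inspect the build output after running `npm run now-build` locally. The sketch below assumes the default output location for the serverless target (`.next/serverless/pages`, i.e. no custom `distDir`), which matches the requirement in step 4 above:

```js
// check-serverless-output.js (sanity check; run after `npm run now-build`)
const fs = require('fs');
const path = require('path');

const pagesDir = path.join(process.cwd(), '.next', 'serverless', 'pages');

if (!fs.existsSync(pagesDir) || fs.readdirSync(pagesDir).length === 0) {
  console.error('No serverless pages found; check `target` and `now-build`.');
  process.exit(1);
}

console.log('Serverless pages built:', fs.readdirSync(pagesDir).join(', '));
```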
@@ -15,13 +15,13 @@
|
||||
"publish-stable": "lerna version",
|
||||
"publish-canary": "lerna version prerelease --preid canary",
|
||||
"lint": "tsc && eslint .",
|
||||
"test": "jest --runInBand",
|
||||
"test": "jest --runInBand --verbose",
|
||||
"lint-staged": "lint-staged"
|
||||
},
|
||||
"pre-commit": "lint-staged",
|
||||
"lint-staged": {
|
||||
"*.js": [
|
||||
"prettier --write --single-quote",
|
||||
"prettier --write",
|
||||
"eslint --fix",
|
||||
"git add"
|
||||
]
|
||||
@@ -31,6 +31,7 @@
|
||||
"@types/glob": "^7.1.1",
|
||||
"@types/multistream": "^2.1.1",
|
||||
"@types/node": "^10.12.8",
|
||||
"async-retry": "1.2.3",
|
||||
"buffer-replace": "^1.0.0",
|
||||
"eslint": "^5.9.0",
|
||||
"eslint-config-airbnb-base": "^13.1.0",
|
||||
|
||||
32
packages/now-bash/.editorconfig
Normal file
32
packages/now-bash/.editorconfig
Normal file
@@ -0,0 +1,32 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = tab
|
||||
indent_size = 4
|
||||
tab_width = 4
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
[{*.json,*.json.example,*.gyp,*.yml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.py]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
# Ideal settings - some plugins might support these.
|
||||
[*.js]
|
||||
quote_type = single
|
||||
|
||||
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}]
|
||||
curly_bracket_next_line = false
|
||||
spaces_around_operators = true
|
||||
spaces_around_brackets = outside
|
||||
# close enough to 1TB
|
||||
indent_brace_style = K&R
|
||||
@@ -33,17 +33,8 @@ if declare -f build > /dev/null; then
|
||||
build "$@"
|
||||
fi
|
||||
|
||||
# Ensure the entrypoint defined a `serve` function
|
||||
if ! declare -f serve > /dev/null; then
|
||||
echo "ERROR: A \`serve\` function must be defined in \"$ENTRYPOINT\"!" >&2
|
||||
# Ensure the entrypoint defined a `handler` function
|
||||
if ! declare -f handler > /dev/null; then
|
||||
echo "ERROR: A \`handler\` function must be defined in \"$ENTRYPOINT\"!" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Show a tree of the final lambda build
|
||||
show_tree() {
|
||||
import "static-binaries@0.0.6"
|
||||
static_binaries tree
|
||||
echo "Final lambda file tree:"
|
||||
tree -a .
|
||||
}
|
||||
IMPORT_DEBUG= IMPORT_CACHE="$(mktemp -d)" show_tree
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
const execa = require('execa');
|
||||
const { join } = require('path');
|
||||
const snakeCase = require('snake-case');
|
||||
const glob = require('@now/build-utils/fs/glob');
|
||||
const download = require('@now/build-utils/fs/download');
|
||||
const { createLambda } = require('@now/build-utils/lambda');
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory');
|
||||
const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '10mb',
|
||||
@@ -12,9 +12,10 @@ exports.config = {
|
||||
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
exports.build = async ({ files, entrypoint, config }) => {
|
||||
exports.build = async ({
|
||||
workPath, files, entrypoint, config,
|
||||
}) => {
|
||||
const srcDir = await getWritableDirectory();
|
||||
const workDir = await getWritableDirectory();
|
||||
|
||||
console.log('downloading files...');
|
||||
await download(files, srcDir);
|
||||
@@ -24,7 +25,7 @@ exports.build = async ({ files, entrypoint, config }) => {
|
||||
return o;
|
||||
}, {});
|
||||
|
||||
const IMPORT_CACHE = `${workDir}/.import-cache`;
|
||||
const IMPORT_CACHE = `${workPath}/.import-cache`;
|
||||
const env = Object.assign({}, process.env, configEnv, {
|
||||
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
|
||||
IMPORT_CACHE,
|
||||
@@ -37,11 +38,12 @@ exports.build = async ({ files, entrypoint, config }) => {
|
||||
|
||||
await execa(builderPath, [entrypoint], {
|
||||
env,
|
||||
cwd: workDir,
|
||||
cwd: workPath,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
const lambda = await createLambda({
|
||||
files: await glob('**', workDir),
|
||||
files: await glob('**', workPath),
|
||||
handler: entrypoint, // not actually used in `bootstrap`
|
||||
runtime: 'provided',
|
||||
environment: Object.assign({}, configEnv, {
|
||||
|
||||
@@ -1,10 +1,15 @@
|
||||
{
|
||||
"name": "@now/bash",
|
||||
"version": "0.0.3",
|
||||
"version": "0.1.4-canary.0",
|
||||
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
|
||||
"main": "index.js",
|
||||
"author": "Nathan Rajlich <nate@zeit.co>",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-bash"
|
||||
},
|
||||
"files": [
|
||||
"builder.sh",
|
||||
"runtime.sh",
|
||||
@@ -15,8 +20,5 @@
|
||||
"dependencies": {
|
||||
"execa": "^1.0.0",
|
||||
"snake-case": "^2.1.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import "static-binaries@0.0.6"
|
||||
import "static-binaries@1.0.0"
|
||||
static_binaries jq
|
||||
|
||||
# These get reset upon each request
|
||||
@@ -35,11 +35,10 @@ _lambda_runtime_next() {
|
||||
# Get an event
|
||||
local event
|
||||
event="$(mktemp)"
|
||||
_lambda_runtime_api invocation/next -D "$headers" | jq -r '.body' > "$event"
|
||||
_lambda_runtime_api invocation/next -D "$headers" | jq --raw-output --monochrome-output '.body' > "$event"
|
||||
|
||||
local request_id
|
||||
request_id="$(grep -Fi Lambda-Runtime-Aws-Request-Id "$headers" | tr -d '[:space:]' | cut -d: -f2)"
|
||||
echo "Request-Id: $request_id" >&2
|
||||
rm -f "$headers"
|
||||
|
||||
# Execute the handler function from the script
|
||||
@@ -49,41 +48,41 @@ _lambda_runtime_next() {
|
||||
local exit_code=0
|
||||
REQUEST="$event"
|
||||
|
||||
# Stdin of the `serve` function is the HTTP request body.
|
||||
# Stdin of the `handler` function is the HTTP request body.
|
||||
# Need to use a fifo here instead of bash <() because Lambda
|
||||
# errors with "/dev/fd/63 not found" for some reason :/
|
||||
local stdin
|
||||
stdin="$(mktemp --dry-run)"
|
||||
stdin="$(mktemp -u)"
|
||||
mkfifo "$stdin"
|
||||
_lambda_runtime_body "$event" > "$stdin" &
|
||||
_lambda_runtime_body < "$event" > "$stdin" &
|
||||
|
||||
handler "$event" < "$stdin" > "$body" || exit_code="$?"
|
||||
|
||||
serve "$event" < "$stdin" > "$body" || exit_code="$?"
|
||||
rm -f "$event" "$stdin"
|
||||
|
||||
if [ "$exit_code" -eq 0 ]; then
|
||||
# Send the response
|
||||
local response
|
||||
response="$(jq -cnMr \
|
||||
jq --raw-input --raw-output --compact-output --slurp --monochrome-output \
|
||||
--arg statusCode "$(cat "$_STATUS_CODE")" \
|
||||
--argjson headers "$(cat "$_HEADERS")" \
|
||||
--arg body "$(base64 --wrap=0 < "$body")" \
|
||||
'{statusCode:$statusCode|tonumber, headers:$headers, encoding:"base64", body:$body}')"
|
||||
'{statusCode:$statusCode|tonumber, headers:$headers, encoding:"base64", body:.|@base64}' < "$body" \
|
||||
| _lambda_runtime_api "invocation/$request_id/response" -X POST -d @- > /dev/null
|
||||
rm -f "$body" "$_HEADERS"
|
||||
_lambda_runtime_api "invocation/$request_id/response" -X POST -d "$response"
|
||||
else
|
||||
local error
|
||||
error='{"exitCode":'"$exit_code"'}'
|
||||
_lambda_runtime_api "invocation/$request_id/error" -X POST -d "$error"
|
||||
echo "\`handler\` function return code: $exit_code"
|
||||
_lambda_runtime_api "invocation/$request_id/error" -X POST -d @- > /dev/null <<< '{"exitCode":'"$exit_code"'}'
|
||||
fi
|
||||
}
|
||||
|
||||
_lambda_runtime_body() {
|
||||
if [ "$(jq -r '.body | type' < "$1")" = "string" ]; then
|
||||
if [ "$(jq -r '.encoding' < "$1")" = "base64" ]; then
|
||||
jq -r '.body' < "$1" | base64 -d
|
||||
local event
|
||||
event="$(cat)"
|
||||
if [ "$(jq --raw-output '.body | type' <<< "$event")" = "string" ]; then
|
||||
if [ "$(jq --raw-output '.encoding' <<< "$event")" = "base64" ]; then
|
||||
jq --raw-output '.body' <<< "$event" | base64 --decode
|
||||
else
|
||||
# assume plain-text body
|
||||
jq -r '.body' < "$1"
|
||||
jq --raw-output '.body' <<< "$event"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
@@ -1 +1,2 @@
/test
tmp
@@ -26,6 +26,18 @@ class FileFsRef {
    this.fsPath = fsPath;
  }

  /**
   * Creates a `FileFsRef` with the correct `mode` from the file system.
   *
   * @argument {Object} options
   * @argument {string} options.fsPath
   * @returns {Promise<FileFsRef>}
   */
  static async fromFsPath({ fsPath }) {
    const { mode } = await fs.lstat(fsPath);
    return new FileFsRef({ mode, fsPath });
  }

  /**
   * @argument {Object} options
   * @argument {number} [options.mode=0o100644]
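The new `FileFsRef.fromFsPath` helper shown above reads the file mode itself, so callers no longer need to `lstat` before constructing a ref. A minimal usage sketch; the require path matches how other builders in this diff load `@now/build-utils` modules:

```js
const FileFsRef = require('@now/build-utils/file-fs-ref.js');

async function refFromDisk(fsPath) {
  // Picks up the on-disk mode (e.g. the executable bit) automatically.
  const ref = await FileFsRef.fromFsPath({ fsPath });
  console.log(ref.mode.toString(8), ref.fsPath);
  return ref;
}

refFromDisk('./build.sh').catch(console.error);
```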
@@ -49,14 +49,14 @@ class FileRef {
|
||||
assert(url);
|
||||
|
||||
await semaToDownloadFromS3.acquire();
|
||||
console.time(`downloading ${url}`);
|
||||
// console.time(`downloading ${url}`);
|
||||
try {
|
||||
return await retry(
|
||||
async () => {
|
||||
const resp = await fetch(url);
|
||||
if (!resp.ok) {
|
||||
const error = new BailableError(
|
||||
`${resp.status} ${resp.statusText}`,
|
||||
`download: ${resp.status} ${resp.statusText} for ${url}`,
|
||||
);
|
||||
if (resp.status === 403) error.bail = true;
|
||||
throw error;
|
||||
@@ -66,7 +66,7 @@ class FileRef {
|
||||
{ factor: 1, retries: 3 },
|
||||
);
|
||||
} finally {
|
||||
console.timeEnd(`downloading ${url}`);
|
||||
// console.timeEnd(`downloading ${url}`);
|
||||
semaToDownloadFromS3.release();
|
||||
}
|
||||
}
|
||||
|
||||
83
packages/now-build-utils/fs/bootstrap-yarn.js
vendored
83
packages/now-build-utils/fs/bootstrap-yarn.js
vendored
@@ -1,83 +0,0 @@
|
||||
const MemoryFileSystem = require('memory-fs');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { spawnSync } = require('child_process');
|
||||
|
||||
const yarnPath = spawnSync('which', ['yarn'])
|
||||
.stdout.toString()
|
||||
.trim();
|
||||
|
||||
const cachePath = spawnSync(yarnPath, ['cache', 'dir'])
|
||||
.stdout.toString()
|
||||
.trim();
|
||||
|
||||
spawnSync(yarnPath, ['cache', 'clean']);
|
||||
const vfs = new MemoryFileSystem();
|
||||
|
||||
function isOutsideCachePath(filename) {
|
||||
const relative = path.relative(cachePath, filename);
|
||||
return relative.startsWith('..');
|
||||
}
|
||||
|
||||
const saveCreateWriteStream = fs.createWriteStream;
|
||||
fs.createWriteStream = (...args) => {
|
||||
const filename = args[0];
|
||||
if (isOutsideCachePath(filename)) {
|
||||
return saveCreateWriteStream.call(fs, ...args);
|
||||
}
|
||||
|
||||
vfs.mkdirpSync(path.dirname(filename));
|
||||
fs.writeFileSync(filename, Buffer.alloc(0));
|
||||
const stream = vfs.createWriteStream(...args);
|
||||
|
||||
stream.on('finish', () => {
|
||||
setTimeout(() => {
|
||||
stream.emit('close');
|
||||
});
|
||||
});
|
||||
|
||||
return stream;
|
||||
};
|
||||
|
||||
const saveReadFile = fs.readFile;
|
||||
fs.readFile = (...args) => {
|
||||
const filename = args[0];
|
||||
if (isOutsideCachePath(filename)) {
|
||||
return saveReadFile.call(fs, ...args);
|
||||
}
|
||||
|
||||
const callback = args[args.length - 1];
|
||||
return vfs.readFile(...args.slice(0, -1), (error, result) => {
|
||||
if (error) {
|
||||
saveReadFile.call(fs, ...args);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(error, result);
|
||||
});
|
||||
};
|
||||
|
||||
const saveCopyFile = fs.copyFile;
|
||||
fs.copyFile = (...args) => {
|
||||
const src = args[0];
|
||||
if (isOutsideCachePath(src)) {
|
||||
return saveCopyFile.call(fs, ...args);
|
||||
}
|
||||
|
||||
const dest = args[1];
|
||||
const callback = args[args.length - 1];
|
||||
const buffer = vfs.readFileSync(src);
|
||||
return fs.writeFile(dest, buffer, callback);
|
||||
};
|
||||
|
||||
const saveWriteFile = fs.writeFile;
|
||||
fs.writeFile = (...args) => {
|
||||
const filename = args[0];
|
||||
if (isOutsideCachePath(filename)) {
|
||||
return saveWriteFile.call(fs, ...args);
|
||||
}
|
||||
|
||||
return vfs.writeFile(...args);
|
||||
};
|
||||
|
||||
require(yarnPath);
|
||||
@@ -1,12 +1,10 @@
const path = require('path');
const fs = require('fs-extra');

const dev = !process.env.AWS_EXECUTION_ENV;
const TMP_PATH = dev ? path.join(process.cwd(), 'tmp') : '/tmp';
const { join } = require('path');
const { tmpdir } = require('os');
const { mkdirp } = require('fs-extra');

module.exports = async function getWritableDirectory() {
  const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
  const directory = path.join(TMP_PATH, name);
  await fs.mkdirp(directory);
  const directory = join(tmpdir(), name);
  await mkdirp(directory);
  return directory;
};
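After this change, `getWritableDirectory` always resolves a fresh directory under `os.tmpdir()` instead of a `tmp` folder relative to the working directory. A minimal usage sketch, assuming `@now/build-utils` is installed as it is for the builders in this diff:

```js
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');

async function main() {
  // Resolves to something like /tmp/3f2a9c1 on Linux; the name is random.
  const scratch = await getWritableDirectory();
  console.log('writable scratch dir:', scratch);
}

main().catch(console.error);
```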
@@ -3,9 +3,9 @@ const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const { spawn } = require('child_process');
|
||||
|
||||
function spawnAsync(command, args, cwd) {
|
||||
function spawnAsync(command, args, cwd, opts = {}) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const child = spawn(command, args, { stdio: 'inherit', cwd });
|
||||
const child = spawn(command, args, { stdio: 'inherit', cwd, ...opts });
|
||||
child.on('error', reject);
|
||||
child.on('close', (code, signal) => (code !== 0
|
||||
? reject(new Error(`Exited with ${code || signal}`))
|
||||
@@ -13,9 +13,18 @@ function spawnAsync(command, args, cwd) {
|
||||
});
|
||||
}
|
||||
|
||||
async function chmodPlusX(fsPath) {
|
||||
const s = await fs.stat(fsPath);
|
||||
const newMode = s.mode | 64 | 8 | 1; // eslint-disable-line no-bitwise
|
||||
if (s.mode === newMode) return;
|
||||
const base8 = newMode.toString(8).slice(-3);
|
||||
await fs.chmod(fsPath, base8);
|
||||
}
|
||||
|
||||
async function runShellScript(fsPath) {
|
||||
assert(path.isAbsolute(fsPath));
|
||||
const destPath = path.dirname(fsPath);
|
||||
await chmodPlusX(fsPath);
|
||||
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
|
||||
return true;
|
||||
}
|
||||
@@ -52,29 +61,34 @@ async function scanParentDirs(destPath, scriptName) {
|
||||
return { hasScript, hasPackageLockJson };
|
||||
}
|
||||
|
||||
async function runNpmInstall(destPath, args = []) {
|
||||
async function installDependencies(destPath, args = []) {
|
||||
assert(path.isAbsolute(destPath));
|
||||
|
||||
let commandArgs = args;
|
||||
console.log(`installing to ${destPath}`);
|
||||
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
||||
|
||||
const opts = {
|
||||
env: {
|
||||
...process.env,
|
||||
// This is a little hack to force `node-gyp` to build for the
|
||||
// Node.js version that `@now/node` and `@now/node-server` use
|
||||
npm_config_target: '8.10.0',
|
||||
},
|
||||
};
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||
await spawnAsync('npm', ['install'].concat(commandArgs), destPath);
|
||||
await spawnAsync('npm', ['cache', 'clean', '--force'], destPath);
|
||||
} else if (process.env.AWS_EXECUTION_ENV) {
|
||||
console.log('using memory-fs for yarn cache');
|
||||
await spawnAsync(
|
||||
'node',
|
||||
[path.join(__dirname, 'bootstrap-yarn.js'), '--cwd', destPath].concat(
|
||||
commandArgs,
|
||||
),
|
||||
destPath,
|
||||
);
|
||||
await spawnAsync('npm', ['install'].concat(commandArgs), destPath, opts);
|
||||
await spawnAsync('npm', ['cache', 'clean', '--force'], destPath, opts);
|
||||
} else {
|
||||
await spawnAsync('yarn', ['--cwd', destPath].concat(commandArgs), destPath);
|
||||
await spawnAsync('yarn', ['cache', 'clean'], destPath);
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
['--cwd', destPath].concat(commandArgs),
|
||||
destPath,
|
||||
opts,
|
||||
);
|
||||
await spawnAsync('yarn', ['cache', 'clean'], destPath, opts);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -99,6 +113,7 @@ async function runPackageJsonScript(destPath, scriptName) {
|
||||
|
||||
module.exports = {
|
||||
runShellScript,
|
||||
runNpmInstall,
|
||||
installDependencies,
|
||||
runNpmInstall: installDependencies,
|
||||
runPackageJsonScript,
|
||||
};
|
||||
|
||||
@@ -29,17 +29,20 @@ async function createLambda({
|
||||
await sema.acquire();
|
||||
try {
|
||||
const zipFile = new ZipFile();
|
||||
const zipBuffer = await new Promise((resolve, reject) => {
|
||||
Object.keys(files)
|
||||
.sort()
|
||||
.forEach((name) => {
|
||||
const file = files[name];
|
||||
const stream = file.toStream();
|
||||
stream.on('error', reject);
|
||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
||||
});
|
||||
|
||||
Object.keys(files)
|
||||
.sort()
|
||||
.forEach((name) => {
|
||||
const file = files[name];
|
||||
const stream = file.toStream();
|
||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
||||
});
|
||||
zipFile.end();
|
||||
streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
|
||||
});
|
||||
|
||||
zipFile.end();
|
||||
const zipBuffer = await streamToBuffer(zipFile.outputStream);
|
||||
return new Lambda({
|
||||
zipBuffer,
|
||||
handler,
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
{
|
||||
"name": "@now/build-utils",
|
||||
"version": "0.4.32-canary.0",
|
||||
"version": "0.4.37-canary.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-build-utils"
|
||||
},
|
||||
"dependencies": {
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "2.1.4",
|
||||
|
||||
5
packages/now-build-utils/test/fixtures/07-cross-install/api/index.js
vendored
Normal file
5
packages/now-build-utils/test/fixtures/07-cross-install/api/index.js
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
const cowsay = require('cowsay').say;
|
||||
|
||||
module.exports = (req, resp) => {
|
||||
resp.end(cowsay({ text: 'cross-cow:RANDOMNESS_PLACEHOLDER' }));
|
||||
};
|
||||
5
packages/now-build-utils/test/fixtures/07-cross-install/api/package.json
vendored
Normal file
5
packages/now-build-utils/test/fixtures/07-cross-install/api/package.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"lib": "../lib"
|
||||
}
|
||||
}
|
||||
7
packages/now-build-utils/test/fixtures/07-cross-install/lib/package.json
vendored
Normal file
7
packages/now-build-utils/test/fixtures/07-cross-install/lib/package.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"name": "lib",
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"cowsay": "*"
|
||||
}
|
||||
}
|
||||
9
packages/now-build-utils/test/fixtures/07-cross-install/now.json
vendored
Normal file
9
packages/now-build-utils/test/fixtures/07-cross-install/now.json
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "api/index.js", "use": "@now/node" }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/api/index.js", "mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
@@ -7,7 +7,7 @@ const {
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
const builderUrl = '@canary';
|
||||
let buildUtilsUrl;
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
const path = require('path');
|
||||
const { mkdirp, copyFile } = require('fs-extra');
|
||||
|
||||
const glob = require('@now/build-utils/fs/glob');
|
||||
const download = require('@now/build-utils/fs/download');
|
||||
const { createLambda } = require('@now/build-utils/lambda');
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory');
|
||||
const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
{
|
||||
"name": "@now/cgi",
|
||||
"version": "0.0.14",
|
||||
"version": "0.0.16-canary.0",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-cgi"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "best -I test/*.js",
|
||||
"prepublish": "./build.sh"
|
||||
@@ -15,8 +21,5 @@
|
||||
"devDependencies": {
|
||||
"@zeit/best": "0.4.3",
|
||||
"rmfr": "2.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
}
|
||||
|
||||
5
packages/now-go/.gitignore
vendored
5
packages/now-go/.gitignore
vendored
@@ -1,4 +1,5 @@
|
||||
node_modules
|
||||
*.log
|
||||
launcher
|
||||
bin
|
||||
/?.js
|
||||
/go
|
||||
/get-exported-function-name
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
mkdir -p bin
|
||||
cd util
|
||||
GOOS=linux GOARCH=amd64 go build get-exported-function-name.go
|
||||
mv get-exported-function-name ../bin/
|
||||
|
||||
@@ -1,23 +0,0 @@
|
||||
const path = require('path');
|
||||
|
||||
const fetch = require('node-fetch');
|
||||
const tar = require('tar');
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
|
||||
|
||||
const url = 'https://dl.google.com/go/go1.11.1.linux-amd64.tar.gz';
|
||||
|
||||
module.exports = async () => {
|
||||
const res = await fetch(url);
|
||||
const dir = await getWritableDirectory();
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to download: ${url}`);
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
res.body
|
||||
.on('error', reject)
|
||||
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
||||
.on('finish', () => resolve(path.join(dir, 'bin', 'go')));
|
||||
});
|
||||
};
|
||||
129
packages/now-go/go-helpers.js
Normal file
129
packages/now-go/go-helpers.js
Normal file
@@ -0,0 +1,129 @@
|
||||
const tar = require('tar');
|
||||
const execa = require('execa');
|
||||
const fetch = require('node-fetch');
|
||||
const { mkdirp } = require('fs-extra');
|
||||
const { dirname, join } = require('path');
|
||||
const debug = require('debug')('@now/go:go-helpers');
|
||||
|
||||
const archMap = new Map([['x64', 'amd64'], ['x86', '386']]);
|
||||
const platformMap = new Map([['win32', 'windows']]);
|
||||
|
||||
// Location where the `go` binary will be installed after `postinstall`
|
||||
const GO_DIR = join(__dirname, 'go');
|
||||
const GO_BIN = join(GO_DIR, 'bin/go');
|
||||
|
||||
const getPlatform = p => platformMap.get(p) || p;
|
||||
const getArch = a => archMap.get(a) || a;
|
||||
const getGoUrl = (version, platform, arch) => {
|
||||
const goArch = getArch(arch);
|
||||
const goPlatform = getPlatform(platform);
|
||||
const ext = platform === 'win32' ? 'zip' : 'tar.gz';
|
||||
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
||||
};
|
||||
|
||||
function getExportedFunctionName(filePath) {
|
||||
debug('Detecting handler name for %o', filePath);
|
||||
const bin = join(__dirname, 'get-exported-function-name');
|
||||
const args = [filePath];
|
||||
const name = execa.stdout(bin, args);
|
||||
debug('Detected exported name %o', filePath);
|
||||
return name;
|
||||
}
|
||||
|
||||
// Creates a `$GOPATH` directory tree, as per `go help gopath` instructions.
|
||||
// Without this, `go` won't recognize the `$GOPATH`.
|
||||
function createGoPathTree(goPath, platform, arch) {
|
||||
const tuple = `${getPlatform(platform)}_${getArch(arch)}`;
|
||||
debug('Creating GOPATH directory structure for %o (%s)', goPath, tuple);
|
||||
return Promise.all([
|
||||
mkdirp(join(goPath, 'bin')),
|
||||
mkdirp(join(goPath, 'pkg', tuple)),
|
||||
]);
|
||||
}
|
||||
|
||||
async function get({ src } = {}) {
|
||||
const args = ['get'];
|
||||
if (src) {
|
||||
debug('Fetching `go` dependencies for file %o', src);
|
||||
args.push(src);
|
||||
} else {
|
||||
debug('Fetching `go` dependencies for cwd %o', this.cwd);
|
||||
}
|
||||
await this(...args);
|
||||
}
|
||||
|
||||
async function build({ src, dest }) {
|
||||
debug('Building `go` binary %o -> %o', src, dest);
|
||||
let sources;
|
||||
if (Array.isArray(src)) {
|
||||
sources = src;
|
||||
} else {
|
||||
sources = [src];
|
||||
}
|
||||
await this('build', '-o', dest, ...sources);
|
||||
}
|
||||
|
||||
async function createGo(
|
||||
goPath,
|
||||
platform = process.platform,
|
||||
arch = process.arch,
|
||||
opts = {},
|
||||
goMod = false,
|
||||
) {
|
||||
const env = {
|
||||
...process.env,
|
||||
PATH: `${dirname(GO_BIN)}:${process.env.PATH}`,
|
||||
GOPATH: goPath,
|
||||
...opts.env,
|
||||
};
|
||||
|
||||
if (goMod) {
|
||||
env.GO111MODULE = 'on';
|
||||
}
|
||||
|
||||
function go(...args) {
|
||||
debug('Exec %o', `go ${args.join(' ')}`);
|
||||
return execa('go', args, { stdio: 'inherit', ...opts, env });
|
||||
}
|
||||
go.cwd = opts.cwd || process.cwd();
|
||||
go.get = get;
|
||||
go.build = build;
|
||||
go.goPath = goPath;
|
||||
await createGoPathTree(goPath, platform, arch);
|
||||
return go;
|
||||
}
|
||||
|
||||
async function downloadGo(
|
||||
dir = GO_DIR,
|
||||
version = '1.12',
|
||||
platform = process.platform,
|
||||
arch = process.arch,
|
||||
) {
|
||||
debug('Installing `go` v%s to %o for %s %s', version, dir, platform, arch);
|
||||
|
||||
const url = getGoUrl(version, platform, arch);
|
||||
debug('Downloading `go` URL: %o', url);
|
||||
const res = await fetch(url);
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to download: ${url} (${res.status})`);
|
||||
}
|
||||
|
||||
// TODO: use a zip extractor when `ext === "zip"`
|
||||
await mkdirp(dir);
|
||||
await new Promise((resolve, reject) => {
|
||||
res.body
|
||||
.on('error', reject)
|
||||
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
||||
.on('error', reject)
|
||||
.on('finish', resolve);
|
||||
});
|
||||
|
||||
return createGo(dir, platform, arch);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createGo,
|
||||
downloadGo,
|
||||
getExportedFunctionName,
|
||||
};
|
||||
@@ -1,126 +1,186 @@
|
||||
const path = require('path');
|
||||
const { mkdirp, readFile, writeFile } = require('fs-extra');
|
||||
const { join, dirname } = require('path');
|
||||
const {
|
||||
readFile, writeFile, pathExists, move,
|
||||
} = require('fs-extra');
|
||||
|
||||
const execa = require('execa');
|
||||
const { createLambda } = require('@now/build-utils/lambda.js');
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
|
||||
const download = require('@now/build-utils/fs/download.js');
|
||||
const downloadGit = require('lambda-git');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const downloadGoBin = require('./download-go-bin');
|
||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { createGo, getExportedFunctionName } = require('./go-helpers');
|
||||
|
||||
// creates a `$GOPATH` directory tree, as per
|
||||
// `go help gopath`'s instructions.
|
||||
// without this, Go won't recognize the `$GOPATH`
|
||||
async function createGoPathTree(goPath) {
|
||||
await mkdirp(path.join(goPath, 'bin'));
|
||||
await mkdirp(path.join(goPath, 'pkg', 'linux_amd64'));
|
||||
}
|
||||
|
||||
exports.config = {
|
||||
const config = {
|
||||
maxLambdaSize: '10mb',
|
||||
};
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
console.log('downloading files...');
|
||||
async function build({ files, entrypoint }) {
|
||||
console.log('Downloading user files...');
|
||||
|
||||
const gitPath = await getWritableDirectory();
|
||||
const goPath = await getWritableDirectory();
|
||||
const srcPath = path.join(goPath, 'src', 'lambda');
|
||||
const outDir = await getWritableDirectory();
|
||||
|
||||
await createGoPathTree(goPath);
|
||||
const [goPath, outDir] = await Promise.all([
|
||||
getWritableDirectory(),
|
||||
getWritableDirectory(),
|
||||
]);
|
||||
|
||||
const srcPath = join(goPath, 'src', 'lambda');
|
||||
const downloadedFiles = await download(files, srcPath);
|
||||
|
||||
console.log('downloading go binary...');
|
||||
const goBin = await downloadGoBin();
|
||||
|
||||
console.log('downloading git binary...');
|
||||
// downloads a git binary that works on Amazon Linux and sets
|
||||
// `process.env.GIT_EXEC_PATH` so `go(1)` can see it
|
||||
await downloadGit({ targetDirectory: gitPath });
|
||||
|
||||
const goEnv = {
|
||||
...process.env,
|
||||
GOOS: 'linux',
|
||||
GOARCH: 'amd64',
|
||||
GOPATH: goPath,
|
||||
};
|
||||
|
||||
console.log(`parsing AST for "${entrypoint}"`);
|
||||
let handlerFunctionName = '';
|
||||
console.log(`Parsing AST for "${entrypoint}"`);
|
||||
let parseFunctionName;
|
||||
try {
|
||||
handlerFunctionName = await execa.stdout(
|
||||
path.join(__dirname, 'bin', 'get-exported-function-name'),
|
||||
[downloadedFiles[entrypoint].fsPath],
|
||||
parseFunctionName = await getExportedFunctionName(
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
);
|
||||
} catch (err) {
|
||||
console.log(`failed to parse AST for "${entrypoint}"`);
|
||||
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (handlerFunctionName === '') {
|
||||
const e = new Error(
|
||||
`Could not find an exported function on "${entrypoint}"`,
|
||||
if (!parseFunctionName) {
|
||||
const err = new Error(
|
||||
`Could not find an exported function in "${entrypoint}"`,
|
||||
);
|
||||
console.log(e.message);
|
||||
throw e;
|
||||
console.log(err.message);
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log(
|
||||
`Found exported function "${handlerFunctionName}" on "${entrypoint}"`,
|
||||
);
|
||||
const handlerFunctionName = parseFunctionName.split(',')[0];
|
||||
|
||||
const origianlMainGoContents = await readFile(
|
||||
path.join(__dirname, 'main.go'),
|
||||
'utf8',
|
||||
console.log(
|
||||
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`,
|
||||
);
|
||||
const mainGoContents = origianlMainGoContents.replace(
|
||||
'__NOW_HANDLER_FUNC_NAME',
|
||||
handlerFunctionName,
|
||||
);
|
||||
// in order to allow the user to have `main.go`, we need our `main.go` to be called something else
|
||||
const mainGoFileName = 'main__now__go__.go';
|
||||
|
||||
// we need `main.go` in the same dir as the entrypoint,
|
||||
// otherwise `go build` will refuse to build
|
||||
const entrypointDirname = path.dirname(downloadedFiles[entrypoint].fsPath);
|
||||
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||
|
||||
// Go doesn't like to build files in different directories,
|
||||
// so now we place `main.go` together with the user code
|
||||
await writeFile(path.join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||
|
||||
console.log('installing dependencies');
|
||||
// `go get` will look at `*.go` (note we set `cwd`), parse
|
||||
// the `import`s and download any packages that aren't part of the stdlib
|
||||
try {
|
||||
await execa(goBin, ['get'], {
|
||||
env: goEnv,
|
||||
cwd: entrypointDirname,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
} catch (err) {
|
||||
console.log('failed to `go get`');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('running go build...');
|
||||
try {
|
||||
await execa(
|
||||
goBin,
|
||||
[
|
||||
'build',
|
||||
'-o',
|
||||
path.join(outDir, 'handler'),
|
||||
path.join(entrypointDirname, mainGoFileName),
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
],
|
||||
{ env: goEnv, cwd: entrypointDirname, stdio: 'inherit' },
|
||||
// check if package name other than main
|
||||
const packageName = parseFunctionName.split(',')[1];
|
||||
const isGoModExist = await pathExists(`${entrypointDirname}/go.mod`);
|
||||
if (packageName !== 'main') {
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
process.platform,
|
||||
process.arch,
|
||||
{
|
||||
cwd: entrypointDirname,
|
||||
},
|
||||
true,
|
||||
);
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
if (!isGoModExist) {
|
||||
try {
|
||||
go('mod', 'init', packageName);
|
||||
} catch (err) {
|
||||
console.log(`failed to \`go mod init ${packageName}\``);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const mainModGoFileName = 'main__mod__.go';
|
||||
const modMainGoContents = await readFile(
|
||||
join(__dirname, mainModGoFileName),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
let goPackageName = `${packageName}/${packageName}`;
|
||||
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
||||
|
||||
if (isGoModExist) {
|
||||
const goModContents = await readFile(
|
||||
`${entrypointDirname}/go.mod`,
|
||||
'utf8',
|
||||
);
|
||||
goPackageName = `${
|
||||
goModContents.split('\n')[0].split(' ')[1]
|
||||
}/${packageName}`;
|
||||
}
|
||||
|
||||
const mainModGoContents = modMainGoContents
|
||||
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
||||
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
||||
|
||||
// write main__mod__.go
|
||||
await writeFile(
|
||||
join(entrypointDirname, mainModGoFileName),
|
||||
mainModGoContents,
|
||||
);
|
||||
|
||||
// move user go file to folder
|
||||
try {
|
||||
await move(
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
`${join(entrypointDirname, packageName, entrypoint)}`,
|
||||
);
|
||||
} catch (err) {
|
||||
console.log('failed to move entry to package folder');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('tidy go.mod file');
|
||||
try {
|
||||
// ensure go.mod up-to-date
|
||||
await go('mod', 'tidy');
|
||||
} catch (err) {
|
||||
console.log('failed to `go mod tidy`');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('Running `go build`...');
|
||||
const destPath = join(outDir, 'handler');
|
||||
try {
|
||||
const src = [join(entrypointDirname, mainModGoFileName)];
|
||||
await go.build({ src, dest: destPath });
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
process.platform,
|
||||
process.arch,
|
||||
{
|
||||
cwd: entrypointDirname,
|
||||
},
|
||||
false,
|
||||
);
|
||||
const origianlMainGoContents = await readFile(
|
||||
join(__dirname, 'main.go'),
|
||||
'utf8',
|
||||
);
|
||||
const mainGoContents = origianlMainGoContents.replace(
|
||||
'__NOW_HANDLER_FUNC_NAME',
|
||||
handlerFunctionName,
|
||||
);
|
||||
|
||||
// in order to allow the user to have `main.go`,
|
||||
// we need our `main.go` to be called something else
|
||||
const mainGoFileName = 'main__now__go__.go';
|
||||
|
||||
// Go doesn't like to build files in different directories,
|
||||
// so now we place `main.go` together with the user code
|
||||
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||
|
||||
// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
|
||||
// and download any packages that aren't part of the stdlib
|
||||
try {
|
||||
await go.get();
|
||||
} catch (err) {
|
||||
console.log('failed to `go get`');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('Running `go build`...');
|
||||
const destPath = join(outDir, 'handler');
|
||||
try {
|
||||
const src = [
|
||||
join(entrypointDirname, mainGoFileName),
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
];
|
||||
await go.build({ src, dest: destPath });
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const lambda = await createLambda({
|
||||
@@ -133,4 +193,6 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
return {
|
||||
[entrypoint]: lambda,
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { config, build };
|
||||
|
||||
@@ -1,8 +1,8 @@
package main

import (
	now "../../utils/go/bridge"
	"net/http"
	now "github.com/zeit/now-builders/utils/go/bridge"
)

func main() {
packages/now-go/main__mod__.go (Normal file, 12 lines)
@@ -0,0 +1,12 @@
package main

import (
	"net/http"
	"__NOW_HANDLER_PACKAGE_NAME"

	now "github.com/zeit/now-builders/utils/go/bridge"
)

func main() {
	now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))
}
@@ -1,29 +1,27 @@
|
||||
{
|
||||
"name": "@now/go",
|
||||
"version": "0.2.11",
|
||||
"version": "0.3.1-canary.0",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-go"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "best -I test/*.js",
|
||||
"prepublish": "./build.sh"
|
||||
"postinstall": "node ./util/install"
|
||||
},
|
||||
"files": [
|
||||
"bin",
|
||||
"download-go-bin.js",
|
||||
"index.js",
|
||||
"main.go"
|
||||
"*.js",
|
||||
"main.go",
|
||||
"main__mod__.go",
|
||||
"util"
|
||||
],
|
||||
"dependencies": {
|
||||
"debug": "^4.1.1",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
"lambda-git": "^0.1.2",
|
||||
"mkdirp-promise": "5.0.1",
|
||||
"node-fetch": "^2.2.1",
|
||||
"tar": "4.4.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@zeit/best": "0.4.3",
|
||||
"rmfr": "2.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ func main() {
	if fn.Name.IsExported() == true {
		// we found the first exported function
		// we're done!
		fmt.Print(fn.Name.Name)
		fmt.Print(fn.Name.Name, ",", parsed.Name.Name)
		os.Exit(0)
	}
}
packages/now-go/util/install.js (Normal file, 18 lines)
@@ -0,0 +1,18 @@
const { join } = require('path');
const { downloadGo } = require('../go-helpers');

async function main() {
  // First download the `go` binary for this platform/arch.
  const go = await downloadGo();

  // Build the `get-exported-function-name` helper program.
  // `go get` is not necessary because the program has no external deps.
  const src = join(__dirname, 'get-exported-function-name.go');
  const dest = join(__dirname, '../get-exported-function-name');
  await go.build({ src, dest });
}

main().catch((err) => {
  console.error(err);
  process.exit(1);
});
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
|
||||
const FileBlob = require('@now/build-utils/file-blob.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { minify } = require('html-minifier');
|
||||
|
||||
const defaultOptions = {
|
||||
|
||||
@@ -1,10 +1,13 @@
|
||||
{
|
||||
"name": "@now/html-minifier",
|
||||
"version": "1.0.6",
|
||||
"version": "1.0.8-canary.0",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-html-minifier"
|
||||
},
|
||||
"dependencies": {
|
||||
"html-minifier": "3.5.21"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
const { Lambda } = require('@now/build-utils/lambda.js');
|
||||
const streamToBuffer = require('@now/build-utils/fs/stream-to-buffer.js');
|
||||
const { Lambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const streamToBuffer = require('@now/build-utils/fs/stream-to-buffer.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
exports.build = async ({ files, entrypoint, config }) => {
|
||||
if (!files[entrypoint]) throw new Error('Entrypoint not found in files');
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"name": "@now/lambda",
|
||||
"version": "0.4.8",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
"version": "0.4.10-canary.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-lambda"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
|
||||
@@ -7,7 +7,7 @@ const {
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
const buildUtilsUrl = '@canary';
|
||||
let builderUrl;
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const FileBlob = require('@now/build-utils/file-blob.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const unified = require('unified');
|
||||
const unifiedStream = require('unified-stream');
|
||||
const markdown = require('remark-parse');
|
||||
|
||||
@@ -1,6 +1,12 @@
|
||||
{
|
||||
"name": "@now/md",
|
||||
"version": "0.4.8",
|
||||
"version": "0.4.10-canary.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-md"
|
||||
},
|
||||
"dependencies": {
|
||||
"rehype-document": "^2.2.0",
|
||||
"rehype-format": "^2.3.0",
|
||||
@@ -10,9 +16,6 @@
|
||||
"unified": "^7.0.0",
|
||||
"unified-stream": "^1.0.2"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
}
|
||||
|
||||
@@ -7,7 +7,7 @@ const {
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
const buildUtilsUrl = '@canary';
|
||||
let builderUrl;
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
const download = require('@now/build-utils/fs/download.js');
|
||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const fs = require('fs');
|
||||
const { promisify } = require('util');
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const path = require('path');
|
||||
const { runNpmInstall } = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
const { runNpmInstall } = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
const writeFile = promisify(fs.writeFile);
|
||||
|
||||
@@ -15,7 +15,7 @@ exports.build = async ({ files, entrypoint, workPath }) => {
|
||||
const packageJson = { dependencies: { 'mdx-deck': '1.7.15' } };
|
||||
const packageJsonPath = path.join(workPath, 'package.json');
|
||||
await writeFile(packageJsonPath, JSON.stringify(packageJson));
|
||||
console.log('running npm install...');
|
||||
console.log('installing dependencies...');
|
||||
process.env.PUPPETEER_SKIP_CHROMIUM_DOWNLOAD = '1'; // TODO opts argument for runNpmInstall
|
||||
await runNpmInstall(path.dirname(packageJsonPath), [
|
||||
'--prod',
|
||||
|
||||
@@ -1,8 +1,11 @@
|
||||
{
|
||||
"name": "@now/mdx-deck",
|
||||
"version": "0.4.18-canary.0",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
"version": "0.4.19-canary.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-mdx-deck"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
|
||||
@@ -7,7 +7,7 @@ const {
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
const buildUtilsUrl = '@canary';
|
||||
let builderUrl;
|
||||
|
||||
|
||||
@@ -1,23 +1,31 @@
|
||||
const { createLambda } = require('@now/build-utils/lambda.js');
|
||||
const download = require('@now/build-utils/fs/download.js');
|
||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob');
|
||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const FileBlob = require('@now/build-utils/file-blob'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const path = require('path');
|
||||
const { readFile, writeFile, unlink } = require('fs.promised');
|
||||
const {
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const {
|
||||
readFile,
|
||||
writeFile,
|
||||
unlink: unlinkFile,
|
||||
remove: removePath,
|
||||
mkdirp,
|
||||
rename: renamePath,
|
||||
pathExists,
|
||||
} = require('fs-extra');
|
||||
const semver = require('semver');
|
||||
const nextLegacyVersions = require('./legacy-versions');
|
||||
const {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
moveEntryDirectoryToRoot,
|
||||
excludeLockFiles,
|
||||
normalizePackageJson,
|
||||
excludeStaticDirectory,
|
||||
onlyStaticDirectory,
|
||||
getNextConfig,
|
||||
} = require('./utils');
|
||||
|
||||
/** @typedef { import('@now/build-utils/file-ref').Files } Files */
|
||||
@@ -32,15 +40,17 @@ const {
|
||||
|
||||
/**
|
||||
* Read package.json from files
|
||||
* @param {DownloadedFiles} files
|
||||
* @param {string} entryPath
|
||||
*/
|
||||
async function readPackageJson(files) {
|
||||
if (!files['package.json']) {
|
||||
async function readPackageJson(entryPath) {
|
||||
const packagePath = path.join(entryPath, 'package.json');
|
||||
|
||||
try {
|
||||
return JSON.parse(await readFile(packagePath, 'utf8'));
|
||||
} catch (err) {
|
||||
console.log('package.json not found in entry');
|
||||
return {};
|
||||
}
|
||||
|
||||
const packageJsonPath = files['package.json'].fsPath;
|
||||
return JSON.parse(await readFile(packageJsonPath, 'utf8'));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -67,6 +77,36 @@ async function writeNpmRc(workPath, token) {
|
||||
);
|
||||
}
|
||||
|
||||
function getNextVersion(packageJson) {
|
||||
let nextVersion;
|
||||
if (packageJson.dependencies && packageJson.dependencies.next) {
|
||||
nextVersion = packageJson.dependencies.next;
|
||||
} else if (packageJson.devDependencies && packageJson.devDependencies.next) {
|
||||
nextVersion = packageJson.devDependencies.next;
|
||||
}
|
||||
return nextVersion;
|
||||
}
|
||||
|
||||
function isLegacyNext(nextVersion) {
|
||||
// If version is using the dist-tag instead of a version range
|
||||
if (nextVersion === 'canary' || nextVersion === 'latest') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the version is an exact match with the legacy versions
|
||||
if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
|
||||
// If no legacy version satisfies the range, it must be a newer release
|
||||
if (maxSatisfying === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '5mb',
|
||||
};
|
||||
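For orientation, a minimal, self-contained sketch of the decision made by getNextVersion and isLegacyNext above. The legacy list is truncated to a few hypothetical entries here; the real data is the full legacy-versions.js file added below.

const semver = require('semver');

// Hypothetical, truncated stand-in for legacy-versions.js
const legacyVersions = ['6.1.2', '7.0.0', '7.0.1', '7.0.2'];

function isLegacy(nextVersion) {
  // dist-tags always point at a current release, so they are never legacy
  if (nextVersion === 'canary' || nextVersion === 'latest') return false;
  // an exact match against a known legacy release
  if (legacyVersions.indexOf(nextVersion) !== -1) return true;
  // a range counts as legacy only if some legacy release satisfies it
  return semver.maxSatisfying(legacyVersions, nextVersion) !== null;
}

console.log(isLegacy('^8.0.0')); // false, no legacy release satisfies it: serverless build
console.log(isLegacy('7.0.2')); // true: legacy build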
@@ -80,125 +120,232 @@ exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
|
||||
console.log('downloading user files...');
|
||||
const entryDirectory = path.dirname(entrypoint);
|
||||
const filesOnlyEntryDirectory = includeOnlyEntryDirectory(
|
||||
files,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithEntryDirectoryRoot = moveEntryDirectoryToRoot(
|
||||
filesOnlyEntryDirectory,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithoutLockfiles = excludeLockFiles(filesWithEntryDirectoryRoot);
|
||||
const filesWithoutStaticDirectory = excludeStaticDirectory(
|
||||
filesWithoutLockfiles,
|
||||
);
|
||||
const downloadedFiles = await download(filesWithoutStaticDirectory, workPath);
|
||||
await download(files, workPath);
|
||||
const entryPath = path.join(workPath, entryDirectory);
|
||||
|
||||
console.log('normalizing package.json');
|
||||
const packageJson = normalizePackageJson(
|
||||
await readPackageJson(downloadedFiles),
|
||||
);
|
||||
console.log('normalized package.json result: ', packageJson);
|
||||
await writePackageJson(workPath, packageJson);
|
||||
if (await pathExists(path.join(entryPath, '.next'))) {
|
||||
console.warn(
|
||||
'WARNING: You should probably not upload the `.next` directory. See https://zeit.co/docs/v2/deployments/official-builders/next-js-now-next/ for more information.',
|
||||
);
|
||||
}
|
||||
|
||||
const pkg = await readPackageJson(entryPath);
|
||||
|
||||
const nextVersion = getNextVersion(pkg);
|
||||
if (!nextVersion) {
|
||||
throw new Error(
|
||||
'No Next.js version could be detected in "package.json". Make sure `"next"` is installed in "dependencies" or "devDependencies"',
|
||||
);
|
||||
}
|
||||
|
||||
const isLegacy = isLegacyNext(nextVersion);
|
||||
|
||||
console.log(`MODE: ${isLegacy ? 'legacy' : 'serverless'}`);
|
||||
|
||||
if (isLegacy) {
|
||||
try {
|
||||
await unlinkFile(path.join(entryPath, 'yarn.lock'));
|
||||
} catch (err) {
|
||||
console.log('no yarn.lock removed');
|
||||
}
|
||||
|
||||
try {
|
||||
await unlinkFile(path.join(entryPath, 'package-lock.json'));
|
||||
} catch (err) {
|
||||
console.log('no package-lock.json removed');
|
||||
}
|
||||
|
||||
console.warn(
|
||||
"WARNING: your application is being deployed in @now/next's legacy mode. http://err.sh/zeit/now-builders/now-next-legacy-mode",
|
||||
);
|
||||
|
||||
console.log('normalizing package.json');
|
||||
const packageJson = normalizePackageJson(pkg);
|
||||
console.log('normalized package.json result: ', packageJson);
|
||||
await writePackageJson(entryPath, packageJson);
|
||||
} else if (!pkg.scripts || !pkg.scripts['now-build']) {
|
||||
console.warn(
|
||||
'WARNING: "now-build" script not found. Adding \'"now-build": "next build"\' to "package.json" automatically',
|
||||
);
|
||||
pkg.scripts = {
|
||||
'now-build': 'next build',
|
||||
...(pkg.scripts || {}),
|
||||
};
|
||||
console.log('normalized package.json result: ', pkg);
|
||||
await writePackageJson(entryPath, pkg);
|
||||
}
|
||||
|
||||
if (process.env.NPM_AUTH_TOKEN) {
|
||||
console.log('found NPM_AUTH_TOKEN in environment, creating .npmrc');
|
||||
await writeNpmRc(workPath, process.env.NPM_AUTH_TOKEN);
|
||||
await writeNpmRc(entryPath, process.env.NPM_AUTH_TOKEN);
|
||||
}
|
||||
|
||||
console.log('running npm install...');
|
||||
await runNpmInstall(workPath, ['--prefer-offline']);
|
||||
console.log('installing dependencies...');
|
||||
await runNpmInstall(entryPath, ['--prefer-offline']);
|
||||
console.log('running user script...');
|
||||
await runPackageJsonScript(workPath, 'now-build');
|
||||
console.log('running npm install --production...');
|
||||
await runNpmInstall(workPath, ['--prefer-offline', '--production']);
|
||||
await runPackageJsonScript(entryPath, 'now-build');
|
||||
|
||||
if (isLegacy) {
|
||||
console.log('running npm install --production...');
|
||||
await runNpmInstall(entryPath, ['--prefer-offline', '--production']);
|
||||
}
|
||||
|
||||
if (process.env.NPM_AUTH_TOKEN) {
|
||||
await unlink(path.join(workPath, '.npmrc'));
|
||||
await unlinkFile(path.join(entryPath, '.npmrc'));
|
||||
}
|
||||
|
||||
const filesAfterBuild = await glob('**', workPath);
|
||||
|
||||
console.log('preparing lambda files...');
|
||||
let buildId;
|
||||
try {
|
||||
buildId = await readFile(path.join(workPath, '.next', 'BUILD_ID'), 'utf8');
|
||||
} catch (err) {
|
||||
console.error(
|
||||
'BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"',
|
||||
);
|
||||
throw new Error('Missing BUILD_ID');
|
||||
}
|
||||
const dotNextRootFiles = await glob('.next/*', workPath);
|
||||
const dotNextServerRootFiles = await glob('.next/server/*', workPath);
|
||||
const nodeModules = excludeFiles(
|
||||
await glob('node_modules/**', workPath),
|
||||
file => file.startsWith('node_modules/.cache'),
|
||||
);
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
};
|
||||
const nextFiles = {
|
||||
...nodeModules,
|
||||
...dotNextRootFiles,
|
||||
...dotNextServerRootFiles,
|
||||
...launcherFiles,
|
||||
};
|
||||
if (filesAfterBuild['next.config.js']) {
|
||||
nextFiles['next.config.js'] = filesAfterBuild['next.config.js'];
|
||||
}
|
||||
const pages = await glob(
|
||||
'**/*.js',
|
||||
path.join(workPath, '.next', 'server', 'static', buildId, 'pages'),
|
||||
);
|
||||
const launcherPath = path.join(__dirname, 'launcher.js');
|
||||
const launcherData = await readFile(launcherPath, 'utf8');
|
||||
|
||||
const lambdas = {};
|
||||
await Promise.all(
|
||||
Object.keys(pages).map(async (page) => {
|
||||
// These default pages don't have to be handled as they'd always 404
|
||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||
return;
|
||||
|
||||
if (isLegacy) {
|
||||
const filesAfterBuild = await glob('**', entryPath);
|
||||
|
||||
console.log('preparing lambda files...');
|
||||
let buildId;
|
||||
try {
|
||||
buildId = await readFile(
|
||||
path.join(entryPath, '.next', 'BUILD_ID'),
|
||||
'utf8',
|
||||
);
|
||||
} catch (err) {
|
||||
console.error(
|
||||
'BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"',
|
||||
);
|
||||
throw new Error('Missing BUILD_ID');
|
||||
}
|
||||
const dotNextRootFiles = await glob('.next/*', entryPath);
|
||||
const dotNextServerRootFiles = await glob('.next/server/*', entryPath);
|
||||
const nodeModules = excludeFiles(
|
||||
await glob('node_modules/**', entryPath),
|
||||
file => file.startsWith('node_modules/.cache'),
|
||||
);
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
};
|
||||
const nextFiles = {
|
||||
...nodeModules,
|
||||
...dotNextRootFiles,
|
||||
...dotNextServerRootFiles,
|
||||
...launcherFiles,
|
||||
};
|
||||
if (filesAfterBuild['next.config.js']) {
|
||||
nextFiles['next.config.js'] = filesAfterBuild['next.config.js'];
|
||||
}
|
||||
const pages = await glob(
|
||||
'**/*.js',
|
||||
path.join(entryPath, '.next', 'server', 'static', buildId, 'pages'),
|
||||
);
|
||||
const launcherPath = path.join(__dirname, 'legacy-launcher.js');
|
||||
const launcherData = await readFile(launcherPath, 'utf8');
|
||||
|
||||
await Promise.all(
|
||||
Object.keys(pages).map(async (page) => {
|
||||
// These default pages don't have to be handled as they'd always 404
|
||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pathname = page.replace(/\.js$/, '');
|
||||
const launcher = launcherData.replace(
|
||||
'PATHNAME_PLACEHOLDER',
|
||||
`/${pathname.replace(/(^|\/)index$/, '')}`,
|
||||
);
|
||||
|
||||
const pageFiles = {
|
||||
[`.next/server/static/${buildId}/pages/_document.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_document.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_app.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_app.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_error.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_error.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/${page}`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/${page}`
|
||||
],
|
||||
};
|
||||
|
||||
console.log(`Creating lambda for page: "${page}"...`);
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
...nextFiles,
|
||||
...pageFiles,
|
||||
'now__launcher.js': new FileBlob({ data: launcher }),
|
||||
},
|
||||
handler: 'now__launcher.launcher',
|
||||
runtime: 'nodejs8.10',
|
||||
});
|
||||
console.log(`Created lambda for page: "${page}"`);
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
console.log('preparing lambda files...');
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
'now__launcher.js': new FileFsRef({
|
||||
fsPath: path.join(__dirname, 'launcher.js'),
|
||||
}),
|
||||
};
|
||||
const pages = await glob(
|
||||
'**/*.js',
|
||||
path.join(entryPath, '.next', 'serverless', 'pages'),
|
||||
);
|
||||
|
||||
const pageKeys = Object.keys(pages);
|
||||
|
||||
if (pageKeys.length === 0) {
|
||||
const nextConfig = await getNextConfig(workPath, entryPath);
|
||||
|
||||
if (nextConfig != null) {
|
||||
console.info('Found next.config.js:');
|
||||
console.info(nextConfig);
|
||||
console.info();
|
||||
}
|
||||
|
||||
const pathname = page.replace(/\.js$/, '');
|
||||
const launcher = launcherData.replace(
|
||||
'PATHNAME_PLACEHOLDER',
|
||||
`/${pathname.replace(/(^|\/)index$/, '')}`,
|
||||
throw new Error(
|
||||
'No serverless pages were built. https://err.sh/zeit/now-builders/now-next-no-serverless-pages-built',
|
||||
);
|
||||
}
|
||||
|
||||
const pageFiles = {
|
||||
[`.next/server/static/${buildId}/pages/_document.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_document.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_app.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_app.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_error.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_error.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/${page}`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/${page}`
|
||||
],
|
||||
};
|
||||
// An optional assets folder that is placed alongside every page entrypoint
|
||||
const assets = await glob(
|
||||
'assets/**',
|
||||
path.join(entryPath, '.next', 'serverless'),
|
||||
);
|
||||
|
||||
console.log(`Creating lambda for page: "${page}"...`);
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
...nextFiles,
|
||||
...pageFiles,
|
||||
'now__launcher.js': new FileBlob({ data: launcher }),
|
||||
},
|
||||
handler: 'now__launcher.launcher',
|
||||
runtime: 'nodejs8.10',
|
||||
});
|
||||
console.log(`Created lambda for page: "${page}"`);
|
||||
}),
|
||||
);
|
||||
const assetKeys = Object.keys(assets);
|
||||
if (assetKeys.length > 0) {
|
||||
console.log('detected assets to be bundled with lambda:');
|
||||
assetKeys.forEach(assetFile => console.log(`\t${assetFile}`));
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
pageKeys.map(async (page) => {
|
||||
// These default pages don't have to be handled as they'd always 404
|
||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pathname = page.replace(/\.js$/, '');
|
||||
|
||||
console.log(`Creating lambda for page: "${page}"...`);
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
...launcherFiles,
|
||||
...assets,
|
||||
'page.js': pages[page],
|
||||
},
|
||||
handler: 'now__launcher.launcher',
|
||||
runtime: 'nodejs8.10',
|
||||
});
|
||||
console.log(`Created lambda for page: "${page}"`);
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
const nextStaticFiles = await glob(
|
||||
'**',
|
||||
path.join(workPath, '.next', 'static'),
|
||||
path.join(entryPath, '.next', 'static'),
|
||||
);
|
||||
const staticFiles = Object.keys(nextStaticFiles).reduce(
|
||||
(mappedFiles, file) => ({
|
||||
@@ -208,52 +355,46 @@ exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
{},
|
||||
);
|
||||
|
||||
const nextStaticDirectory = onlyStaticDirectory(filesWithoutLockfiles);
|
||||
const staticDirectoryFiles = Object.keys(nextStaticDirectory).reduce(
|
||||
(mappedFiles, file) => ({
|
||||
...mappedFiles,
|
||||
[path.join(entryDirectory, file)]: nextStaticDirectory[file],
|
||||
}),
|
||||
{},
|
||||
const staticDirectoryFiles = onlyStaticDirectory(
|
||||
includeOnlyEntryDirectory(files, entryDirectory),
|
||||
entryDirectory,
|
||||
);
|
||||
|
||||
return { ...lambdas, ...staticFiles, ...staticDirectoryFiles };
|
||||
};
|
||||
|
||||
exports.prepareCache = async ({
|
||||
files, entrypoint, cachePath, workPath,
|
||||
}) => {
|
||||
console.log('downloading user files...');
|
||||
exports.prepareCache = async ({ cachePath, workPath, entrypoint }) => {
|
||||
console.log('preparing cache ...');
|
||||
|
||||
const entryDirectory = path.dirname(entrypoint);
|
||||
const filesOnlyEntryDirectory = includeOnlyEntryDirectory(
|
||||
files,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithEntryDirectoryRoot = moveEntryDirectoryToRoot(
|
||||
filesOnlyEntryDirectory,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithoutLockfiles = excludeLockFiles(filesWithEntryDirectoryRoot);
|
||||
const filesWithoutStaticDirectory = excludeStaticDirectory(
|
||||
filesWithoutLockfiles,
|
||||
);
|
||||
await download(filesWithoutStaticDirectory, workPath);
|
||||
await download(await glob('.next/**', workPath), cachePath);
|
||||
await download(await glob('node_modules/**', workPath), cachePath);
|
||||
const entryPath = path.join(workPath, entryDirectory);
|
||||
const cacheEntryPath = path.join(cachePath, entryDirectory);
|
||||
|
||||
console.log('.next folder contents', await glob('.next/**', cachePath));
|
||||
console.log(
|
||||
'.cache folder contents',
|
||||
await glob('node_modules/.cache/**', cachePath),
|
||||
);
|
||||
const pkg = await readPackageJson(entryPath);
|
||||
const nextVersion = getNextVersion(pkg);
|
||||
const isLegacy = isLegacyNext(nextVersion);
|
||||
|
||||
console.log('running npm install...');
|
||||
await runNpmInstall(cachePath);
|
||||
if (isLegacy) {
|
||||
// skip caching legacy mode (swapping deps between all and production can get bug-prone)
|
||||
return {};
|
||||
}
|
||||
|
||||
console.log('clearing old cache ...');
|
||||
await removePath(cacheEntryPath);
|
||||
await mkdirp(cacheEntryPath);
|
||||
|
||||
console.log('copying build files for cache ...');
|
||||
await renamePath(entryPath, cacheEntryPath);
|
||||
|
||||
console.log('producing cache file manifest ...');
|
||||
|
||||
const cacheEntrypoint = path.relative(cachePath, cacheEntryPath);
|
||||
return {
|
||||
...(await glob('.next/records.json', cachePath)),
|
||||
...(await glob('.next/server/records.json', cachePath)),
|
||||
...(await glob('node_modules/**', cachePath)),
|
||||
...(await glob('yarn.lock', cachePath)),
|
||||
...(await glob(
|
||||
path.join(cacheEntrypoint, 'node_modules/{**,!.*,.yarn*}'),
|
||||
cachePath,
|
||||
)),
|
||||
...(await glob(path.join(cacheEntrypoint, 'package-lock.json'), cachePath)),
|
||||
...(await glob(path.join(cacheEntrypoint, 'yarn.lock'), cachePath)),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,19 +1,11 @@
|
||||
const { Server } = require('http');
|
||||
const next = require('next-server');
|
||||
const url = require('url');
|
||||
const { Bridge } = require('./now__bridge.js');
|
||||
|
||||
const bridge = new Bridge();
|
||||
bridge.port = 3000;
|
||||
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
const app = next({});
|
||||
const { Server } = require('http');
|
||||
const { Bridge } = require('./now__bridge.js');
|
||||
const page = require('./page.js');
|
||||
|
||||
const server = new Server((req, res) => {
|
||||
const parsedUrl = url.parse(req.url, true);
|
||||
app.render(req, res, 'PATHNAME_PLACEHOLDER', parsedUrl.query, parsedUrl);
|
||||
});
|
||||
server.listen(bridge.port);
|
||||
const server = new Server(page.render);
|
||||
const bridge = new Bridge(server);
|
||||
bridge.listen();
|
||||
|
||||
exports.launcher = bridge.launcher;
|
||||
|
||||
packages/now-next/legacy-launcher.js (new file, 18 lines)
@@ -0,0 +1,18 @@
|
||||
const { Server } = require('http');
|
||||
const next = require('next-server');
|
||||
const url = require('url');
|
||||
const { Bridge } = require('./now__bridge.js');
|
||||
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
const app = next({});
|
||||
|
||||
const server = new Server((req, res) => {
|
||||
const parsedUrl = url.parse(req.url, true);
|
||||
app.render(req, res, 'PATHNAME_PLACEHOLDER', parsedUrl.query, parsedUrl);
|
||||
});
|
||||
|
||||
const bridge = new Bridge(server);
|
||||
bridge.listen();
|
||||
|
||||
exports.launcher = bridge.launcher;
|
||||
packages/now-next/legacy-versions.js (new file, 336 lines)
@@ -0,0 +1,336 @@
|
||||
module.exports = [
|
||||
'0.1.0',
|
||||
'0.1.1',
|
||||
'0.2.0',
|
||||
'0.2.1',
|
||||
'0.2.2',
|
||||
'0.2.3',
|
||||
'0.2.4',
|
||||
'0.2.5',
|
||||
'0.2.6',
|
||||
'0.2.7',
|
||||
'0.2.8',
|
||||
'0.2.9',
|
||||
'0.2.10',
|
||||
'0.2.11',
|
||||
'0.2.12',
|
||||
'0.2.13',
|
||||
'0.2.14',
|
||||
'0.3.0',
|
||||
'0.3.1',
|
||||
'0.3.2',
|
||||
'0.3.3',
|
||||
'0.4.0',
|
||||
'0.4.1',
|
||||
'0.9.9',
|
||||
'0.9.10',
|
||||
'0.9.11',
|
||||
'1.0.0',
|
||||
'1.0.1',
|
||||
'1.0.2',
|
||||
'1.1.0',
|
||||
'1.1.1',
|
||||
'1.1.2',
|
||||
'1.2.0',
|
||||
'1.2.1',
|
||||
'1.2.2',
|
||||
'1.2.3',
|
||||
'2.0.0-beta.0',
|
||||
'2.0.0-beta.1',
|
||||
'2.0.0-beta.2',
|
||||
'2.0.0-beta.3',
|
||||
'2.0.0-beta.4',
|
||||
'2.0.0-beta.5',
|
||||
'2.0.0-beta.6',
|
||||
'2.0.0-beta.7',
|
||||
'2.0.0-beta.8',
|
||||
'2.0.0-beta.9',
|
||||
'2.0.0-beta.10',
|
||||
'2.0.0-beta.11',
|
||||
'2.0.0-beta.12',
|
||||
'2.0.0-beta.13',
|
||||
'2.0.0-beta.14',
|
||||
'2.0.0-beta.15',
|
||||
'2.0.0-beta.16',
|
||||
'2.0.0-beta.17',
|
||||
'2.0.0-beta.18',
|
||||
'2.0.0-beta.19',
|
||||
'2.0.0-beta.20',
|
||||
'2.0.0-beta.21',
|
||||
'2.0.0-beta.22',
|
||||
'2.0.0-beta.23',
|
||||
'2.0.0-beta.24',
|
||||
'2.0.0-beta.25',
|
||||
'2.0.0-beta.26',
|
||||
'2.0.0-beta.27',
|
||||
'2.0.0-beta.28',
|
||||
'2.0.0-beta.29',
|
||||
'2.0.0-beta.30',
|
||||
'2.0.0-beta.31',
|
||||
'2.0.0-beta.32',
|
||||
'2.0.0-beta.33',
|
||||
'2.0.0-beta.34',
|
||||
'2.0.0-beta.35',
|
||||
'2.0.0-beta.36',
|
||||
'2.0.0-beta.37',
|
||||
'2.0.0-beta.38',
|
||||
'2.0.0-beta.39',
|
||||
'2.0.0-beta.40',
|
||||
'2.0.0-beta.41',
|
||||
'2.0.0-beta.42',
|
||||
'2.0.0',
|
||||
'2.0.1',
|
||||
'2.1.0',
|
||||
'2.1.1',
|
||||
'2.2.0',
|
||||
'2.3.0-alpha1',
|
||||
'2.3.0',
|
||||
'2.3.1',
|
||||
'2.4.0',
|
||||
'2.4.1',
|
||||
'2.4.2',
|
||||
'2.4.3',
|
||||
'2.4.4',
|
||||
'2.4.5',
|
||||
'2.4.6',
|
||||
'2.4.7',
|
||||
'2.4.8',
|
||||
'2.4.9',
|
||||
'3.0.0-beta1',
|
||||
'3.0.0-beta10',
|
||||
'3.0.0-beta11',
|
||||
'3.0.0-beta12',
|
||||
'3.0.0-beta13',
|
||||
'3.0.0-beta14',
|
||||
'3.0.0-beta15',
|
||||
'3.0.0-beta16',
|
||||
'3.0.0-beta2',
|
||||
'3.0.0-beta3',
|
||||
'3.0.0-beta4',
|
||||
'3.0.0-beta5',
|
||||
'3.0.0-beta6',
|
||||
'3.0.0-beta7',
|
||||
'3.0.0-beta8',
|
||||
'3.0.0-beta9',
|
||||
'3.0.1-beta.1',
|
||||
'3.0.1-beta.2',
|
||||
'3.0.1-beta.3',
|
||||
'3.0.1-beta.4',
|
||||
'3.0.1-beta.5',
|
||||
'3.0.1-beta.6',
|
||||
'3.0.1-beta.7',
|
||||
'3.0.1-beta.8',
|
||||
'3.0.1-beta.9',
|
||||
'3.0.1-beta.10',
|
||||
'3.0.1-beta.11',
|
||||
'3.0.1-beta.12',
|
||||
'3.0.1-beta.13',
|
||||
'3.0.1-beta.14',
|
||||
'3.0.1-beta.15',
|
||||
'3.0.1-beta.16',
|
||||
'3.0.1-beta.17',
|
||||
'3.0.1-beta.18',
|
||||
'3.0.1-beta.19',
|
||||
'3.0.1-beta.20',
|
||||
'3.0.1-beta.21',
|
||||
'3.0.1',
|
||||
'3.0.2',
|
||||
'3.0.3',
|
||||
'3.0.4',
|
||||
'3.0.5',
|
||||
'3.0.6',
|
||||
'3.1.0',
|
||||
'3.2.0',
|
||||
'3.2.1',
|
||||
'3.2.2',
|
||||
'3.2.3',
|
||||
'4.0.0-beta.1',
|
||||
'4.0.0-beta.2',
|
||||
'4.0.0-beta.3',
|
||||
'4.0.0-beta.4',
|
||||
'4.0.0-beta.5',
|
||||
'4.0.0-beta.6',
|
||||
'4.0.0',
|
||||
'4.0.1',
|
||||
'4.0.2',
|
||||
'4.0.3',
|
||||
'4.0.4',
|
||||
'4.0.5',
|
||||
'4.1.0',
|
||||
'4.1.1',
|
||||
'4.1.2',
|
||||
'4.1.3',
|
||||
'4.1.4-canary.1',
|
||||
'4.1.4-canary.2',
|
||||
'4.1.4',
|
||||
'4.2.0-canary.1',
|
||||
'4.2.0-zones.2',
|
||||
'4.2.0',
|
||||
'4.2.1',
|
||||
'4.2.2',
|
||||
'4.2.3',
|
||||
'4.3.0-canary.1',
|
||||
'4.3.0-universal-alpha.1',
|
||||
'4.3.0-universal-alpha.2',
|
||||
'4.3.0-universal-alpha.3',
|
||||
'4.3.0-universal-alpha.4',
|
||||
'4.3.0-zones.1',
|
||||
'4.4.0-canary.2',
|
||||
'4.4.0-canary.3',
|
||||
'5.0.0-universal-alpha.1',
|
||||
'5.0.0-universal-alpha.2',
|
||||
'5.0.0-universal-alpha.3',
|
||||
'5.0.0-universal-alpha.4',
|
||||
'5.0.0-universal-alpha.5',
|
||||
'5.0.0-universal-alpha.6',
|
||||
'5.0.0-universal-alpha.7',
|
||||
'5.0.0-universal-alpha.8',
|
||||
'5.0.0-universal-alpha.9',
|
||||
'5.0.0-universal-alpha.10',
|
||||
'5.0.0-universal-alpha.11',
|
||||
'5.0.0-universal-alpha.12',
|
||||
'5.0.0-universal-alpha.13',
|
||||
'5.0.0-universal-alpha.14',
|
||||
'5.0.0-universal-alpha.15',
|
||||
'5.0.0-universal-alpha.16',
|
||||
'5.0.0-universal-alpha.17',
|
||||
'5.0.0-universal-alpha.18',
|
||||
'5.0.0-universal-alpha.19',
|
||||
'5.0.0-universal-alpha.20',
|
||||
'5.0.0-universal-alpha.21',
|
||||
'5.0.0-universal-alpha.22',
|
||||
'5.0.0-universal-alpha.23',
|
||||
'5.0.0-zones.1',
|
||||
'5.0.0',
|
||||
'5.0.1-canary.1',
|
||||
'5.0.1-canary.2',
|
||||
'5.0.1-canary.3',
|
||||
'5.0.1-canary.4',
|
||||
'5.0.1-canary.5',
|
||||
'5.0.1-canary.6',
|
||||
'5.0.1-canary.7',
|
||||
'5.0.1-canary.8',
|
||||
'5.0.1-canary.9',
|
||||
'5.0.1-canary.10',
|
||||
'5.0.1-canary.11',
|
||||
'5.0.1-canary.12',
|
||||
'5.0.1-canary.13',
|
||||
'5.0.1-canary.14',
|
||||
'5.0.1-canary.15',
|
||||
'5.0.1-canary.16',
|
||||
'5.0.1-canary.17',
|
||||
'5.1.0',
|
||||
'6.0.0-canary.1',
|
||||
'6.0.0-canary.2',
|
||||
'6.0.0-canary.3',
|
||||
'6.0.0-canary.4',
|
||||
'6.0.0-canary.5',
|
||||
'6.0.0-canary.6',
|
||||
'6.0.0-canary.7',
|
||||
'6.0.0',
|
||||
'6.0.1-canary.0',
|
||||
'6.0.1-canary.1',
|
||||
'6.0.1-canary.2',
|
||||
'6.0.1',
|
||||
'6.0.2-canary.0',
|
||||
'6.0.2',
|
||||
'6.0.3-canary.0',
|
||||
'6.0.3-canary.1',
|
||||
'6.0.3',
|
||||
'6.0.4-canary.0',
|
||||
'6.0.4-canary.1',
|
||||
'6.0.4-canary.2',
|
||||
'6.0.4-canary.3',
|
||||
'6.0.4-canary.4',
|
||||
'6.0.4-canary.5',
|
||||
'6.0.4-canary.6',
|
||||
'6.0.4-canary.7',
|
||||
'6.0.4-canary.8',
|
||||
'6.0.4-canary.9',
|
||||
'6.1.0-canary.0',
|
||||
'6.1.0',
|
||||
'6.1.1-canary.0',
|
||||
'6.1.1-canary.1',
|
||||
'6.1.1-canary.2',
|
||||
'6.1.1-canary.3',
|
||||
'6.1.1-canary.4',
|
||||
'6.1.1-canary.5',
|
||||
'6.1.1',
|
||||
'6.1.2',
|
||||
'7.0.0-canary.0',
|
||||
'7.0.0-canary.1',
|
||||
'7.0.0-canary.2',
|
||||
'7.0.0-canary.3',
|
||||
'7.0.0-canary.4',
|
||||
'7.0.0-canary.5',
|
||||
'7.0.0-canary.6',
|
||||
'7.0.0-canary.7',
|
||||
'7.0.0-canary.8',
|
||||
'7.0.0-canary.9',
|
||||
'7.0.0-canary.10',
|
||||
'7.0.0-canary.11',
|
||||
'7.0.0-canary.12',
|
||||
'7.0.0-canary.13',
|
||||
'7.0.0-canary.14',
|
||||
'7.0.0-canary.15',
|
||||
'7.0.0-canary.16',
|
||||
'7.0.0-canary.18',
|
||||
'7.0.0-canary.19',
|
||||
'7.0.0-canary.20',
|
||||
'7.0.0',
|
||||
'7.0.1-canary.0',
|
||||
'7.0.1-canary.1',
|
||||
'7.0.1-canary.2',
|
||||
'7.0.1-canary.3',
|
||||
'7.0.1-canary.4',
|
||||
'7.0.1-canary.5',
|
||||
'7.0.1-canary.6',
|
||||
'7.0.1',
|
||||
'7.0.2-alpha.1',
|
||||
'7.0.2-alpha.3',
|
||||
'7.0.2-canary.5',
|
||||
'7.0.2-canary.6',
|
||||
'7.0.2-canary.7',
|
||||
'7.0.2-canary.8',
|
||||
'7.0.2-canary.9',
|
||||
'7.0.2-canary.10',
|
||||
'7.0.2-canary.11',
|
||||
'7.0.2-canary.12',
|
||||
'7.0.2-canary.13',
|
||||
'7.0.2-canary.14',
|
||||
'7.0.2-canary.15',
|
||||
'7.0.2-canary.16',
|
||||
'7.0.2-canary.17',
|
||||
'7.0.2-canary.18',
|
||||
'7.0.2-canary.19',
|
||||
'7.0.2-canary.20',
|
||||
'7.0.2-canary.21',
|
||||
'7.0.2-canary.22',
|
||||
'7.0.2-canary.23',
|
||||
'7.0.2-canary.24',
|
||||
'7.0.2-canary.25',
|
||||
'7.0.2-canary.26',
|
||||
'7.0.2-canary.27',
|
||||
'7.0.2-canary.28',
|
||||
'7.0.2-canary.29',
|
||||
'7.0.2-canary.31',
|
||||
'7.0.2-canary.33',
|
||||
'7.0.2-canary.34',
|
||||
'7.0.2-canary.35',
|
||||
'7.0.2-canary.36',
|
||||
'7.0.2-canary.37',
|
||||
'7.0.2-canary.38',
|
||||
'7.0.2-canary.39',
|
||||
'7.0.2-canary.40',
|
||||
'7.0.2-canary.41',
|
||||
'7.0.2-canary.42',
|
||||
'7.0.2-canary.43',
|
||||
'7.0.2-canary.44',
|
||||
'7.0.2-canary.45',
|
||||
'7.0.2-canary.46',
|
||||
'7.0.2-canary.47',
|
||||
'7.0.2-canary.48',
|
||||
'7.0.2-canary.49',
|
||||
'7.0.2-canary.50',
|
||||
'7.0.2',
|
||||
];
|
||||
@@ -1,12 +1,16 @@
|
||||
{
|
||||
"name": "@now/next",
|
||||
"version": "0.0.81",
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "0.1.4",
|
||||
"execa": "^1.0.0",
|
||||
"fs.promised": "^3.0.0"
|
||||
"version": "0.1.3-canary.0",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-next"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "^1.0.0",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
"semver": "^5.6.0"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
|
||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
||||
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
|
||||
@@ -64,24 +65,6 @@ function includeOnlyEntryDirectory(files, entryDirectory) {
|
||||
return excludeFiles(files, matcher);
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves all files under the entry directory to the root directory
|
||||
* @param {Files} files
|
||||
* @param {string} entryDirectory
|
||||
* @returns {Files}
|
||||
*/
|
||||
function moveEntryDirectoryToRoot(files, entryDirectory) {
|
||||
if (entryDirectory === '.') {
|
||||
return files;
|
||||
}
|
||||
|
||||
function delegate(filePath) {
|
||||
return filePath.replace(new RegExp(`^${entryDirectory}/`), '');
|
||||
}
|
||||
|
||||
return rename(files, delegate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude package manager lockfiles from files
|
||||
* @param {Files} files
|
||||
@@ -99,26 +82,13 @@ function excludeLockFiles(files) {
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude the static directory from files
|
||||
* Include the static directory from files
|
||||
* @param {Files} files
|
||||
* @returns {Files}
|
||||
*/
|
||||
function excludeStaticDirectory(files) {
|
||||
function onlyStaticDirectory(files, entryDir) {
|
||||
function matcher(filePath) {
|
||||
return filePath.startsWith('static');
|
||||
}
|
||||
|
||||
return excludeFiles(files, matcher);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude the static directory from files
|
||||
* @param {Files} files
|
||||
* @returns {Files}
|
||||
*/
|
||||
function onlyStaticDirectory(files) {
|
||||
function matcher(filePath) {
|
||||
return !filePath.startsWith('static');
|
||||
return !filePath.startsWith(path.join(entryDir, 'static'));
|
||||
}
|
||||
|
||||
return excludeFiles(files, matcher);
|
||||
@@ -153,29 +123,42 @@ function normalizePackageJson(defaultPackageJson = {}) {
|
||||
'react-dom': 'latest',
|
||||
...dependencies, // override react if user provided it
|
||||
// next-server is forced to canary
|
||||
'next-server': 'canary',
|
||||
'next-server': 'v7.0.2-canary.49',
|
||||
},
|
||||
devDependencies: {
|
||||
...devDependencies,
|
||||
// next is forced to canary
|
||||
next: 'canary',
|
||||
next: 'v7.0.2-canary.49',
|
||||
// next-server is a dependency here
|
||||
'next-server': undefined,
|
||||
},
|
||||
scripts: {
|
||||
...defaultPackageJson.scripts,
|
||||
'now-build': 'next build --lambdas',
|
||||
'now-build': 'NODE_OPTIONS=--max_old_space_size=3000 next build --lambdas',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function getNextConfig(workPath, entryPath) {
|
||||
const entryConfig = path.join(entryPath, './next.config.js');
|
||||
if (await fs.pathExists(entryConfig)) {
|
||||
return fs.readFile(entryConfig, 'utf8');
|
||||
}
|
||||
|
||||
const workConfig = path.join(workPath, './next.config.js');
|
||||
if (await fs.pathExists(workConfig)) {
|
||||
return fs.readFile(workConfig, 'utf8');
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
moveEntryDirectoryToRoot,
|
||||
excludeLockFiles,
|
||||
normalizePackageJson,
|
||||
excludeStaticDirectory,
|
||||
onlyStaticDirectory,
|
||||
getNextConfig,
|
||||
};
|
||||
|
||||
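A small usage sketch for the getNextConfig helper above; the paths are hypothetical. It resolves to the raw text of the first next.config.js it finds (entry directory first, then the work path), or null when neither exists.

const path = require('path');
const { getNextConfig } = require('./utils');

(async () => {
  const workPath = '/tmp/work'; // hypothetical work directory
  const entryPath = path.join(workPath, 'frontend'); // hypothetical entry directory
  const nextConfig = await getNextConfig(workPath, entryPath);
  if (nextConfig != null) {
    console.log('Found next.config.js:');
    console.log(nextConfig); // raw file contents
  }
})();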
packages/now-node-bridge/.eslintrc (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"extends": ["prettier", "airbnb-base"],
|
||||
"rules": {
|
||||
"no-console": 0,
|
||||
"import/no-unresolved": 0,
|
||||
"import/no-dynamic-require": 0,
|
||||
"global-require": 0
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["test/**"],
|
||||
"rules": {
|
||||
"import/no-extraneous-dependencies": 0
|
||||
},
|
||||
"globals": {
|
||||
"describe": true,
|
||||
"it": true,
|
||||
"test": true,
|
||||
"expect": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
packages/now-node-bridge/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
|
||||
/bridge.*
|
||||
@@ -1,98 +0,0 @@
|
||||
const http = require('http');
|
||||
|
||||
function normalizeEvent(event) {
|
||||
if (event.Action === 'Invoke') {
|
||||
const invokeEvent = JSON.parse(event.body);
|
||||
|
||||
const {
|
||||
method, path, headers, encoding,
|
||||
} = invokeEvent;
|
||||
|
||||
let { body } = invokeEvent;
|
||||
|
||||
if (body) {
|
||||
if (encoding === 'base64') {
|
||||
body = Buffer.from(body, encoding);
|
||||
} else if (encoding === undefined) {
|
||||
body = Buffer.from(body);
|
||||
} else {
|
||||
throw new Error(`Unsupported encoding: ${encoding}`);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
method,
|
||||
path,
|
||||
headers,
|
||||
body,
|
||||
};
|
||||
}
|
||||
|
||||
const {
|
||||
httpMethod: method, path, headers, body,
|
||||
} = event;
|
||||
|
||||
return {
|
||||
method,
|
||||
path,
|
||||
headers,
|
||||
body,
|
||||
};
|
||||
}
|
||||
|
||||
class Bridge {
|
||||
constructor() {
|
||||
this.launcher = this.launcher.bind(this);
|
||||
}
|
||||
|
||||
launcher(event) {
|
||||
// eslint-disable-next-line consistent-return
|
||||
return new Promise((resolve, reject) => {
|
||||
if (this.userError) {
|
||||
console.error('Error while initializing entrypoint:', this.userError);
|
||||
return resolve({ statusCode: 500, body: '' });
|
||||
}
|
||||
|
||||
if (!this.port) {
|
||||
return resolve({ statusCode: 504, body: '' });
|
||||
}
|
||||
|
||||
const {
|
||||
method, path, headers, body,
|
||||
} = normalizeEvent(event);
|
||||
|
||||
const opts = {
|
||||
hostname: '127.0.0.1',
|
||||
port: this.port,
|
||||
path,
|
||||
method,
|
||||
headers,
|
||||
};
|
||||
|
||||
const req = http.request(opts, (res) => {
|
||||
const response = res;
|
||||
const respBodyChunks = [];
|
||||
response.on('data', chunk => respBodyChunks.push(Buffer.from(chunk)));
|
||||
response.on('error', error => reject(error));
|
||||
response.on('end', () => {
|
||||
delete response.headers.connection;
|
||||
delete response.headers['content-length'];
|
||||
|
||||
resolve({
|
||||
statusCode: response.statusCode,
|
||||
headers: response.headers,
|
||||
body: Buffer.concat(respBodyChunks).toString('base64'),
|
||||
encoding: 'base64',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
if (body) req.write(body);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
Bridge,
|
||||
};
|
||||
@@ -1,7 +1,26 @@
|
||||
{
|
||||
"name": "@now/node-bridge",
|
||||
"version": "0.1.9",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
"version": "1.0.1-canary.0",
|
||||
"license": "MIT",
|
||||
"main": "./index.js",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-node-bridge"
|
||||
},
|
||||
"files": [
|
||||
"bridge.*",
|
||||
"index.js"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "npm run build && jest",
|
||||
"prepublish": "npm run build"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/aws-lambda": "8.10.19",
|
||||
"@types/node": "11.9.4",
|
||||
"jest": "24.1.0",
|
||||
"typescript": "3.3.3"
|
||||
}
|
||||
}
|
||||
|
||||
packages/now-node-bridge/src/bridge.ts (new file, 183 lines)
@@ -0,0 +1,183 @@
|
||||
import { AddressInfo } from 'net';
|
||||
import { APIGatewayProxyEvent } from 'aws-lambda';
|
||||
import {
|
||||
Server,
|
||||
IncomingHttpHeaders,
|
||||
OutgoingHttpHeaders,
|
||||
request
|
||||
} from 'http';
|
||||
|
||||
interface NowProxyEvent {
|
||||
Action: string;
|
||||
body: string;
|
||||
}
|
||||
|
||||
export interface NowProxyRequest {
|
||||
isApiGateway?: boolean;
|
||||
method: string;
|
||||
path: string;
|
||||
headers: IncomingHttpHeaders;
|
||||
body: Buffer;
|
||||
}
|
||||
|
||||
export interface NowProxyResponse {
|
||||
statusCode: number;
|
||||
headers: OutgoingHttpHeaders;
|
||||
body: string;
|
||||
encoding: string;
|
||||
}
|
||||
|
||||
function normalizeNowProxyEvent(event: NowProxyEvent): NowProxyRequest {
|
||||
let bodyBuffer: Buffer | null;
|
||||
const { method, path, headers, encoding, body } = JSON.parse(event.body);
|
||||
|
||||
if (body) {
|
||||
if (encoding === 'base64') {
|
||||
bodyBuffer = Buffer.from(body, encoding);
|
||||
} else if (encoding === undefined) {
|
||||
bodyBuffer = Buffer.from(body);
|
||||
} else {
|
||||
throw new Error(`Unsupported encoding: ${encoding}`);
|
||||
}
|
||||
} else {
|
||||
bodyBuffer = Buffer.alloc(0);
|
||||
}
|
||||
|
||||
return { isApiGateway: false, method, path, headers, body: bodyBuffer };
|
||||
}
|
||||
|
||||
function normalizeAPIGatewayProxyEvent(
|
||||
event: APIGatewayProxyEvent
|
||||
): NowProxyRequest {
|
||||
let bodyBuffer: Buffer | null;
|
||||
const { httpMethod: method, path, headers, body } = event;
|
||||
|
||||
if (body) {
|
||||
if (event.isBase64Encoded) {
|
||||
bodyBuffer = Buffer.from(body, 'base64');
|
||||
} else {
|
||||
bodyBuffer = Buffer.from(body);
|
||||
}
|
||||
} else {
|
||||
bodyBuffer = Buffer.alloc(0);
|
||||
}
|
||||
|
||||
return { isApiGateway: true, method, path, headers, body: bodyBuffer };
|
||||
}
|
||||
|
||||
function normalizeEvent(
|
||||
event: NowProxyEvent | APIGatewayProxyEvent
|
||||
): NowProxyRequest {
|
||||
if ('Action' in event) {
|
||||
if (event.Action === 'Invoke') {
|
||||
return normalizeNowProxyEvent(event);
|
||||
} else {
|
||||
throw new Error(`Unexpected event.Action: ${event.Action}`);
|
||||
}
|
||||
} else {
|
||||
return normalizeAPIGatewayProxyEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
export class Bridge {
|
||||
private server: Server | null;
|
||||
private listening: Promise<AddressInfo>;
|
||||
private resolveListening: (info: AddressInfo) => void;
|
||||
|
||||
constructor(server?: Server) {
|
||||
this.server = null;
|
||||
if (server) {
|
||||
this.setServer(server);
|
||||
}
|
||||
this.launcher = this.launcher.bind(this);
|
||||
|
||||
// This is just to appease TypeScript strict mode, since it doesn't
|
||||
// understand that the Promise constructor is synchronous
|
||||
this.resolveListening = (info: AddressInfo) => {};
|
||||
|
||||
this.listening = new Promise(resolve => {
|
||||
this.resolveListening = resolve;
|
||||
});
|
||||
}
|
||||
|
||||
setServer(server: Server) {
|
||||
this.server = server;
|
||||
server.once('listening', () => {
|
||||
const addr = server.address();
|
||||
if (typeof addr === 'string') {
|
||||
throw new Error(`Unexpected string for \`server.address()\`: ${addr}`);
|
||||
} else if (!addr) {
|
||||
throw new Error('`server.address()` returned `null`');
|
||||
} else {
|
||||
this.resolveListening(addr);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
listen() {
|
||||
if (!this.server) {
|
||||
throw new Error('Server has not been set!');
|
||||
}
|
||||
|
||||
return this.server.listen({
|
||||
host: '127.0.0.1',
|
||||
port: 0
|
||||
});
|
||||
}
|
||||
|
||||
async launcher(
|
||||
event: NowProxyEvent | APIGatewayProxyEvent
|
||||
): Promise<NowProxyResponse> {
|
||||
const { port } = await this.listening;
|
||||
|
||||
const { isApiGateway, method, path, headers, body } = normalizeEvent(
|
||||
event
|
||||
);
|
||||
|
||||
const opts = {
|
||||
hostname: '127.0.0.1',
|
||||
port,
|
||||
path,
|
||||
method,
|
||||
headers
|
||||
};
|
||||
|
||||
// eslint-disable-next-line consistent-return
|
||||
return new Promise((resolve, reject) => {
|
||||
const req = request(opts, res => {
|
||||
const response = res;
|
||||
const respBodyChunks: Buffer[] = [];
|
||||
response.on('data', chunk => respBodyChunks.push(Buffer.from(chunk)));
|
||||
response.on('error', reject);
|
||||
response.on('end', () => {
|
||||
const bodyBuffer = Buffer.concat(respBodyChunks);
|
||||
delete response.headers.connection;
|
||||
|
||||
if (isApiGateway) {
|
||||
delete response.headers['content-length'];
|
||||
} else if (response.headers['content-length']) {
|
||||
response.headers['content-length'] = String(bodyBuffer.length);
|
||||
}
|
||||
|
||||
resolve({
|
||||
statusCode: response.statusCode || 200,
|
||||
headers: response.headers,
|
||||
body: bodyBuffer.toString('base64'),
|
||||
encoding: 'base64'
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
req.on('error', error => {
|
||||
setTimeout(() => {
|
||||
// this lets express print the true error of why the connection was closed.
|
||||
// it is probably 'Cannot set headers after they are sent to the client'
|
||||
reject(error);
|
||||
}, 2);
|
||||
});
|
||||
|
||||
if (body) req.write(body);
|
||||
req.end();
|
||||
});
|
||||
}
|
||||
}
|
||||
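A minimal sketch of how a launcher consumes the new Bridge class, mirroring the serverless launcher and the tests in this change; the request handler below is a placeholder.

const { Server } = require('http');
const { Bridge } = require('./bridge.js'); // compiled output of src/bridge.ts

// Placeholder handler; a real launcher wires the user's request listener here
const server = new Server((req, res) => res.end('ok'));

const bridge = new Bridge(server);
bridge.listen(); // binds 127.0.0.1 on an ephemeral port

// The Lambda entrypoint delegates every event to the bridge
exports.launcher = bridge.launcher;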
packages/now-node-bridge/test/bridge.test.js (new file, 71 lines)
@@ -0,0 +1,71 @@
|
||||
const assert = require('assert');
|
||||
const { Server } = require('http');
|
||||
const { Bridge } = require('../bridge');
|
||||
|
||||
test('port binding', async () => {
|
||||
const server = new Server();
|
||||
const bridge = new Bridge(server);
|
||||
bridge.listen();
|
||||
|
||||
// Test port binding
|
||||
const info = await bridge.listening;
|
||||
assert.equal(info.address, '127.0.0.1');
|
||||
assert.equal(typeof info.port, 'number');
|
||||
|
||||
server.close();
|
||||
});
|
||||
|
||||
test('`APIGatewayProxyEvent` normalizing', async () => {
|
||||
const server = new Server((req, res) => res.end(
|
||||
JSON.stringify({
|
||||
method: req.method,
|
||||
path: req.url,
|
||||
headers: req.headers,
|
||||
}),
|
||||
));
|
||||
const bridge = new Bridge(server);
|
||||
bridge.listen();
|
||||
const result = await bridge.launcher({
|
||||
httpMethod: 'GET',
|
||||
headers: { foo: 'bar' },
|
||||
path: '/apigateway',
|
||||
body: null,
|
||||
});
|
||||
assert.equal(result.encoding, 'base64');
|
||||
assert.equal(result.statusCode, 200);
|
||||
const body = JSON.parse(Buffer.from(result.body, 'base64').toString());
|
||||
assert.equal(body.method, 'GET');
|
||||
assert.equal(body.path, '/apigateway');
|
||||
assert.equal(body.headers.foo, 'bar');
|
||||
|
||||
server.close();
|
||||
});
|
||||
|
||||
test('`NowProxyEvent` normalizing', async () => {
|
||||
const server = new Server((req, res) => res.end(
|
||||
JSON.stringify({
|
||||
method: req.method,
|
||||
path: req.url,
|
||||
headers: req.headers,
|
||||
}),
|
||||
));
|
||||
const bridge = new Bridge(server);
|
||||
bridge.listen();
|
||||
const result = await bridge.launcher({
|
||||
Action: 'Invoke',
|
||||
body: JSON.stringify({
|
||||
method: 'POST',
|
||||
headers: { foo: 'baz' },
|
||||
path: '/nowproxy',
|
||||
body: 'body=1',
|
||||
}),
|
||||
});
|
||||
assert.equal(result.encoding, 'base64');
|
||||
assert.equal(result.statusCode, 200);
|
||||
const body = JSON.parse(Buffer.from(result.body, 'base64').toString());
|
||||
assert.equal(body.method, 'POST');
|
||||
assert.equal(body.path, '/nowproxy');
|
||||
assert.equal(body.headers.foo, 'baz');
|
||||
|
||||
server.close();
|
||||
});
|
||||
packages/now-node-bridge/tsconfig.json (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "es6",
|
||||
"module": "commonjs",
|
||||
"outDir": ".",
|
||||
"strict": true,
|
||||
"sourceMap": true,
|
||||
"declaration": true
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
}
|
||||
@@ -1,15 +1,15 @@
|
||||
const { createLambda } = require('@now/build-utils/lambda.js');
|
||||
const download = require('@now/build-utils/fs/download.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob.js');
|
||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js');
|
||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const FileBlob = require('@now/build-utils/file-blob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const fs = require('fs-extra');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const path = require('path');
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
const rename = require('@now/build-utils/fs/rename.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const {
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
||||
/** @typedef {{[filePath: string]: FileRef}} Files */
|
||||
@@ -37,7 +37,7 @@ async function downloadInstallAndBundle(
|
||||
console.log('downloading user files...');
|
||||
const downloadedFiles = await download(files, userPath);
|
||||
|
||||
console.log('running npm install for user...');
|
||||
console.log("installing dependencies for user's code...");
|
||||
const entrypointFsDirname = path.join(userPath, path.dirname(entrypoint));
|
||||
await runNpmInstall(entrypointFsDirname, npmArguments);
|
||||
|
||||
@@ -46,8 +46,9 @@ async function downloadInstallAndBundle(
|
||||
{
|
||||
'package.json': new FileBlob({
|
||||
data: JSON.stringify({
|
||||
license: 'UNLICENSED',
|
||||
dependencies: {
|
||||
'@zeit/ncc': '0.4.1',
|
||||
'@zeit/ncc': '0.16.0',
|
||||
},
|
||||
}),
|
||||
}),
|
||||
@@ -55,7 +56,7 @@ async function downloadInstallAndBundle(
|
||||
nccPath,
|
||||
);
|
||||
|
||||
console.log('running npm install for ncc...');
|
||||
console.log('installing dependencies for ncc...');
|
||||
await runNpmInstall(nccPath, npmArguments);
|
||||
return [downloadedFiles, userPath, nccPath, entrypointFsDirname];
|
||||
}
|
||||
@@ -63,7 +64,7 @@ async function downloadInstallAndBundle(
|
||||
async function compile(workNccPath, downloadedFiles, entrypoint) {
|
||||
const input = downloadedFiles[entrypoint].fsPath;
|
||||
const ncc = require(path.join(workNccPath, 'node_modules/@zeit/ncc'));
|
||||
const { code, assets } = await ncc(input);
|
||||
const { code, assets } = await ncc(input, { sourceMap: true });
|
||||
|
||||
const preparedFiles = {};
|
||||
const blob = new FileBlob({ data: code });
|
||||
@@ -71,7 +72,8 @@ async function compile(workNccPath, downloadedFiles, entrypoint) {
|
||||
preparedFiles[path.join('user', entrypoint)] = blob;
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const assetName of Object.keys(assets)) {
|
||||
const blob2 = new FileBlob({ data: assets[assetName] });
|
||||
const { source: data, permissions: mode } = assets[assetName];
|
||||
const blob2 = new FileBlob({ data, mode });
|
||||
preparedFiles[
|
||||
path.join('user', path.dirname(entrypoint), assetName)
|
||||
] = blob2;
|
||||
|
||||
@@ -4,22 +4,16 @@ const { Bridge } = require('./bridge.js');
|
||||
const bridge = new Bridge();
|
||||
|
||||
const saveListen = Server.prototype.listen;
|
||||
Server.prototype.listen = function listen(...args) {
|
||||
this.on('listening', function listening() {
|
||||
bridge.port = this.address().port;
|
||||
});
|
||||
saveListen.apply(this, args);
|
||||
Server.prototype.listen = function listen() {
|
||||
bridge.setServer(this);
|
||||
Server.prototype.listen = saveListen;
|
||||
return bridge.listen();
|
||||
};
|
||||
|
||||
try {
|
||||
if (!process.env.NODE_ENV) {
|
||||
process.env.NODE_ENV = 'production';
|
||||
}
|
||||
|
||||
// PLACEHOLDER
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
bridge.userError = error;
|
||||
if (!process.env.NODE_ENV) {
|
||||
process.env.NODE_ENV = 'production';
|
||||
}
|
||||
|
||||
// PLACEHOLDER
|
||||
|
||||
exports.launcher = bridge.launcher;
|
||||
|
||||
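The patched listen above exists so that an unmodified user server keeps working; a typical fixture-style entrypoint (with a hypothetical response body) looks like this.

const http = require('http');

const server = http.createServer((req, res) => {
  res.end('hello'); // hypothetical response body
});

// No port argument is needed: the overridden listen hands this server to the
// bridge, which binds 127.0.0.1 on an ephemeral port and then restores the
// original listen.
server.listen();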
@@ -1,12 +1,15 @@
|
||||
{
|
||||
"name": "@now/node-server",
|
||||
"version": "0.4.26-canary.1",
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "^0.1.9",
|
||||
"fs-extra": "7.0.1"
|
||||
"version": "0.5.2-canary.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-node-server"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "^1.0.1-canary.0",
|
||||
"fs-extra": "7.0.1"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const cowsay = require('cowsay/build/cowsay.umd.js').say;
|
||||
const cowsay = require('cowsay').say;
|
||||
const http = require('http');
|
||||
|
||||
// test that process.env is not replaced by webpack
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"cowsay": "^1.3.1"
|
||||
"cowsay": "1.3.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
const yodasay = require('yodasay/build/yodasay.umd.js').say;
|
||||
const yodasay = require('yodasay').say;
|
||||
const http = require('http');
|
||||
|
||||
// test that process.env is not replaced by webpack
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"yodasay": "^1.1.6"
|
||||
"yodasay": "1.1.9"
|
||||
}
|
||||
}
|
||||
|
||||
packages/now-node-server/test/fixtures/06-content-type/index.js (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@
|
||||
const http = require('http');
|
||||
|
||||
const server = http.createServer((req, resp) => {
|
||||
resp.end('RANDOMNESS_PLACEHOLDER');
|
||||
});
|
||||
|
||||
server.listen();
|
||||
packages/now-node-server/test/fixtures/06-content-type/now.json (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.js", "use": "@now/node-server" }
|
||||
]
|
||||
}
|
||||
packages/now-node-server/test/fixtures/06-content-type/probe.js (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@
|
||||
const assert = require('assert');
|
||||
|
||||
module.exports = async ({ deploymentUrl, fetch, randomness }) => {
|
||||
const resp = await fetch(`https://${deploymentUrl}/`);
|
||||
assert.equal(resp.headers.get('content-type'), null);
|
||||
assert.equal(await resp.text(), randomness);
|
||||
};
|
||||
@@ -2,7 +2,7 @@ const http = require('http');
|
||||
const isBundled = require('./is-bundled.js');
|
||||
|
||||
const server = http.createServer((req, resp) => {
|
||||
resp.end(isBundled() ? 'RANDOMNESS_PLACEHOLDER:with-bundle' : '');
|
||||
resp.end(isBundled() ? 'RANDOMNESS_PLACEHOLDER:with-bundle' : 'WITHOUT-BUNDLE-THAT-IS-WRONG');
|
||||
});
|
||||
|
||||
server.listen();
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
const path = require('path');
|
||||
|
||||
module.exports = () => path.basename(__filename) === 'index.js';
|
||||
// eslint-disable-next-line no-eval
|
||||
module.exports = () => path.basename(eval('__filename')) === 'index.js';
|
||||
|
||||
@@ -2,7 +2,7 @@ const http = require('http');
|
||||
const isBundled = require('./is-bundled.js');
|
||||
|
||||
const server = http.createServer((req, resp) => {
|
||||
resp.end(isBundled() ? '' : 'RANDOMNESS_PLACEHOLDER:without-bundle');
|
||||
resp.end(isBundled() ? 'WITH-BUNDLE-THAT-IS-WRONG' : 'RANDOMNESS_PLACEHOLDER:without-bundle');
|
||||
});
|
||||
|
||||
server.listen();
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
const path = require('path');
|
||||
|
||||
module.exports = () => path.basename(__filename) === 'index.js';
|
||||
// eslint-disable-next-line no-eval
|
||||
module.exports = () => path.basename(eval('__filename')) === 'index.js';
|
||||
|
||||
@@ -7,7 +7,7 @@ const {
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
const buildUtilsUrl = '@canary';
|
||||
let builderUrl;
|
||||
|
||||
|
||||
packages/now-node/.gitignore (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
|
||||
/dist
|
||||
/src/bridge.d.ts
|
||||
packages/now-node/build.sh (new executable file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
bridge_entrypoint="$(node -p 'require.resolve("@now/node-bridge")')"
|
||||
bridge_defs="$(dirname "$bridge_entrypoint")/bridge.d.ts"
|
||||
|
||||
if [ ! -e "$bridge_defs" ]; then
|
||||
yarn install
|
||||
fi
|
||||
|
||||
cp -v "$bridge_defs" src
|
||||
tsc
|
||||
@@ -1,22 +0,0 @@
|
||||
const { Server } = require('http');
|
||||
const { Bridge } = require('./bridge.js');
|
||||
|
||||
const bridge = new Bridge();
|
||||
bridge.port = 3000;
|
||||
let listener;
|
||||
|
||||
try {
|
||||
if (!process.env.NODE_ENV) {
|
||||
process.env.NODE_ENV = 'production';
|
||||
}
|
||||
|
||||
// PLACEHOLDER
|
||||
} catch (error) {
|
||||
console.error(error);
|
||||
bridge.userError = error;
|
||||
}
|
||||
|
||||
const server = new Server(listener);
|
||||
server.listen(bridge.port);
|
||||
|
||||
exports.launcher = bridge.launcher;
|
||||
@@ -1,14 +1,28 @@
{
  "name": "@now/node",
  "version": "0.4.28-canary.0",
  "version": "0.5.2-canary.3",
  "license": "MIT",
  "main": "./dist/index",
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now-builders.git",
    "directory": "packages/now-node"
  },
  "dependencies": {
    "@now/node-bridge": "^0.1.9",
    "@now/node-bridge": "^1.0.1-canary.0",
    "fs-extra": "7.0.1"
  },
  "peerDependencies": {
    "@now/build-utils": ">=0.0.1"
  },
  "scripts": {
    "test": "jest"
    "build": "./build.sh",
    "test": "npm run build && jest",
    "prepublish": "npm run build"
  },
  "files": [
    "dist"
  ],
  "devDependencies": {
    "@types/node": "11.9.4",
    "jest": "24.1.0",
    "typescript": "3.3.3"
  }
}

@@ -1,14 +1,14 @@
const { createLambda } = require('@now/build-utils/lambda.js');
const download = require('@now/build-utils/fs/download.js');
const FileBlob = require('@now/build-utils/file-blob.js');
const FileFsRef = require('@now/build-utils/file-fs-ref.js');
const fs = require('fs-extra');
const glob = require('@now/build-utils/fs/glob.js');
const path = require('path');
const {
import { join, dirname } from 'path';
import { remove, readFile } from 'fs-extra';
import * as glob from '@now/build-utils/fs/glob.js';
import * as download from '@now/build-utils/fs/download.js';
import * as FileBlob from '@now/build-utils/file-blob.js';
import * as FileFsRef from '@now/build-utils/file-fs-ref.js';
import { createLambda } from '@now/build-utils/lambda.js';
import {
  runNpmInstall,
  runPackageJsonScript,
} = require('@now/build-utils/fs/run-user-scripts.js');
  runPackageJsonScript
} from '@now/build-utils/fs/run-user-scripts.js';

/** @typedef { import('@now/build-utils/file-ref') } FileRef */
/** @typedef {{[filePath: string]: FileRef}} Files */
@@ -27,16 +27,16 @@ const {
 */
async function downloadInstallAndBundle(
  { files, entrypoint, workPath },
  { npmArguments = [] } = {},
  { npmArguments = [] } = {}
) {
  const userPath = path.join(workPath, 'user');
  const nccPath = path.join(workPath, 'ncc');
  const userPath = join(workPath, 'user');
  const nccPath = join(workPath, 'ncc');

  console.log('downloading user files...');
  const downloadedFiles = await download(files, userPath);

  console.log('running npm install for user...');
  const entrypointFsDirname = path.join(userPath, path.dirname(entrypoint));
  console.log("installing dependencies for user's code...");
  const entrypointFsDirname = join(userPath, dirname(entrypoint));
  await runNpmInstall(entrypointFsDirname, npmArguments);

  console.log('writing ncc package.json...');
@@ -44,56 +44,56 @@ async function downloadInstallAndBundle(
    {
      'package.json': new FileBlob({
        data: JSON.stringify({
          license: 'UNLICENSED',
          dependencies: {
            '@zeit/ncc': '0.4.1',
          },
        }),
      }),
            '@zeit/ncc': '0.16.0',
          }
        })
      })
    },
    nccPath,
    nccPath
  );

  console.log('running npm install for ncc...');
  console.log('installing dependencies for ncc...');
  await runNpmInstall(nccPath, npmArguments);
  return [downloadedFiles, nccPath, entrypointFsDirname];
}

async function compile(workNccPath, downloadedFiles, entrypoint) {
async function compile(workNccPath: string, downloadedFiles, entrypoint: string) {
  const input = downloadedFiles[entrypoint].fsPath;
  const ncc = require(path.join(workNccPath, 'node_modules/@zeit/ncc'));
  const ncc = require(join(workNccPath, 'node_modules/@zeit/ncc'));
  const { code, assets } = await ncc(input);

  const preparedFiles = {};
  const blob = new FileBlob({ data: code });
  // move all user code to 'user' subdirectory
  preparedFiles[path.join('user', entrypoint)] = blob;
  preparedFiles[join('user', entrypoint)] = blob;
  // eslint-disable-next-line no-restricted-syntax
  for (const assetName of Object.keys(assets)) {
    const blob2 = new FileBlob({ data: assets[assetName] });
    preparedFiles[
      path.join('user', path.dirname(entrypoint), assetName)
    ] = blob2;
    const { source: data, permissions: mode } = assets[assetName];
    const blob2 = new FileBlob({ data, mode });
    preparedFiles[join('user', dirname(entrypoint), assetName)] = blob2;
  }

  return preparedFiles;
}

exports.config = {
  maxLambdaSize: '5mb',
export const config = {
  maxLambdaSize: '5mb'
};

/**
 * @param {BuildParamsType} buildParams
 * @returns {Promise<Files>}
 */
exports.build = async ({ files, entrypoint, workPath }) => {
export async function build({ files, entrypoint, workPath }) {
  const [
    downloadedFiles,
    workNccPath,
    entrypointFsDirname,
    entrypointFsDirname
  ] = await downloadInstallAndBundle(
    { files, entrypoint, workPath },
    { npmArguments: ['--prefer-offline'] },
    { npmArguments: ['--prefer-offline'] }
  );

  console.log('running user script...');
@@ -101,36 +101,34 @@ exports.build = async ({ files, entrypoint, workPath }) => {

  console.log('compiling entrypoint with ncc...');
  const preparedFiles = await compile(workNccPath, downloadedFiles, entrypoint);
  const launcherPath = path.join(__dirname, 'launcher.js');
  let launcherData = await fs.readFile(launcherPath, 'utf8');
  const launcherPath = join(__dirname, 'launcher.js');
  let launcherData = await readFile(launcherPath, 'utf8');

  launcherData = launcherData.replace(
    '// PLACEHOLDER',
    [
      'process.chdir("./user");',
      `listener = require("./${path.join('user', entrypoint)}");`,
      'if (listener.default) listener = listener.default;',
    ].join(' '),
      `listener = require("./${join('user', entrypoint)}");`,
      'if (listener.default) listener = listener.default;'
    ].join(' ')
  );

  const launcherFiles = {
    'launcher.js': new FileBlob({ data: launcherData }),
    'bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
    'bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') })
  };

  const lambda = await createLambda({
    files: { ...preparedFiles, ...launcherFiles },
    handler: 'launcher.launcher',
    runtime: 'nodejs8.10',
    runtime: 'nodejs8.10'
  });

  return { [entrypoint]: lambda };
};
}

exports.prepareCache = async ({
  files, entrypoint, workPath, cachePath,
}) => {
  await fs.remove(workPath);
export async function prepareCache({ files, entrypoint, workPath, cachePath }) {
  await remove(workPath);
  await downloadInstallAndBundle({ files, entrypoint, workPath: cachePath });

  return {
@@ -139,6 +137,6 @@ exports.prepareCache = async ({
    ...(await glob('user/yarn.lock', cachePath)),
    ...(await glob('ncc/node_modules/**', cachePath)),
    ...(await glob('ncc/package-lock.json', cachePath)),
    ...(await glob('ncc/yarn.lock', cachePath)),
    ...(await glob('ncc/yarn.lock', cachePath))
  };
};
}
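For context on the asset-handling change in compile() above: with the @zeit/ncc upgrade, each entry in the assets map carries both contents and file permissions, which the diff destructures into FileBlob's data and mode options. A minimal sketch of that mapping in isolation (FileBlob is passed in as a parameter to avoid assuming its import path; the asset shape follows the diff and is otherwise an assumption):

const { join, dirname } = require('path');

// Sketch, assuming each asset looks like { source: Buffer, permissions: number }.
function mapAssets(assets, entrypoint, FileBlob) {
  const prepared = {};
  for (const assetName of Object.keys(assets)) {
    const { source: data, permissions: mode } = assets[assetName];
    // Keep every asset next to the user's entrypoint under the 'user'
    // subdirectory, mirroring where the bundled code itself is placed.
    prepared[join('user', dirname(entrypoint), assetName)] = new FileBlob({ data, mode });
  }
  return prepared;
}
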
packages/now-node/src/launcher.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
import { Server } from 'http';
import { Bridge } from './bridge';

let listener;

if (!process.env.NODE_ENV) {
  process.env.NODE_ENV = 'production';
}

// PLACEHOLDER

const server = new Server(listener);
const bridge = new Bridge(server);
bridge.listen();

exports.launcher = bridge.launcher;
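Tying the new src/launcher.ts to the placeholder substitution shown in the builder above: at build time '// PLACEHOLDER' is replaced with the chdir/require snippet, so for an entrypoint named index.js (the entrypoint name here is only an example) the emitted launcher would look roughly like this:

import { Server } from 'http';
import { Bridge } from './bridge';

let listener;

if (!process.env.NODE_ENV) {
  process.env.NODE_ENV = 'production';
}

// Injected by the builder in place of '// PLACEHOLDER':
process.chdir('./user');
listener = require('./user/index.js');
if (listener.default) listener = listener.default;

const server = new Server(listener);
const bridge = new Bridge(server);
bridge.listen();

exports.launcher = bridge.launcher;
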
@@ -1,4 +1,4 @@
const cowsay = require('cowsay/build/cowsay.umd.js').say;
const cowsay = require('cowsay').say;

// test that process.env is not replaced by webpack
process.env.NODE_ENV = 'development';

@@ -1,5 +1,5 @@
{
  "dependencies": {
    "cowsay": "^1.3.1"
    "cowsay": "1.3.1"
  }
}

@@ -1,4 +1,4 @@
const yodasay = require('yodasay/build/yodasay.umd.js').say;
const yodasay = require('yodasay').say;

// test that process.env is not replaced by webpack
process.env.NODE_ENV = 'development';

@@ -1,5 +1,5 @@
{
  "dependencies": {
    "yodasay": "^1.1.6"
    "yodasay": "1.1.9"
  }
}

packages/now-node/test/fixtures/07-content-length/now.json (new file, vendored, 8 lines)
@@ -0,0 +1,8 @@
{
  "version": 2,
  "builds": [
    { "src": "test1.js", "use": "@now/node" },
    { "src": "test2.js", "use": "@now/node" },
    { "src": "test3.js", "use": "@now/node" }
  ]
}
packages/now-node/test/fixtures/07-content-length/probe.js (new file, vendored, 31 lines)
@@ -0,0 +1,31 @@
const assert = require('assert');

async function test1({ deploymentUrl, fetch, randomness }) {
  const bodyMustBe = `${randomness}:content-length`;
  const resp = await fetch(`https://${deploymentUrl}/test1.js`);
  assert.equal(resp.status, 401);
  assert.equal(await resp.text(), bodyMustBe);
  assert.equal(resp.headers.get('content-length'), bodyMustBe.length);
}

async function test2({ deploymentUrl, fetch }) {
  const bodyMustBe = '';
  const resp = await fetch(`https://${deploymentUrl}/test2.js`);
  assert.equal(resp.status, 401);
  assert.equal(await resp.text(), bodyMustBe);
  assert.equal(resp.headers.get('content-length'), bodyMustBe.length);
}

async function test3({ deploymentUrl, fetch, randomness }) {
  const bodyMustBe = `${randomness}:content-length`;
  const resp = await fetch(`https://${deploymentUrl}/test3.js`);
  assert.equal(resp.status, 401);
  assert.equal(await resp.text(), bodyMustBe);
  assert.equal(resp.headers.get('content-length'), null);
}

module.exports = async ({ deploymentUrl, fetch, randomness }) => {
  await test1({ deploymentUrl, fetch, randomness });
  await test2({ deploymentUrl, fetch, randomness });
  await test3({ deploymentUrl, fetch, randomness });
};
packages/now-node/test/fixtures/07-content-length/test1.js (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
module.exports = (_, resp) => {
  resp.writeHead(401, { 'content-length': 4242 });
  resp.end(`${process.env.RANDOMNESS_ENV_VAR}:content-length`);
};

packages/now-node/test/fixtures/07-content-length/test2.js (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
module.exports = (_, resp) => {
  resp.writeHead(401, { 'content-length': 2 });
  resp.end();
};

packages/now-node/test/fixtures/07-content-length/test3.js (new file, vendored, 4 lines)
@@ -0,0 +1,4 @@
module.exports = (_, resp) => {
  resp.writeHead(401);
  resp.end(`${process.env.RANDOMNESS_ENV_VAR}:content-length`);
};
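Read together, the three 07-content-length fixtures appear to probe how the bridge normalizes the Content-Length header: a wrong explicit value (test1) ends up matching the body that is actually sent, an explicit value with an empty body (test2) ends up as 0, and when the handler sets no value (test3) the response carries no Content-Length header at all. A minimal local check of the first case, assuming node-fetch and an already-deployed URL (both assumptions, not part of the diff):

const assert = require('assert');
const fetch = require('node-fetch');

async function checkTest1(deploymentUrl, randomness) {
  const expectedBody = `${randomness}:content-length`;
  const resp = await fetch(`https://${deploymentUrl}/test1.js`);
  const body = await resp.text();
  // The handler claimed 'content-length: 4242', but the header on the wire
  // is expected to match the bytes that were actually sent.
  assert.strictEqual(resp.status, 401);
  assert.strictEqual(body, expectedBody);
  assert.strictEqual(Number(resp.headers.get('content-length')), Buffer.byteLength(expectedBody));
}
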
@@ -7,7 +7,7 @@ const {
  testDeployment,
} = require('../../../test/lib/deployment/test-deployment.js');

jest.setTimeout(2 * 60 * 1000);
jest.setTimeout(4 * 60 * 1000);
const buildUtilsUrl = '@canary';
let builderUrl;

packages/now-node/tsconfig.json (new file, 15 lines)
@@ -0,0 +1,15 @@
{
  "compilerOptions": {
    "target": "es6",
    "module": "commonjs",
    "outDir": "dist",
    "sourceMap": false,
    "declaration": false
  },
  "include": [
    "src/**/*"
  ],
  "exclude": [
    "node_modules"
  ]
}
@@ -1,4 +1,4 @@
const FileBlob = require('@now/build-utils/file-blob.js');
const FileBlob = require('@now/build-utils/file-blob.js'); // eslint-disable-line import/no-extraneous-dependencies
const OptiPng = require('optipng');
const pipe = require('multipipe');

@@ -1,11 +1,14 @@
{
  "name": "@now/optipng",
  "version": "0.4.7",
  "version": "0.4.9-canary.0",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now-builders.git",
    "directory": "packages/now-optipng"
  },
  "dependencies": {
    "multipipe": "2.0.3",
    "optipng": "1.1.0"
  },
  "peerDependencies": {
    "@now/build-utils": ">=0.0.1"
  }
}

packages/now-php-bridge/.npmignore (new file, 1 line)
@@ -0,0 +1 @@
/build
Some files were not shown because too many files have changed in this diff.