Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 04:22:13 +00:00)

Compare commits: @now/php@0 ... @now/php@0 (192 commits)
Commit SHAs (abbreviated):

77d445af71 79251ad180 a215dc9103 ea7d8f0f6c 2232efc984 b64ce0f3c0 74233d50ad 8aebec9fc3
54584b7763 d163fcbd71 04c2996c76 2b69b898ed 846aa11d6a a314a74479 40f029a72c 493d8a778f
cb5dcb658f d77287d07b 61d66bd957 ae73df9e3c cb09ae5bbf a28eeacdaa dd9d46d555 4472331ee0
ac69836b44 15949a4ab4 697ada9d73 cafbe30fa3 583ebcc526 52d1bd410c 11d0753bc1 538710fe56
2828c89e8d e40b45a939 38ba8a36fc 0323c505a3 17ee07f4f6 0a6ada77ac 4d817dd67d 9682a7cc0b
3456f23b3e 800ca2cb0e ba54b4d706 e9482d66a9 401b669363 a2a0ede1f6 3c9fcff743 e5aa526583
822b0ee3de d612e46233 77ee10cead fb2029c464 3b15755054 4f65cc3aa8 9936e35280 a04fd242b8
17bc6174a9 a7c2d9648a faa5ea9e21 c52f30c898 d675edf1dc f85c4f496f d52d7904c2 79232024bd
660b787bc3 2dbf983ddb 0866ba9391 d259a722a0 bf77c51f64 062b78130c fa70bc50cb 08e22b35d1
9d8f3315a1 a737a99a9d ee92d92c9f 34d3ebd289 785f187e5d 44449f474e f44dae7f39 06f973f641
47bb47804e 5df692979a cd02d5390f c93fd416c4 431db7a62d 6f86c70313 0923fc9200 787675f462
977615720a 1a4e64cd27 c9fc255002 e83d4d4249 d2ca763079 2a95388f89 be9fedfdc4 f0dee65f69
5514753c07 7028556919 a1f24853fc 9804e82f8f e96596634b 495c193a40 9d77d89513 bcffe0db91
228892fe9f c7d434c81d 66bf4aa25a f328a53a3f 168b4cf2bb a91e3da7d8 756c452203 80db1937f8
d9df4b0929 08c52ebbf2 fe8ead391a 5d45f2df31 1385c1ca48 4c63009123 389c85cef5 227c239bbc
b93736132a 26849422ff 31dd1ca491 04a1ce89b3 3e247fd16d 9e127421ad 337d0cb1ed ad3cdf46f4
9a93713158 3392ef5636 9236e2b584 ffc4686ab9 1ff4b6bb4b 0e7d8c3ff1 86885b8c38 8563f2b8b5
c36938bd95 957ab6c4e3 069e9b4052 0b06704be6 cf5a49e699 b671b23d8d fa44c23225 ba9da3cec5
0909aaf2b3 e276342a6f f6e1bdf0a1 2c2189b661 61522e2bc7 a20f3a5920 a5e58c2a8d a58451548f
8e846c50b5 7613dcf68b bea499cad3 0f20a0dbbf 1ea30588a3 a3b670675a c4374fe5a8 c3983ba1c2
ae1be4b243 b4b1131619 5fd923a7e7 f4e95eb59c a6cdaccd34 4f5fe8eba8 7239013989 75000ee334
5319610d59 6df0d69afa 65b191f6b9 115f62bbba cdf6a98a18 bb75dfd993 b63062cfc5 5ad6dabd96
30060bee07 dd48a1c6d3 24d45736f1 d839fdbe81 cfdcd2f8d0 b3cac2b372 9d27d69656 04197b250c
ce6f43eae7 2cfafe4054 b195949881 b7c47fa587 f3bcefe822 7a481c9515 af275c60bf ded377dfc2
@@ -4,6 +4,8 @@ jobs:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
working_directory: ~/repo
|
||||
environment:
|
||||
GOPATH: $HOME/go
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
@@ -24,63 +26,17 @@ jobs:
|
||||
- run:
|
||||
name: Tests
|
||||
command: yarn test
|
||||
# publish-canary:
|
||||
# docker:
|
||||
# - image: circleci/node:10
|
||||
# working_directory: ~/repo
|
||||
# steps:
|
||||
# - checkout
|
||||
# - run:
|
||||
# name: Updating apt-get list
|
||||
# command: sudo apt-get update
|
||||
# - run:
|
||||
# name: Installing the latest version of Go
|
||||
# command: sudo apt-get install golang-go
|
||||
# - run: yarn install
|
||||
# - run: yarn bootstrap
|
||||
# - run:
|
||||
# name: Saving Authentication Information
|
||||
# command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||
# - run:
|
||||
# name: Publishing to Canary Channel
|
||||
# command: yarn run lerna publish from-git --npm-tag canary --yes
|
||||
publish-stable:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
working_directory: ~/repo
|
||||
steps:
|
||||
- checkout
|
||||
- run:
|
||||
name: Updating apt-get list
|
||||
command: sudo apt-get update
|
||||
name: Potentially save npm token
|
||||
command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
|
||||
- run:
|
||||
name: Installing the latest version of Go
|
||||
command: sudo apt-get install golang-go
|
||||
- run: yarn install
|
||||
- run: yarn bootstrap
|
||||
name: Potentially publish canary release
|
||||
command: "if ls ~/.npmrc >/dev/null 2>&1 && [[ $(git describe --exact-match 2> /dev/null || :) =~ -canary ]]; then yarn run lerna publish from-git --npm-tag canary --yes; else echo \"Did not publish\"; fi"
|
||||
- run:
|
||||
name: Saving Authentication Information
|
||||
command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||
- run:
|
||||
name: Publishing to Stable Channel
|
||||
command: yarn run lerna publish from-git --yes
|
||||
name: Potentially publish stable release
|
||||
command: "if ls ~/.npmrc >/dev/null 2>&1 && [[ ! $(git describe --exact-match 2> /dev/null || :) =~ -canary ]]; then yarn run lerna publish from-git --yes; else echo \"Did not publish\"; fi"
|
||||
workflows:
|
||||
version: 2
|
||||
unscheduled:
|
||||
build-and-deploy:
|
||||
jobs:
|
||||
- build:
|
||||
filters:
|
||||
tags:
|
||||
only: /.*/
|
||||
- publish-stable:
|
||||
requires:
|
||||
- build
|
||||
filters:
|
||||
tags:
|
||||
only: /^.*(\d+\.)(\d+\.)(\*|\d+)$/
|
||||
# - publish-canary:
|
||||
# requires:
|
||||
# - build
|
||||
# filters:
|
||||
# tags:
|
||||
# only: /^.*canary.*($|\b)/
|
||||
- build
|
||||
|
||||
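The two long `command` strings above implement the publish gating. A rough sketch of that logic, pulled out of the YAML for readability (this combines the canary and stable steps into one script and is not itself part of the repository; the example tags are illustrative):

```bash
# Sketch only: mirrors the "Potentially publish ..." commands above.
tag="$(git describe --exact-match 2> /dev/null || :)"

if ls ~/.npmrc >/dev/null 2>&1; then
  if [[ $tag =~ -canary ]]; then
    # e.g. a tag like @now/bash@0.1.2-canary.3 goes to the canary dist-tag
    yarn run lerna publish from-git --npm-tag canary --yes
  else
    # any other exact-match tag (e.g. @now/bash@0.1.2) goes to stable
    yarn run lerna publish from-git --yes
  fi
else
  echo "Did not publish"
fi
```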
16 .eslintrc
@@ -5,6 +5,20 @@
|
||||
"import/no-unresolved": 0,
|
||||
"import/no-dynamic-require": 0,
|
||||
"global-require": 0
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["test/**"],
|
||||
"rules": {
|
||||
"import/no-extraneous-dependencies": 0
|
||||
},
|
||||
"globals": {
|
||||
"describe": true,
|
||||
"it": true,
|
||||
"test": true,
|
||||
"expect": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
@@ -19,3 +19,9 @@ For the canary channel use:
|
||||
```
|
||||
yarn publish-canary
|
||||
```
|
||||
|
||||
CircleCI will take care of publishing the updated packages to npm from there.
|
||||
|
||||
If for some reason CircleCI fails to publish the npm package, you may do so
|
||||
manually by running `npm publish` from the package directory. Make sure to
|
||||
include the `--tag canary` parameter if you are publishing a canary release!
|
||||
|
||||
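For example, a manual canary publish of a single package would look roughly like this (the package directory is illustrative):

```bash
cd packages/now-bash
npm publish --tag canary
```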
@@ -1,4 +1,4 @@
|
||||
module.exports = {
|
||||
testEnvironment: 'node',
|
||||
rootDir: 'test',
|
||||
collectCoverageFrom: ['packages/**/*.{js,jsx}', '!**/node_modules/**'],
|
||||
};
|
||||
|
||||
19 lerna.json
@@ -2,7 +2,24 @@
|
||||
"npmClient": "yarn",
|
||||
"useWorkspaces": true,
|
||||
"packages": [
|
||||
"packages/*"
|
||||
"packages/now-build-utils",
|
||||
"packages/now-node-bridge",
|
||||
"packages/now-php-bridge",
|
||||
"packages/now-bash",
|
||||
"packages/now-cgi",
|
||||
"packages/now-go",
|
||||
"packages/now-html-minifier",
|
||||
"packages/now-lambda",
|
||||
"packages/now-md",
|
||||
"packages/now-mdx-deck",
|
||||
"packages/now-next",
|
||||
"packages/now-node",
|
||||
"packages/now-node-server",
|
||||
"packages/now-optipng",
|
||||
"packages/now-php",
|
||||
"packages/now-python",
|
||||
"packages/now-static-build",
|
||||
"packages/now-wordpress"
|
||||
],
|
||||
"command": {
|
||||
"publish": {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
{
|
||||
"name": "now-builders",
|
||||
"version": "0.0.0",
|
||||
"private": true,
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
@@ -26,14 +27,21 @@
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "^5.0.4",
|
||||
"@types/glob": "^7.1.1",
|
||||
"@types/multistream": "^2.1.1",
|
||||
"@types/node": "^10.12.8",
|
||||
"async-retry": "1.2.3",
|
||||
"buffer-replace": "^1.0.0",
|
||||
"eslint": "^5.9.0",
|
||||
"eslint-config-airbnb-base": "^13.1.0",
|
||||
"eslint-config-prettier": "^3.1.0",
|
||||
"eslint-plugin-import": "^2.14.0",
|
||||
"fs-extra": "^7.0.1",
|
||||
"glob": "^7.1.3",
|
||||
"jest": "^23.6.0",
|
||||
"lint-staged": "^8.0.4",
|
||||
"node-fetch": "^2.3.0",
|
||||
"pre-commit": "^1.2.2",
|
||||
"prettier": "^1.15.2",
|
||||
"typescript": "^3.1.6"
|
||||
|
||||
32 packages/now-bash/.editorconfig Normal file
@@ -0,0 +1,32 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
indent_style = tab
|
||||
indent_size = 4
|
||||
tab_width = 4
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
[{*.json,*.json.example,*.gyp,*.yml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[*.py]
|
||||
indent_style = space
|
||||
indent_size = 4
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
# Ideal settings - some plugins might support these.
|
||||
[*.js]
|
||||
quote_type = single
|
||||
|
||||
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}]
|
||||
curly_bracket_next_line = false
|
||||
spaces_around_operators = true
|
||||
spaces_around_brackets = outside
|
||||
# close enough to 1TB
|
||||
indent_brace_style = K&R
|
||||
2 packages/now-bash/.gitignore vendored Normal file
@@ -0,0 +1,2 @@
|
||||
node_modules
|
||||
handler
|
||||
14 packages/now-bash/bootstrap Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
cd "$LAMBDA_TASK_ROOT"
|
||||
|
||||
# Configure `import`
|
||||
export IMPORT_CACHE="$LAMBDA_TASK_ROOT/.import-cache"
|
||||
export PATH="$IMPORT_CACHE/bin:$PATH"
|
||||
|
||||
# Load `import` and runtime
|
||||
. "$(which import)"
|
||||
. "$IMPORT_CACHE/runtime.sh"
|
||||
|
||||
# Load user code and process events in a loop forever
|
||||
_lambda_runtime_init
|
||||
40 packages/now-bash/builder.sh Executable file
@@ -0,0 +1,40 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
# `import` debug logs are always enabled during build
|
||||
export IMPORT_DEBUG=1
|
||||
|
||||
# Install `import`
|
||||
IMPORT_BIN="$IMPORT_CACHE/bin/import"
|
||||
mkdir -p "$(dirname "$IMPORT_BIN")"
|
||||
curl -sfLS https://import.pw > "$IMPORT_BIN"
|
||||
chmod +x "$IMPORT_BIN"
|
||||
|
||||
# For now only the entrypoint file is copied into the lambda
|
||||
mkdir -p "$(dirname "$ENTRYPOINT")"
|
||||
cp "$SRC/$ENTRYPOINT" "$ENTRYPOINT"
|
||||
|
||||
# Copy in the runtime
|
||||
cp "$BUILDER/runtime.sh" "$IMPORT_CACHE"
|
||||
cp "$BUILDER/bootstrap" .
|
||||
|
||||
# Load `import`
|
||||
. "$(which import)"
|
||||
|
||||
# Cache runtime and user dependencies
|
||||
echo "Caching imports in \"$ENTRYPOINT\"…"
|
||||
. "$IMPORT_CACHE/runtime.sh"
|
||||
. "$ENTRYPOINT"
|
||||
echo "Done caching imports"
|
||||
|
||||
# Run user build script
|
||||
if declare -f build > /dev/null; then
|
||||
echo "Running \`build\` function in \"$ENTRYPOINT\"…"
|
||||
build "$@"
|
||||
fi
|
||||
|
||||
# Ensure the entrypoint defined a `handler` function
|
||||
if ! declare -f handler > /dev/null; then
|
||||
echo "ERROR: A \`handler\` function must be defined in \"$ENTRYPOINT\"!" >&2
|
||||
exit 1
|
||||
fi
|
||||
56 packages/now-bash/index.js Normal file
@@ -0,0 +1,56 @@
|
||||
const execa = require('execa');
|
||||
const { join } = require('path');
|
||||
const snakeCase = require('snake-case');
|
||||
const glob = require('@now/build-utils/fs/glob');
|
||||
const download = require('@now/build-utils/fs/download');
|
||||
const { createLambda } = require('@now/build-utils/lambda');
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory');
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '10mb',
|
||||
};
|
||||
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
exports.build = async ({ files, entrypoint, config }) => {
|
||||
const srcDir = await getWritableDirectory();
|
||||
const workDir = await getWritableDirectory();
|
||||
|
||||
console.log('downloading files...');
|
||||
await download(files, srcDir);
|
||||
|
||||
const configEnv = Object.keys(config).reduce((o, v) => {
|
||||
o[`IMPORT_${snakeCase(v).toUpperCase()}`] = config[v]; // eslint-disable-line no-param-reassign
|
||||
return o;
|
||||
}, {});
|
||||
|
||||
const IMPORT_CACHE = `${workDir}/.import-cache`;
|
||||
const env = Object.assign({}, process.env, configEnv, {
|
||||
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
|
||||
IMPORT_CACHE,
|
||||
SRC: srcDir,
|
||||
BUILDER: __dirname,
|
||||
ENTRYPOINT: entrypoint,
|
||||
});
|
||||
|
||||
const builderPath = join(__dirname, 'builder.sh');
|
||||
|
||||
await execa(builderPath, [entrypoint], {
|
||||
env,
|
||||
cwd: workDir,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
const lambda = await createLambda({
|
||||
files: await glob('**', workDir),
|
||||
handler: entrypoint, // not actually used in `bootstrap`
|
||||
runtime: 'provided',
|
||||
environment: Object.assign({}, configEnv, {
|
||||
SCRIPT_FILENAME: entrypoint,
|
||||
}),
|
||||
});
|
||||
|
||||
return {
|
||||
[entrypoint]: lambda,
|
||||
};
|
||||
};
|
||||
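As a usage sketch (not part of this diff), a project would point a `now.json` build at a shell entrypoint and pass builder options through `config`; each config key is exposed to `builder.sh` and to the resulting lambda as an `IMPORT_<SNAKE_CASE_KEY>` environment variable by the reducer in `exports.build` above. The file name and config key below are hypothetical:

```json
{
  "version": 2,
  "builds": [
    { "src": "index.sh", "use": "@now/bash", "config": { "timeout": "30" } }
  ]
}
```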
22 packages/now-bash/package.json Normal file
@@ -0,0 +1,22 @@
|
||||
{
|
||||
"name": "@now/bash",
|
||||
"version": "0.1.1",
|
||||
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
|
||||
"main": "index.js",
|
||||
"author": "Nathan Rajlich <nate@zeit.co>",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"builder.sh",
|
||||
"runtime.sh",
|
||||
"bootstrap",
|
||||
"index.js",
|
||||
"package.json"
|
||||
],
|
||||
"dependencies": {
|
||||
"execa": "^1.0.0",
|
||||
"snake-case": "^2.1.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
}
|
||||
111 packages/now-bash/runtime.sh Normal file
@@ -0,0 +1,111 @@
|
||||
import "static-binaries@1.0.0"
|
||||
static_binaries jq
|
||||
|
||||
# These get reset upon each request
|
||||
_STATUS_CODE="$(mktemp)"
|
||||
_HEADERS="$(mktemp)"
|
||||
|
||||
_lambda_runtime_api() {
|
||||
local endpoint="$1"
|
||||
shift
|
||||
curl -sfLS "http://$AWS_LAMBDA_RUNTIME_API/2018-06-01/runtime/$endpoint" "$@"
|
||||
}
|
||||
|
||||
_lambda_runtime_init() {
|
||||
# Initialize user code
|
||||
. "$SCRIPT_FILENAME" || {
|
||||
local exit_code="$?"
|
||||
local error
|
||||
error='{"exitCode":'"$exit_code"'}'
|
||||
_lambda_runtime_api "init/error" -X POST -d "$error"
|
||||
exit "$EXIT_CODE"
|
||||
}
|
||||
|
||||
# Process events
|
||||
while true; do _lambda_runtime_next; done
|
||||
}
|
||||
|
||||
_lambda_runtime_next() {
|
||||
echo 200 > "$_STATUS_CODE"
|
||||
echo '{"content-type":"text/plain; charset=utf8"}' > "$_HEADERS"
|
||||
|
||||
local headers
|
||||
headers="$(mktemp)"
|
||||
|
||||
# Get an event
|
||||
local event
|
||||
event="$(mktemp)"
|
||||
_lambda_runtime_api invocation/next -D "$headers" | jq --raw-output --monochrome-output '.body' > "$event"
|
||||
|
||||
local request_id
|
||||
request_id="$(grep -Fi Lambda-Runtime-Aws-Request-Id "$headers" | tr -d '[:space:]' | cut -d: -f2)"
|
||||
rm -f "$headers"
|
||||
|
||||
# Execute the handler function from the script
|
||||
local body
|
||||
body="$(mktemp)"
|
||||
|
||||
local exit_code=0
|
||||
REQUEST="$event"
|
||||
|
||||
# Stdin of the `handler` function is the HTTP request body.
|
||||
# Need to use a fifo here instead of bash <() because Lambda
|
||||
# errors with "/dev/fd/63 not found" for some reason :/
|
||||
local stdin
|
||||
stdin="$(mktemp --dry-run)"
|
||||
mkfifo "$stdin"
|
||||
_lambda_runtime_body "$event" > "$stdin" &
|
||||
|
||||
handler "$event" < "$stdin" > "$body" || exit_code="$?"
|
||||
rm -f "$event" "$stdin"
|
||||
|
||||
if [ "$exit_code" -eq 0 ]; then
|
||||
# Send the response
|
||||
jq --raw-input --raw-output --compact-output --slurp --monochrome-output \
|
||||
--arg statusCode "$(cat "$_STATUS_CODE")" \
|
||||
--argjson headers "$(cat "$_HEADERS")" \
|
||||
'{statusCode:$statusCode|tonumber, headers:$headers, encoding:"base64", body:.|@base64}' < "$body" \
|
||||
| _lambda_runtime_api "invocation/$request_id/response" -X POST -d @- > /dev/null
|
||||
rm -f "$body" "$_HEADERS"
|
||||
else
|
||||
echo "\`handler\` function return code: $exit_code"
|
||||
_lambda_runtime_api "invocation/$request_id/error" -X POST -d @- > /dev/null <<< '{"exitCode":'"$exit_code"'}'
|
||||
fi
|
||||
}
|
||||
|
||||
_lambda_runtime_body() {
|
||||
if [ "$(jq --raw-output '.body | type' < "$1")" = "string" ]; then
|
||||
if [ "$(jq --raw-output '.encoding' < "$1")" = "base64" ]; then
|
||||
jq --raw-output '.body' < "$1" | base64 -d
|
||||
else
|
||||
# assume plain-text body
|
||||
jq --raw-output '.body' < "$1"
|
||||
fi
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
# Set the response status code.
|
||||
http_response_code() {
|
||||
echo "$1" > "$_STATUS_CODE"
|
||||
}
|
||||
|
||||
# Sets a response header.
|
||||
# Overrides existing header if it has already been set.
|
||||
http_response_header() {
|
||||
local name="$1"
|
||||
local value="$2"
|
||||
local tmp
|
||||
tmp="$(mktemp)"
|
||||
jq --arg name "$name" --arg value "$value" '.[$name] = $value' < "$_HEADERS" > "$tmp"
|
||||
mv -f "$tmp" "$_HEADERS"
|
||||
}
|
||||
|
||||
http_response_redirect() {
|
||||
http_response_code "${2:-302}"
|
||||
http_response_header "location" "$1"
|
||||
}
|
||||
|
||||
http_response_json() {
|
||||
http_response_header "content-type" "application/json; charset=utf8"
|
||||
}
|
||||
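A minimal entrypoint consuming this runtime might look like the sketch below (a hypothetical user file, not part of this diff). `builder.sh` requires a `handler` function and optionally runs a `build` function at build time; at request time the request body arrives on the handler's stdin and whatever it writes to stdout becomes the response body, with the helpers above adjusting status and headers.

```bash
#!/bin/bash
# Hypothetical user entrypoint for @now/bash.

build() {
  # optional: runs once inside builder.sh at build time
  echo "building…"
}

handler() {
  http_response_code 200
  http_response_json
  echo '{"hello": "world"}'
}
```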
146 packages/now-bash/yarn.lock Normal file
@@ -0,0 +1,146 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
cross-spawn@^6.0.0:
|
||||
version "6.0.5"
|
||||
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
|
||||
integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
|
||||
dependencies:
|
||||
nice-try "^1.0.4"
|
||||
path-key "^2.0.1"
|
||||
semver "^5.5.0"
|
||||
shebang-command "^1.2.0"
|
||||
which "^1.2.9"
|
||||
|
||||
end-of-stream@^1.1.0:
|
||||
version "1.4.1"
|
||||
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43"
|
||||
integrity sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==
|
||||
dependencies:
|
||||
once "^1.4.0"
|
||||
|
||||
execa@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
|
||||
integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
|
||||
dependencies:
|
||||
cross-spawn "^6.0.0"
|
||||
get-stream "^4.0.0"
|
||||
is-stream "^1.1.0"
|
||||
npm-run-path "^2.0.0"
|
||||
p-finally "^1.0.0"
|
||||
signal-exit "^3.0.0"
|
||||
strip-eof "^1.0.0"
|
||||
|
||||
get-stream@^4.0.0:
|
||||
version "4.1.0"
|
||||
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
|
||||
integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
|
||||
dependencies:
|
||||
pump "^3.0.0"
|
||||
|
||||
is-stream@^1.1.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
|
||||
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
|
||||
|
||||
isexe@^2.0.0:
|
||||
version "2.0.0"
|
||||
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
||||
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
|
||||
|
||||
lower-case@^1.1.1:
|
||||
version "1.1.4"
|
||||
resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac"
|
||||
integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw=
|
||||
|
||||
nice-try@^1.0.4:
|
||||
version "1.0.5"
|
||||
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
|
||||
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
|
||||
|
||||
no-case@^2.2.0:
|
||||
version "2.3.2"
|
||||
resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac"
|
||||
integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ==
|
||||
dependencies:
|
||||
lower-case "^1.1.1"
|
||||
|
||||
npm-run-path@^2.0.0:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
|
||||
integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
|
||||
dependencies:
|
||||
path-key "^2.0.0"
|
||||
|
||||
once@^1.3.1, once@^1.4.0:
|
||||
version "1.4.0"
|
||||
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
|
||||
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
|
||||
dependencies:
|
||||
wrappy "1"
|
||||
|
||||
p-finally@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
|
||||
integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
|
||||
|
||||
path-key@^2.0.0, path-key@^2.0.1:
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
|
||||
integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
|
||||
|
||||
pump@^3.0.0:
|
||||
version "3.0.0"
|
||||
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
|
||||
integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
|
||||
dependencies:
|
||||
end-of-stream "^1.1.0"
|
||||
once "^1.3.1"
|
||||
|
||||
semver@^5.5.0:
|
||||
version "5.6.0"
|
||||
resolved "https://registry.yarnpkg.com/semver/-/semver-5.6.0.tgz#7e74256fbaa49c75aa7c7a205cc22799cac80004"
|
||||
integrity sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==
|
||||
|
||||
shebang-command@^1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
|
||||
integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
|
||||
dependencies:
|
||||
shebang-regex "^1.0.0"
|
||||
|
||||
shebang-regex@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
|
||||
integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
|
||||
|
||||
signal-exit@^3.0.0:
|
||||
version "3.0.2"
|
||||
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
|
||||
integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=
|
||||
|
||||
snake-case@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-2.1.0.tgz#41bdb1b73f30ec66a04d4e2cad1b76387d4d6d9f"
|
||||
integrity sha1-Qb2xtz8w7GagTU4srRt2OH1NbZ8=
|
||||
dependencies:
|
||||
no-case "^2.2.0"
|
||||
|
||||
strip-eof@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
|
||||
integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
|
||||
|
||||
which@^1.2.9:
|
||||
version "1.3.1"
|
||||
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
|
||||
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
|
||||
dependencies:
|
||||
isexe "^2.0.0"
|
||||
|
||||
wrappy@1:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
||||
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
|
||||
1 packages/now-build-utils/.npmignore Normal file
@@ -0,0 +1 @@
|
||||
/test
|
||||
@@ -1,20 +1,38 @@
|
||||
const assert = require('assert');
|
||||
const fs = require('fs-extra');
|
||||
const MultiStream = require('multistream');
|
||||
const multiStream = require('multistream');
|
||||
const path = require('path');
|
||||
const Sema = require('async-sema');
|
||||
|
||||
/** @typedef {{[filePath: string]: FileFsRef}} FsFiles */
|
||||
|
||||
const semaToPreventEMFILE = new Sema(30);
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
* @argument {Object} options
|
||||
* @argument {number} [options.mode=0o100644]
|
||||
* @argument {string} options.fsPath
|
||||
*/
|
||||
class FileFsRef {
|
||||
constructor({ mode = 0o100644, fsPath }) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof fsPath === 'string');
|
||||
/** @type {string} */
|
||||
this.type = 'FileFsRef';
|
||||
/** @type {number} */
|
||||
this.mode = mode;
|
||||
/** @type {string} */
|
||||
this.fsPath = fsPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* @argument {Object} options
|
||||
* @argument {number} [options.mode=0o100644]
|
||||
* @argument {NodeJS.ReadableStream} options.stream
|
||||
* @argument {string} options.fsPath
|
||||
* @returns {Promise<FileFsRef>}
|
||||
*/
|
||||
static async fromStream({ mode = 0o100644, stream, fsPath }) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof stream.pipe === 'function'); // is-stream
|
||||
@@ -33,6 +51,9 @@ class FileFsRef {
|
||||
return new FileFsRef({ mode, fsPath });
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Promise<NodeJS.ReadableStream>}
|
||||
*/
|
||||
async toStreamAsync() {
|
||||
await semaToPreventEMFILE.acquire();
|
||||
const release = () => semaToPreventEMFILE.release();
|
||||
@@ -42,19 +63,24 @@ class FileFsRef {
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {NodeJS.ReadableStream}
|
||||
*/
|
||||
toStream() {
|
||||
let flag;
|
||||
|
||||
// eslint-disable-next-line consistent-return
|
||||
return new MultiStream((cb) => {
|
||||
if (flag) return cb();
|
||||
return multiStream((cb) => {
|
||||
if (flag) return cb(null, null);
|
||||
flag = true;
|
||||
|
||||
this.toStreamAsync().then((stream) => {
|
||||
cb(undefined, stream);
|
||||
}).catch((error) => {
|
||||
cb(error);
|
||||
});
|
||||
this.toStreamAsync()
|
||||
.then((stream) => {
|
||||
cb(null, stream);
|
||||
})
|
||||
.catch((error) => {
|
||||
cb(error, null);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
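A short sketch of how a builder consumes `FileFsRef` (the paths are illustrative and the module path assumes the usual `@now/build-utils` layout):

```js
const FileFsRef = require('@now/build-utils/file-fs-ref.js');

async function copyEntrypoint() {
  // Reference an existing file on disk (mode defaults to 0o100644)
  const source = new FileFsRef({ fsPath: '/tmp/src/index.sh' });

  // Persist any readable stream to disk and get a new ref back
  return FileFsRef.fromStream({
    mode: source.mode,
    stream: source.toStream(),
    fsPath: '/tmp/work/index.sh',
  });
}
```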
@@ -4,6 +4,8 @@ const multiStream = require('multistream');
|
||||
const retry = require('async-retry');
|
||||
const Sema = require('async-sema');
|
||||
|
||||
/** @typedef {{[filePath: string]: FileRef}} Files */
|
||||
|
||||
const semaToDownloadFromS3 = new Sema(10);
|
||||
|
||||
class BailableError extends Error {
|
||||
@@ -54,7 +56,7 @@ class FileRef {
|
||||
const resp = await fetch(url);
|
||||
if (!resp.ok) {
|
||||
const error = new BailableError(
|
||||
`${resp.status} ${resp.statusText}`,
|
||||
`download: ${resp.status} ${resp.statusText} for ${url}`,
|
||||
);
|
||||
if (resp.status === 403) error.bail = true;
|
||||
throw error;
|
||||
|
||||
94 packages/now-build-utils/fs/bootstrap-yarn.js vendored Normal file
@@ -0,0 +1,94 @@
|
||||
const MemoryFileSystem = require('memory-fs');
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const { spawnSync } = require('child_process');
|
||||
|
||||
const yarnPath = spawnSync('which', ['yarn'])
|
||||
.stdout.toString()
|
||||
.trim();
|
||||
|
||||
const cachePath = spawnSync(yarnPath, ['cache', 'dir'])
|
||||
.stdout.toString()
|
||||
.trim();
|
||||
|
||||
spawnSync(yarnPath, ['cache', 'clean']);
|
||||
const vfs = new MemoryFileSystem();
|
||||
|
||||
function isInsideCachePath(filename) {
|
||||
const relative = path.relative(cachePath, filename);
|
||||
return !relative.startsWith('..');
|
||||
}
|
||||
|
||||
const saveCreateWriteStream = fs.createWriteStream;
|
||||
fs.createWriteStream = (...args) => {
|
||||
const filename = args[0];
|
||||
if (!isInsideCachePath(filename)) {
|
||||
return saveCreateWriteStream.call(fs, ...args);
|
||||
}
|
||||
|
||||
vfs.mkdirpSync(path.dirname(filename));
|
||||
fs.writeFileSync(filename, Buffer.alloc(0));
|
||||
const stream = vfs.createWriteStream(...args);
|
||||
|
||||
stream.on('finish', () => {
|
||||
setTimeout(() => {
|
||||
stream.emit('close');
|
||||
});
|
||||
});
|
||||
|
||||
return stream;
|
||||
};
|
||||
|
||||
const saveReadFile = fs.readFile;
|
||||
fs.readFile = (...args) => {
|
||||
const filename = args[0];
|
||||
if (!isInsideCachePath(filename)) {
|
||||
return saveReadFile.call(fs, ...args);
|
||||
}
|
||||
|
||||
const callback = args[args.length - 1];
|
||||
return vfs.readFile(...args.slice(0, -1), (error, result) => {
|
||||
if (error) {
|
||||
saveReadFile.call(fs, ...args);
|
||||
return;
|
||||
}
|
||||
|
||||
callback(error, result);
|
||||
});
|
||||
};
|
||||
|
||||
const saveCopyFile = fs.copyFile;
|
||||
fs.copyFile = (...args) => {
|
||||
const src = args[0];
|
||||
const dest = args[1];
|
||||
const callback = args[args.length - 1];
|
||||
|
||||
if (isInsideCachePath(src) && !isInsideCachePath(dest)) {
|
||||
const buffer = vfs.readFileSync(src);
|
||||
return fs.writeFile(dest, buffer, callback);
|
||||
}
|
||||
|
||||
if (!isInsideCachePath(src) && isInsideCachePath(dest)) {
|
||||
const buffer = fs.readFileSync(src);
|
||||
|
||||
vfs.mkdirpSync(path.dirname(dest));
|
||||
fs.writeFileSync(dest, Buffer.alloc(0));
|
||||
return vfs.writeFile(dest, buffer, callback);
|
||||
}
|
||||
|
||||
return saveCopyFile.call(fs, ...args);
|
||||
};
|
||||
|
||||
const saveWriteFile = fs.writeFile;
|
||||
fs.writeFile = (...args) => {
|
||||
const filename = args[0];
|
||||
if (!isInsideCachePath(filename)) {
|
||||
return saveWriteFile.call(fs, ...args);
|
||||
}
|
||||
|
||||
vfs.mkdirpSync(path.dirname(filename));
|
||||
fs.writeFileSync(filename, Buffer.alloc(0));
|
||||
return vfs.writeFile(...args);
|
||||
};
|
||||
|
||||
require(yarnPath);
|
||||
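This shim is not meant to be required directly; `installDependencies` (later in this diff) spawns it as a wrapper around yarn on Lambda so that yarn's cache lands in `memory-fs` instead of filling `/tmp`. Roughly, simplified from that later hunk:

```js
const path = require('path');
const { spawn } = require('child_process');

// Sketch: run yarn through bootstrap-yarn.js so its cache stays in memory-fs.
function installWithMemoryFsCache(destPath, extraArgs = []) {
  const args = [path.join(__dirname, 'bootstrap-yarn.js'), '--cwd', destPath, ...extraArgs];
  return new Promise((resolve, reject) => {
    const child = spawn('node', args, { stdio: 'inherit', cwd: destPath });
    child.on('error', reject);
    child.on('close', code => (code === 0 ? resolve() : reject(new Error(`yarn exited with ${code}`))));
  });
}
```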
@@ -1,20 +1,38 @@
|
||||
const path = require('path');
|
||||
const FileFsRef = require('../file-fs-ref.js');
|
||||
|
||||
/** @typedef {import('../file-ref')} FileRef */
|
||||
/** @typedef {import('../file-fs-ref')} FileFsRef */
|
||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
||||
/** @typedef {{[filePath: string]: FileFsRef}|{}} DownloadedFiles */
|
||||
|
||||
/**
|
||||
* @param {FileRef|FileFsRef} file
|
||||
* @param {string} fsPath
|
||||
* @returns {Promise<FileFsRef>}
|
||||
*/
|
||||
async function downloadFile(file, fsPath) {
|
||||
const { mode } = file;
|
||||
const stream = file.toStream();
|
||||
return FileFsRef.fromStream({ mode, stream, fsPath });
|
||||
}
|
||||
|
||||
/**
|
||||
* Download files to disk
|
||||
* @argument {Files} files
|
||||
* @argument {string} basePath
|
||||
* @returns {Promise<DownloadedFiles>}
|
||||
*/
|
||||
module.exports = async function download(files, basePath) {
|
||||
const files2 = {};
|
||||
|
||||
await Promise.all(Object.keys(files).map(async (name) => {
|
||||
const file = files[name];
|
||||
const fsPath = path.join(basePath, name);
|
||||
files2[name] = await downloadFile(file, fsPath);
|
||||
}));
|
||||
await Promise.all(
|
||||
Object.keys(files).map(async (name) => {
|
||||
const file = files[name];
|
||||
const fsPath = path.join(basePath, name);
|
||||
files2[name] = await downloadFile(file, fsPath);
|
||||
}),
|
||||
);
|
||||
|
||||
return files2;
|
||||
};
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
|
||||
const dev = !process.env.AWS_EXECUTION_ENV;
|
||||
const TMP_PATH = dev ? path.join(process.cwd(), 'tmp') : '/tmp';
|
||||
const prod = process.env.AWS_EXECUTION_ENV || process.env.X_GOOGLE_CODE_LOCATION;
|
||||
const TMP_PATH = prod ? '/tmp' : path.join(__dirname, 'tmp');
|
||||
|
||||
module.exports = async function getWritableDirectory() {
|
||||
const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
|
||||
|
||||
@@ -3,15 +3,30 @@ const path = require('path');
|
||||
const vanillaGlob = require('glob');
|
||||
const FileFsRef = require('../file-fs-ref.js');
|
||||
|
||||
/** @typedef {import('fs').Stats} Stats */
|
||||
/** @typedef {import('glob').IOptions} GlobOptions */
|
||||
/** @typedef {import('../file-fs-ref').FsFiles|{}} GlobFiles */
|
||||
|
||||
/**
|
||||
* @argument {string} pattern
|
||||
* @argument {GlobOptions|string} opts
|
||||
* @argument {string} [mountpoint]
|
||||
* @returns {Promise<GlobFiles>}
|
||||
*/
|
||||
module.exports = function glob(pattern, opts = {}, mountpoint) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let options = opts;
|
||||
if (typeof options === 'string') {
|
||||
options = { cwd: options };
|
||||
/** @type {GlobOptions} */
|
||||
let options;
|
||||
if (typeof opts === 'string') {
|
||||
options = { cwd: opts };
|
||||
} else {
|
||||
options = opts;
|
||||
}
|
||||
|
||||
if (!options.cwd) {
|
||||
throw new Error('Second argument (basePath) must be specified for names of resulting files');
|
||||
throw new Error(
|
||||
'Second argument (basePath) must be specified for names of resulting files',
|
||||
);
|
||||
}
|
||||
|
||||
if (!path.isAbsolute(options.cwd)) {
|
||||
@@ -26,21 +41,27 @@ module.exports = function glob(pattern, opts = {}, mountpoint) {
|
||||
vanillaGlob(pattern, options, (error, files) => {
|
||||
if (error) return reject(error);
|
||||
|
||||
resolve(files.reduce((files2, relativePath) => {
|
||||
const fsPath = path.join(options.cwd, relativePath);
|
||||
const stat = options.statCache[fsPath];
|
||||
assert(stat, `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`);
|
||||
if (stat.isFile()) {
|
||||
let finalPath = relativePath;
|
||||
if (mountpoint) finalPath = path.join(mountpoint, finalPath);
|
||||
return {
|
||||
...files2,
|
||||
[finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
|
||||
};
|
||||
}
|
||||
resolve(
|
||||
files.reduce((files2, relativePath) => {
|
||||
const fsPath = path.join(options.cwd, relativePath);
|
||||
/** @type {Stats|any} */
|
||||
const stat = options.statCache[fsPath];
|
||||
assert(
|
||||
stat,
|
||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
||||
);
|
||||
if (stat && stat.isFile()) {
|
||||
let finalPath = relativePath;
|
||||
if (mountpoint) finalPath = path.join(mountpoint, finalPath);
|
||||
return {
|
||||
...files2,
|
||||
[finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
|
||||
};
|
||||
}
|
||||
|
||||
return files2;
|
||||
}, {}));
|
||||
return files2;
|
||||
}, {}),
|
||||
);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
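Typical usage from a builder (the mountpoint argument is optional; directory names are illustrative):

```js
const glob = require('@now/build-utils/fs/glob.js');

async function collectOutput(workDir) {
  // Short form: glob('**', workDir) — a string is treated as { cwd }.
  // With a mountpoint, 'index.html' becomes 'static/index.html' in the result.
  return glob('**', { cwd: workDir }, 'static');
}
```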
@@ -1,6 +1,25 @@
|
||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
||||
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
|
||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
||||
|
||||
/**
|
||||
* @callback delegate
|
||||
* @argument {string} name
|
||||
* @returns {string}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Rename files using delegate function
|
||||
* @argument {Files} files
|
||||
* @argument {delegate} delegate
|
||||
* @returns {Files}
|
||||
*/
|
||||
module.exports = function rename(files, delegate) {
|
||||
return Object.keys(files).reduce((newFiles, name) => ({
|
||||
...newFiles,
|
||||
[delegate(name)]: files[name],
|
||||
}), {});
|
||||
return Object.keys(files).reduce(
|
||||
(newFiles, name) => ({
|
||||
...newFiles,
|
||||
[delegate(name)]: files[name],
|
||||
}),
|
||||
{},
|
||||
);
|
||||
};
|
||||
|
||||
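For example, a builder could use `rename` to move all of its outputs under a prefix (the prefix is illustrative):

```js
const path = require('path');
const rename = require('@now/build-utils/fs/rename.js');

// 'index.js' -> 'api/index.js', 'util/db.js' -> 'api/util/db.js', …
const moveUnderApi = files => rename(files, name => path.join('api', name));
```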
@@ -1,7 +1,11 @@
|
||||
const assert = require('assert');
|
||||
const fs = require('fs-extra');
|
||||
const path = require('path');
|
||||
const { spawn } = require('child_process');
|
||||
|
||||
const prod = process.env.AWS_EXECUTION_ENV
|
||||
|| process.env.X_GOOGLE_CODE_LOCATION;
|
||||
|
||||
function spawnAsync(command, args, cwd) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const child = spawn(command, args, { stdio: 'inherit', cwd });
|
||||
@@ -13,22 +17,34 @@ function spawnAsync(command, args, cwd) {
|
||||
}
|
||||
|
||||
async function runShellScript(fsPath) {
|
||||
assert(path.isAbsolute(fsPath));
|
||||
const destPath = path.dirname(fsPath);
|
||||
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
|
||||
return true;
|
||||
}
|
||||
|
||||
async function shouldUseNpm(destPath) {
|
||||
async function scanParentDirs(destPath, scriptName) {
|
||||
assert(path.isAbsolute(destPath));
|
||||
|
||||
let hasScript = false;
|
||||
let hasPackageLockJson = false;
|
||||
let currentDestPath = destPath;
|
||||
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
const packageJsonPath = path.join(currentDestPath, 'package.json');
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
if (await fs.exists(path.join(currentDestPath, 'package.json'))) {
|
||||
if (await fs.exists(packageJsonPath)) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
if (await fs.exists(path.join(currentDestPath, 'package-lock.json'))) {
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
const packageJson = JSON.parse(await fs.readFile(packageJsonPath));
|
||||
hasScript = Boolean(
|
||||
packageJson.scripts && scriptName && packageJson.scripts[scriptName],
|
||||
);
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
hasPackageLockJson = await fs.exists(
|
||||
path.join(currentDestPath, 'package-lock.json'),
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
const newDestPath = path.dirname(currentDestPath);
|
||||
@@ -36,16 +52,29 @@ async function shouldUseNpm(destPath) {
|
||||
currentDestPath = newDestPath;
|
||||
}
|
||||
|
||||
return false;
|
||||
return { hasScript, hasPackageLockJson };
|
||||
}
|
||||
|
||||
async function runNpmInstall(destPath, args = []) {
|
||||
async function installDependencies(destPath, args = []) {
|
||||
assert(path.isAbsolute(destPath));
|
||||
|
||||
let commandArgs = args;
|
||||
console.log(`installing to ${destPath}`);
|
||||
if (await shouldUseNpm(destPath)) {
|
||||
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||
await spawnAsync('npm', ['install'].concat(commandArgs), destPath);
|
||||
await spawnAsync('npm', ['cache', 'clean', '--force'], destPath);
|
||||
} else if (prod) {
|
||||
console.log('using memory-fs for yarn cache');
|
||||
await spawnAsync(
|
||||
'node',
|
||||
[path.join(__dirname, 'bootstrap-yarn.js'), '--cwd', destPath].concat(
|
||||
commandArgs,
|
||||
),
|
||||
destPath,
|
||||
);
|
||||
} else {
|
||||
await spawnAsync('yarn', ['--cwd', destPath].concat(commandArgs), destPath);
|
||||
await spawnAsync('yarn', ['cache', 'clean'], destPath);
|
||||
@@ -53,21 +82,19 @@ async function runNpmInstall(destPath, args = []) {
|
||||
}
|
||||
|
||||
async function runPackageJsonScript(destPath, scriptName) {
|
||||
try {
|
||||
if (await shouldUseNpm(destPath)) {
|
||||
console.log(`running "npm run ${scriptName}"`);
|
||||
await spawnAsync('npm', ['run', scriptName], destPath);
|
||||
} else {
|
||||
console.log(`running "yarn run ${scriptName}"`);
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
['--cwd', destPath, 'run', scriptName],
|
||||
destPath,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error.message);
|
||||
return false;
|
||||
assert(path.isAbsolute(destPath));
|
||||
const { hasScript, hasPackageLockJson } = await scanParentDirs(
|
||||
destPath,
|
||||
scriptName,
|
||||
);
|
||||
if (!hasScript) return false;
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
console.log(`running "npm run ${scriptName}"`);
|
||||
await spawnAsync('npm', ['run', scriptName], destPath);
|
||||
} else {
|
||||
console.log(`running "yarn run ${scriptName}"`);
|
||||
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath);
|
||||
}
|
||||
|
||||
return true;
|
||||
@@ -75,6 +102,7 @@ async function runPackageJsonScript(destPath, scriptName) {
|
||||
|
||||
module.exports = {
|
||||
runShellScript,
|
||||
runNpmInstall,
|
||||
installDependencies,
|
||||
runNpmInstall: installDependencies,
|
||||
runPackageJsonScript,
|
||||
};
|
||||
|
||||
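A sketch of how a builder would call the reworked helpers (the module path and script name follow conventions seen elsewhere in this diff; the directory is illustrative):

```js
const {
  installDependencies,
  runPackageJsonScript,
} = require('@now/build-utils/fs/run-user-scripts.js');

async function prepare(entrypointDir) {
  // npm is used when a package-lock.json is found while scanning parent
  // directories, yarn otherwise; --prefer-offline is dropped for npm.
  await installDependencies(entrypointDir, ['--prefer-offline']);

  // Returns false (and does nothing) when "now-build" is not declared.
  return runPackageJsonScript(entrypointDir, 'now-build');
}
```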
@@ -1,4 +1,5 @@
|
||||
const assert = require('assert');
|
||||
const Sema = require('async-sema');
|
||||
const { ZipFile } = require('yazl');
|
||||
const streamToBuffer = require('./fs/stream-to-buffer.js');
|
||||
|
||||
@@ -14,6 +15,7 @@ class Lambda {
|
||||
}
|
||||
}
|
||||
|
||||
const sema = new Sema(10);
|
||||
const mtime = new Date(1540000000000);
|
||||
|
||||
async function createLambda({
|
||||
@@ -23,19 +25,33 @@ async function createLambda({
|
||||
assert(typeof handler === 'string', '"handler" is not a string');
|
||||
assert(typeof runtime === 'string', '"runtime" is not a string');
|
||||
assert(typeof environment === 'object', '"environment" is not an object');
|
||||
const zipFile = new ZipFile();
|
||||
|
||||
Object.keys(files).sort().forEach((name) => {
|
||||
const file = files[name];
|
||||
const stream = file.toStream();
|
||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
||||
});
|
||||
await sema.acquire();
|
||||
try {
|
||||
const zipFile = new ZipFile();
|
||||
const zipBuffer = await new Promise((resolve, reject) => {
|
||||
Object.keys(files)
|
||||
.sort()
|
||||
.forEach((name) => {
|
||||
const file = files[name];
|
||||
const stream = file.toStream();
|
||||
stream.on('error', reject);
|
||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
||||
});
|
||||
|
||||
zipFile.end();
|
||||
const zipBuffer = await streamToBuffer(zipFile.outputStream);
|
||||
return new Lambda({
|
||||
zipBuffer, handler, runtime, environment,
|
||||
});
|
||||
zipFile.end();
|
||||
streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
|
||||
});
|
||||
|
||||
return new Lambda({
|
||||
zipBuffer,
|
||||
handler,
|
||||
runtime,
|
||||
environment,
|
||||
});
|
||||
} finally {
|
||||
sema.release();
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "@now/build-utils",
|
||||
"version": "0.4.29-canary.2",
|
||||
"version": "0.4.32",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "2.1.4",
|
||||
@@ -8,8 +9,12 @@
|
||||
"fs-extra": "7.0.0",
|
||||
"glob": "7.1.3",
|
||||
"into-stream": "4.0.0",
|
||||
"memory-fs": "0.4.1",
|
||||
"multistream": "2.1.1",
|
||||
"node-fetch": "2.2.0",
|
||||
"yazl": "2.4.3"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
}
|
||||
}
|
||||
|
||||
5 packages/now-build-utils/test/fixtures/07-cross-install/api/index.js vendored Normal file
@@ -0,0 +1,5 @@
|
||||
const cowsay = require('cowsay').say;
|
||||
|
||||
module.exports = (req, resp) => {
|
||||
resp.end(cowsay({ text: 'cross-cow:RANDOMNESS_PLACEHOLDER' }));
|
||||
};
|
||||
5 packages/now-build-utils/test/fixtures/07-cross-install/api/package.json vendored Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"dependencies": {
|
||||
"lib": "../lib"
|
||||
}
|
||||
}
|
||||
7 packages/now-build-utils/test/fixtures/07-cross-install/lib/package.json vendored Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"name": "lib",
|
||||
"version": "0.0.1",
|
||||
"dependencies": {
|
||||
"cowsay": "*"
|
||||
}
|
||||
}
|
||||
9 packages/now-build-utils/test/fixtures/07-cross-install/now.json vendored Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "api/index.js", "use": "@now/node" }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/api/index.js", "mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
11 packages/now-build-utils/test/fixtures/08-yarn-npm/now.json vendored Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "with-npm/index.js", "use": "@now/node" },
|
||||
{ "src": "with-yarn/index.js", "use": "@now/node" }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/with-npm", "mustContain": "npm:RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/with-yarn", "mustContain": "yarn:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
0 packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/index.js vendored Normal file
14 packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/must-be-npm.js vendored Normal file
@@ -0,0 +1,14 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const execpath = path.basename(process.env.npm_execpath);
|
||||
console.log('execpath', execpath);
|
||||
|
||||
if (execpath === 'npm-cli.js') {
|
||||
fs.writeFileSync(
|
||||
'index.js',
|
||||
'module.exports = (_, resp) => resp.end("npm:RANDOMNESS_PLACEHOLDER");',
|
||||
);
|
||||
} else {
|
||||
throw new Error('npm is expected');
|
||||
}
|
||||
3 packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/package-lock.json generated vendored Normal file
@@ -0,0 +1,3 @@
|
||||
{
|
||||
"lockfileVersion": 1
|
||||
}
|
||||
5 packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/package.json vendored Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"scripts": {
|
||||
"now-build": "node must-be-npm.js"
|
||||
}
|
||||
}
|
||||
0 packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/index.js vendored Normal file
14 packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/must-be-yarn.js vendored Normal file
@@ -0,0 +1,14 @@
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const execpath = path.basename(process.env.npm_execpath);
|
||||
console.log('execpath', execpath);
|
||||
|
||||
if (execpath === 'yarn.js' || execpath === 'yarn') {
|
||||
fs.writeFileSync(
|
||||
'index.js',
|
||||
'module.exports = (_, resp) => resp.end("yarn:RANDOMNESS_PLACEHOLDER");',
|
||||
);
|
||||
} else {
|
||||
throw new Error('yarn is expected');
|
||||
}
|
||||
5 packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/package.json vendored Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"scripts": {
|
||||
"now-build": "node must-be-yarn.js"
|
||||
}
|
||||
}
|
||||
63 packages/now-build-utils/test/test.js Normal file
@@ -0,0 +1,63 @@
|
||||
/* global beforeAll, expect, it, jest */
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
const builderUrl = '@canary';
|
||||
let buildUtilsUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const buildUtilsPath = path.resolve(__dirname, '..');
|
||||
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
|
||||
console.log('buildUtilsUrl', buildUtilsUrl);
|
||||
});
|
||||
|
||||
// own fixtures
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture),
|
||||
),
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
|
||||
// few foreign tests
|
||||
|
||||
const buildersToTestWith = ['now-node-server', 'now-static-build'];
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const builder of buildersToTestWith) {
|
||||
const fixturesPath2 = path.resolve(
|
||||
__dirname,
|
||||
`../../${builder}/test/fixtures`,
|
||||
);
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
||||
// don't run all foreign fixtures, just some
|
||||
if (['01-cowsay', '03-env-vars'].includes(fixture)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${builder}/${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath2, fixture),
|
||||
),
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,4 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
export GOOS=linux
|
||||
export GOARCH=amd64
|
||||
export GOPATH=$HOME/go
|
||||
go get github.com/aws/aws-lambda-go/events
|
||||
go get github.com/aws/aws-lambda-go/lambda
|
||||
go build -o handler main.go
|
||||
|
||||
@@ -1,152 +1,36 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"os"
|
||||
"fmt"
|
||||
"net"
|
||||
"strings"
|
||||
"io/ioutil"
|
||||
now "../../utils/go/bridge"
|
||||
"net/http"
|
||||
"net/http/cgi"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"encoding/json"
|
||||
b64 "encoding/base64"
|
||||
"github.com/aws/aws-lambda-go/events"
|
||||
"github.com/aws/aws-lambda-go/lambda"
|
||||
)
|
||||
|
||||
type Request struct {
|
||||
Host string `json:"host"`
|
||||
Path string `json:"path"`
|
||||
Method string `json:"method"`
|
||||
Headers map[string]string `json:"headers"`
|
||||
Encoding string `json:"encoding,omitempty"`
|
||||
Body string `json:"body"`
|
||||
}
|
||||
|
||||
type Response struct {
|
||||
StatusCode int `json:"statusCode"`
|
||||
Headers map[string]string `json:"headers"`
|
||||
Encoding string `json:"encoding,omitemtpy"`
|
||||
Body string `json:"body"`
|
||||
}
|
||||
|
||||
type ResponseError struct {
|
||||
Code string `json:"code"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type ResponseErrorWrapper struct {
|
||||
Error ResponseError `json:"error"`
|
||||
}
|
||||
|
||||
type CgiHandler struct {
|
||||
http.Handler
|
||||
Dir string
|
||||
Script string
|
||||
}
|
||||
|
||||
func createErrorResponse(message string, code string, statusCode int) (Response, error) {
|
||||
obj := ResponseErrorWrapper{
|
||||
Error: ResponseError{
|
||||
Code: code,
|
||||
Message: message,
|
||||
},
|
||||
}
|
||||
|
||||
body, _ := json.Marshal(obj)
|
||||
|
||||
return Response{
|
||||
StatusCode: statusCode,
|
||||
Headers: map[string]string{
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
Body: string(body),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func (h *CgiHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||
cgih := cgi.Handler{
|
||||
handler := cgi.Handler{
|
||||
Path: h.Script,
|
||||
Root: "/" + h.Script,
|
||||
Dir: h.Dir,
|
||||
Env: []string{"SERVER_PORT=443"},
|
||||
Dir: h.Dir,
|
||||
Env: []string{
|
||||
"HTTPS=on",
|
||||
"SERVER_PORT=443",
|
||||
"SERVER_SOFTWARE=@now/cgi",
|
||||
},
|
||||
}
|
||||
cgih.ServeHTTP(w, r)
|
||||
handler.ServeHTTP(w, r)
|
||||
}
|
||||
|
||||
func main() {
|
||||
l, err := net.Listen("tcp", ":0")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
workdir, _ := filepath.Abs(".")
|
||||
script := os.Getenv("SCRIPT_FILENAME")
|
||||
h := &CgiHandler{nil, workdir, script}
|
||||
|
||||
http.Handle("/", h)
|
||||
go http.Serve(l, nil)
|
||||
|
||||
handler := func(_req events.APIGatewayProxyRequest) (Response, error) {
|
||||
var req Request
|
||||
|
||||
err := json.Unmarshal([]byte(_req.Body), &req)
|
||||
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return createErrorResponse("Invalid payload", "bad_request", 400)
|
||||
}
|
||||
|
||||
if req.Encoding == "base64" {
|
||||
decoded, _ := b64.StdEncoding.DecodeString(req.Body)
|
||||
req.Body = string(decoded)
|
||||
}
|
||||
|
||||
url := "http://" + l.Addr().String() + req.Path
|
||||
|
||||
internalReq, err := http.NewRequest(req.Method, url, strings.NewReader(req.Body))
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return createErrorResponse("Bad gateway internal req failed", "bad_gateway", 502)
|
||||
}
|
||||
|
||||
for k, v := range req.Headers {
|
||||
internalReq.Header.Add(k, v)
|
||||
if strings.ToLower(k) == "host" {
|
||||
internalReq.Host = v
|
||||
}
|
||||
}
|
||||
|
||||
client := &http.Client{}
|
||||
internalRes, err := client.Do(internalReq)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return createErrorResponse("Bad gateway internal req Do failed", "bad_gateway", 502)
|
||||
}
|
||||
defer internalRes.Body.Close()
|
||||
|
||||
resHeaders := make(map[string]string, len(internalRes.Header))
|
||||
for k, v := range internalRes.Header {
|
||||
// FIXME: support multiple values via concatenating with ','
|
||||
// see RFC 7230, section 3.2.2
|
||||
resHeaders[k] = v[0]
|
||||
}
|
||||
|
||||
bodyBytes, err := ioutil.ReadAll(internalRes.Body)
|
||||
if err != nil {
|
||||
return createErrorResponse("Bad gateway ReadAll bytes from response failed", "bad_gateway", 502)
|
||||
}
|
||||
|
||||
resBody := b64.StdEncoding.EncodeToString(bodyBytes)
|
||||
|
||||
return Response{
|
||||
StatusCode: internalRes.StatusCode,
|
||||
Headers: resHeaders,
|
||||
Encoding: "base64",
|
||||
Body: resBody,
|
||||
}, nil
|
||||
}
|
||||
|
||||
lambda.Start(handler)
|
||||
handler := &CgiHandler{nil, workdir, script}
|
||||
now.Start(handler)
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "@now/cgi",
|
||||
"version": "0.0.12-canary.1",
|
||||
"version": "0.0.15",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"test": "best -I test/*.js",
|
||||
"prepublish": "./build.sh"
|
||||
|
||||
1 packages/now-go/.gitignore vendored
@@ -1,3 +1,4 @@
|
||||
node_modules
|
||||
*.log
|
||||
launcher
|
||||
bin
|
||||
|
||||
@@ -9,7 +9,7 @@ const downloadGit = require('lambda-git');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const downloadGoBin = require('./download-go-bin');
|
||||
|
||||
// creates a `$GOPATH` direcotry tree, as per
|
||||
// creates a `$GOPATH` directory tree, as per
|
||||
// `go help gopath`'s instructions.
|
||||
// without this, Go won't recognize the `$GOPATH`
|
||||
async function createGoPathTree(goPath) {
|
||||
|
||||
@@ -1,140 +1,10 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
b64 "encoding/base64"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"github.com/aws/aws-lambda-go/events"
|
||||
"github.com/aws/aws-lambda-go/lambda"
|
||||
"io/ioutil"
|
||||
"net"
|
||||
now "../../utils/go/bridge"
|
||||
"net/http"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Request struct {
|
||||
Host string `json:"host"`
|
||||
Path string `json:"path"`
|
||||
Method string `json:"method"`
|
||||
Headers map[string]string `json:"headers"`
|
||||
Encoding string `json:"encoding,omitempty"`
|
||||
Body string `json:"body"`
|
||||
}
|
||||
|
||||
type Response struct {
|
||||
StatusCode int `json:"statusCode"`
|
||||
Headers map[string]string `json:"headers"`
|
||||
Encoding string `json:"encoding,omitempty"`
|
||||
Body string `json:"body"`
|
||||
}
|
||||
|
||||
type ResponseError struct {
|
||||
Code string `json:"code"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
type ResponseErrorWrapper struct {
|
||||
Error ResponseError `json:"error"`
|
||||
}
|
||||
|
||||
func createErrorResponse(message string, code string, statusCode int) (Response, error) {
|
||||
obj := ResponseErrorWrapper{
|
||||
Error: ResponseError{
|
||||
Code: code,
|
||||
Message: message,
|
||||
},
|
||||
}
|
||||
|
||||
body, _ := json.Marshal(obj)
|
||||
|
||||
return Response{
|
||||
StatusCode: statusCode,
|
||||
Headers: map[string]string{
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
Body: string(body),
|
||||
}, nil
|
||||
}
|
||||
|
||||
func main() {
|
||||
l, err := net.Listen("tcp", ":0")
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
||||
http.HandleFunc("/", __NOW_HANDLER_FUNC_NAME)
|
||||
go http.Serve(l, nil)
|
||||
|
||||
handler := func(_req events.APIGatewayProxyRequest) (Response, error) {
|
||||
var req Request
|
||||
|
||||
err := json.Unmarshal([]byte(_req.Body), &req)
|
||||
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return createErrorResponse("Invalid payload", "bad_request", 400)
|
||||
}
|
||||
|
||||
if req.Encoding == "base64" {
|
||||
decoded, _ := b64.StdEncoding.DecodeString(req.Body)
|
||||
req.Body = string(decoded)
|
||||
}
|
||||
|
||||
url := "http://" + l.Addr().String() + req.Path
|
||||
|
||||
internalReq, err := http.NewRequest(req.Method, url, strings.NewReader(req.Body))
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return createErrorResponse("Bad gateway", "bad_gateway", 502)
|
||||
}
|
||||
|
||||
for k, v := range req.Headers {
|
||||
internalReq.Header.Add(k, v)
|
||||
if strings.ToLower(k) == "host" {
|
||||
req.Host = v
|
||||
}
|
||||
}
|
||||
|
||||
client := &http.Client{}
|
||||
internalRes, err := client.Do(internalReq)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return createErrorResponse("Bad gateway", "bad_gateway", 502)
|
||||
}
|
||||
defer internalRes.Body.Close()
|
||||
|
||||
resHeaders := make(map[string]string, len(internalRes.Header))
|
||||
var resEncoding string
|
||||
for k, v := range internalRes.Header {
|
||||
// FIXME: support multiple values via concatenating with ','
|
||||
// see RFC 7230, section 3.2.2
|
||||
if strings.ToLower(k) == "x-now-response-encoding" {
|
||||
// we don't want to send this header down
|
||||
resEncoding = v[0]
|
||||
} else {
|
||||
resHeaders[k] = v[0]
|
||||
}
|
||||
}
|
||||
|
||||
bodyBytes, err := ioutil.ReadAll(internalRes.Body)
|
||||
if err != nil {
|
||||
return createErrorResponse("Bad gateway", "bad_gateway", 502)
|
||||
}
|
||||
|
||||
var resBody string
|
||||
if resEncoding == "base64" {
|
||||
resBody = b64.StdEncoding.EncodeToString(bodyBytes)
|
||||
} else {
|
||||
resBody = string(bodyBytes)
|
||||
}
|
||||
|
||||
return Response{
|
||||
StatusCode: internalRes.StatusCode,
|
||||
Headers: resHeaders,
|
||||
Encoding: resEncoding,
|
||||
Body: resBody,
|
||||
}, nil
|
||||
}
|
||||
|
||||
lambda.Start(handler)
|
||||
now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "@now/go",
|
||||
"version": "0.2.9-canary.1",
|
||||
"version": "0.2.12",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"test": "best -I test/*.js",
|
||||
"prepublish": "./build.sh"
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "@now/html-minifier",
|
||||
"version": "1.0.5-canary.0",
|
||||
"version": "1.0.7",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"html-minifier": "3.5.21"
|
||||
},
|
||||
|
||||
1 packages/now-lambda/.npmignore Normal file
@@ -0,0 +1 @@
|
||||
/test
|
||||
@@ -1,7 +1,11 @@
|
||||
{
|
||||
"name": "@now/lambda",
|
||||
"version": "0.4.6-canary.0",
|
||||
"version": "0.4.9",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
}
|
||||
}
|
||||
|
||||
BIN packages/now-lambda/test/fixtures/01-cowsay/index.zip vendored Normal file
Binary file not shown.
11 packages/now-lambda/test/fixtures/01-cowsay/now.json vendored Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } },
|
||||
{ "src": "subdirectory/index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/", "mustContain": "cow:NO_REPLACE_TO_AVOID_CRC_MISMATCH" },
|
||||
{ "path": "/subdirectory/", "mustContain": "yoda:NO_REPLACE_TO_AVOID_CRC_MISMATCH" }
|
||||
]
|
||||
}
|
||||
BIN packages/now-lambda/test/fixtures/01-cowsay/subdirectory/index.zip vendored Normal file
Binary file not shown.
33 packages/now-lambda/test/test.js Normal file
@@ -0,0 +1,33 @@
|
||||
/* global beforeAll, expect, it, jest */
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
const buildUtilsUrl = '@canary';
|
||||
let builderUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const builderPath = path.resolve(__dirname, '..');
|
||||
builderUrl = await packAndDeploy(builderPath);
|
||||
console.log('builderUrl', builderUrl);
|
||||
});
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture),
|
||||
),
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
1 packages/now-md/.npmignore Normal file
@@ -0,0 +1 @@
|
||||
/test
|
||||
@@ -22,12 +22,17 @@ exports.build = async ({ files, entrypoint, config }) => {
|
||||
.use(markdown)
|
||||
.use(remark2rehype)
|
||||
.use(doc, {
|
||||
title, language, meta, css,
|
||||
title,
|
||||
language,
|
||||
meta,
|
||||
css,
|
||||
})
|
||||
.use(format)
|
||||
.use(html);
|
||||
|
||||
const result = await FileBlob.fromStream({ stream: stream.pipe(unifiedStream(processor)) });
|
||||
const result = await FileBlob.fromStream({
|
||||
stream: stream.pipe(unifiedStream(processor)),
|
||||
});
|
||||
|
||||
console.log(result.data.toString());
|
||||
|
||||
|
||||
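For context on the hunk above: @now/md pipes the markdown entrypoint through a unified processor (parse markdown, convert to rehype, wrap in a full HTML document with the user's title/language/meta/css, format, stringify) and captures the output stream as a FileBlob. A minimal standalone sketch of the same chain, assuming the usual packages behind the markdown, remark2rehype, doc, format and html bindings (remark-parse, remark-rehype, rehype-document, rehype-format, rehype-stringify):

const unified = require('unified');
const markdown = require('remark-parse');
const remark2rehype = require('remark-rehype');
const doc = require('rehype-document');
const format = require('rehype-format');
const html = require('rehype-stringify');

const processor = unified()
  .use(markdown)
  .use(remark2rehype)
  .use(doc, { title: 'Example', language: 'en' })
  .use(format)
  .use(html);

// process() resolves with a vfile whose string value is the finished HTML document
processor.process('# Hello\n\nworld').then((file) => {
  console.log(String(file));
});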
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "@now/md",
|
||||
"version": "0.4.6-canary.0",
|
||||
"version": "0.4.9",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"rehype-document": "^2.2.0",
|
||||
"rehype-format": "^2.3.0",
|
||||
@@ -12,5 +13,8 @@
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
}
|
||||
}
|
||||
|
||||
5 packages/now-md/test/fixtures/01-cowsay/index.md vendored Normal file
@@ -0,0 +1,5 @@
|
||||
# Testing Markdown
|
||||
|
||||
cow:RANDOMNESS_PLACEHOLDER
|
||||
|
||||
[Wow a link!](https://zeit.co)
|
||||
11 packages/now-md/test/fixtures/01-cowsay/now.json vendored Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.md", "use": "@now/md" },
|
||||
{ "src": "subdirectory/index.md", "use": "@now/md" }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/subdirectory/", "mustContain": "yoda:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
5 packages/now-md/test/fixtures/01-cowsay/subdirectory/index.md vendored Normal file
@@ -0,0 +1,5 @@
|
||||
# Testing Markdown
|
||||
|
||||
yoda:RANDOMNESS_PLACEHOLDER
|
||||
|
||||
[Wow a link!](https://zeit.co)
|
||||
33 packages/now-md/test/test.js Normal file
@@ -0,0 +1,33 @@
|
||||
/* global beforeAll, expect, it, jest */
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
const buildUtilsUrl = '@canary';
|
||||
let builderUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const builderPath = path.resolve(__dirname, '..');
|
||||
builderUrl = await packAndDeploy(builderPath);
|
||||
console.log('builderUrl', builderUrl);
|
||||
});
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture),
|
||||
),
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
1 packages/now-mdx-deck/.npmignore Normal file
@@ -0,0 +1 @@
|
||||
/test
|
||||
@@ -6,26 +6,30 @@ const glob = require('@now/build-utils/fs/glob.js');
|
||||
const path = require('path');
|
||||
const { runNpmInstall } = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
const writeFile = promisify(fs.writeFile);
|
||||
|
||||
exports.build = async ({ files, entrypoint, workPath }) => {
|
||||
console.log('downloading user files...');
|
||||
const downloadedFiles = await download(files, workPath);
|
||||
console.log('writing package.json...');
|
||||
const packageJson = { dependencies: { 'mdx-deck': '1.7.7' } };
|
||||
const packageJson = { dependencies: { 'mdx-deck': '1.7.15' } };
|
||||
const packageJsonPath = path.join(workPath, 'package.json');
|
||||
await writeFile(packageJsonPath, JSON.stringify(packageJson));
|
||||
console.log('running npm install...');
|
||||
console.log('installing dependencies...');
|
||||
process.env.PUPPETEER_SKIP_CHROMIUM_DOWNLOAD = '1'; // TODO opts argument for runNpmInstall
|
||||
await runNpmInstall(path.dirname(packageJsonPath), ['--prod', '--prefer-offline']);
|
||||
await runNpmInstall(path.dirname(packageJsonPath), [
|
||||
'--prod',
|
||||
'--prefer-offline',
|
||||
]);
|
||||
console.log('building...');
|
||||
const outDir = await getWritableDirectory();
|
||||
const entrypointFsPath = downloadedFiles[entrypoint].fsPath;
|
||||
const mountpoint = path.dirname(entrypoint);
|
||||
|
||||
const build = require(path.join(workPath, 'node_modules/mdx-deck/lib/build.js'));
|
||||
const build = require(path.join(
|
||||
workPath,
|
||||
'node_modules/mdx-deck/lib/build.js',
|
||||
));
|
||||
|
||||
await build({
|
||||
html: true,
|
||||
@@ -41,15 +45,15 @@ exports.build = async ({ files, entrypoint, workPath }) => {
|
||||
|
||||
exports.prepareCache = async ({ cachePath }) => {
|
||||
console.log('writing package.json...');
|
||||
const packageJson = { dependencies: { 'mdx-deck': '1.7.7' } };
|
||||
const packageJson = { dependencies: { 'mdx-deck': '1.7.15' } };
|
||||
const packageJsonPath = path.join(cachePath, 'package.json');
|
||||
await writeFile(packageJsonPath, JSON.stringify(packageJson));
|
||||
console.log('running npm install...');
|
||||
await runNpmInstall(path.dirname(packageJsonPath), ['--prod']);
|
||||
|
||||
return {
|
||||
...await glob('node_modules/**', cachePath),
|
||||
...await glob('package-lock.json', cachePath),
|
||||
...await glob('yarn.lock', cachePath),
|
||||
...(await glob('node_modules/**', cachePath)),
|
||||
...(await glob('package-lock.json', cachePath)),
|
||||
...(await glob('yarn.lock', cachePath)),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,7 +1,11 @@
|
||||
{
|
||||
"name": "@now/mdx-deck",
|
||||
"version": "0.4.15-canary.0",
|
||||
"version": "0.4.18",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "jest"
|
||||
}
|
||||
}
|
||||
|
||||
5 packages/now-mdx-deck/test/fixtures/01-cowsay/index.mdx vendored Normal file
@@ -0,0 +1,5 @@
|
||||
# Testing Markdown
|
||||
---
|
||||
cow:RANDOMNESS_PLACEHOLDER
|
||||
---
|
||||
[Wow a link!](https://zeit.co)
|
||||
11 packages/now-mdx-deck/test/fixtures/01-cowsay/now.json vendored Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.mdx", "use": "@now/mdx-deck" },
|
||||
{ "src": "subdirectory/index.mdx", "use": "@now/mdx-deck" }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/subdirectory/", "mustContain": "yoda:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
5 packages/now-mdx-deck/test/fixtures/01-cowsay/subdirectory/index.mdx vendored Normal file
@@ -0,0 +1,5 @@
|
||||
# Testing Markdown
|
||||
---
|
||||
yoda:RANDOMNESS_PLACEHOLDER
|
||||
---
|
||||
[Wow a link!](https://zeit.co)
|
||||
33 packages/now-mdx-deck/test/test.js Normal file
@@ -0,0 +1,33 @@
|
||||
/* global beforeAll, expect, it, jest */
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||
|
||||
jest.setTimeout(2 * 60 * 1000);
|
||||
const buildUtilsUrl = '@canary';
|
||||
let builderUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const builderPath = path.resolve(__dirname, '..');
|
||||
builderUrl = await packAndDeploy(builderPath);
|
||||
console.log('builderUrl', builderUrl);
|
||||
});
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture),
|
||||
),
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
@@ -4,216 +4,311 @@ const FileFsRef = require('@now/build-utils/file-fs-ref.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob');
|
||||
const path = require('path');
|
||||
const { readFile, writeFile, unlink } = require('fs.promised');
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
const {
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const semver = require('semver');
|
||||
const nextLegacyVersions = require('./legacy-versions');
|
||||
const {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
moveEntryDirectoryToRoot,
|
||||
normalizePackageJson,
|
||||
excludeStaticDirectory,
|
||||
onlyStaticDirectory,
|
||||
} = require('./utils');
|
||||
|
||||
// Exclude certain files from the files object
|
||||
function excludeFiles(files, matchFn) {
|
||||
return Object.keys(files).reduce((newFiles, fileName) => {
|
||||
if (matchFn(fileName)) {
|
||||
return newFiles;
|
||||
}
|
||||
return {
|
||||
...newFiles,
|
||||
[fileName]: files[fileName],
|
||||
};
|
||||
}, {});
|
||||
/** @typedef { import('@now/build-utils/file-ref').Files } Files */
|
||||
/** @typedef { import('@now/build-utils/fs/download').DownloadedFiles } DownloadedFiles */
|
||||
|
||||
/**
|
||||
* @typedef {Object} BuildParamsType
|
||||
* @property {Files} files - Files object
|
||||
* @property {string} entrypoint - Entrypoint specified for the builder
|
||||
* @property {string} workPath - Working directory for this build
|
||||
*/
|
||||
|
||||
/**
|
||||
* Read package.json from files
|
||||
* @param {DownloadedFiles} files
|
||||
*/
|
||||
async function readPackageJson(files) {
|
||||
if (!files['package.json']) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const packageJsonPath = files['package.json'].fsPath;
|
||||
return JSON.parse(await readFile(packageJsonPath, 'utf8'));
|
||||
}
|
||||
|
||||
function shouldExcludeFile(entryDirectory) {
|
||||
return (file) => {
|
||||
// If the file is not in the entry directory
|
||||
if (entryDirectory !== '.' && !file.startsWith(entryDirectory)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Exclude static directory
|
||||
if (file.startsWith(path.join(entryDirectory, 'static'))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (file === 'package-lock.json') {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (file === 'yarn.lock') {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
}
|
||||
|
||||
exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
if (
|
||||
!/package\.json$/.exec(entrypoint)
|
||||
&& !/next\.config\.js$/.exec(entrypoint)
|
||||
) {
|
||||
throw new Error(
|
||||
'Specified "src" for "@now/next" has to be "package.json" or "next.config.js"',
|
||||
);
|
||||
}
|
||||
|
||||
console.log('downloading user files...');
|
||||
const entryDirectory = path.dirname(entrypoint);
|
||||
const filesToDownload = excludeFiles(
|
||||
files,
|
||||
shouldExcludeFile(entryDirectory),
|
||||
);
|
||||
const entrypointHandledFilesToDownload = rename(filesToDownload, (file) => {
|
||||
if (entryDirectory !== '.') {
|
||||
return file.replace(new RegExp(`^${entryDirectory}/`), '');
|
||||
}
|
||||
return file;
|
||||
});
|
||||
let downloadedFiles = await download(
|
||||
entrypointHandledFilesToDownload,
|
||||
workPath,
|
||||
);
|
||||
|
||||
let packageJson = {};
|
||||
if (downloadedFiles['package.json']) {
|
||||
console.log('found package.json, overwriting');
|
||||
const packageJsonPath = downloadedFiles['package.json'].fsPath;
|
||||
packageJson = JSON.parse(await readFile(packageJsonPath, 'utf8'));
|
||||
}
|
||||
|
||||
packageJson = {
|
||||
...packageJson,
|
||||
dependencies: {
|
||||
...packageJson.dependencies,
|
||||
'next-server': 'canary',
|
||||
},
|
||||
devDependencies: {
|
||||
...packageJson.devDependencies,
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
...packageJson.scripts,
|
||||
'now-build': 'next build',
|
||||
},
|
||||
};
|
||||
|
||||
if (!packageJson.dependencies.react) {
|
||||
console.log(
|
||||
'"react" not found in dependencies, adding to "package.json" "dependencies"',
|
||||
);
|
||||
packageJson.dependencies.react = 'latest';
|
||||
}
|
||||
if (!packageJson.dependencies['react-dom']) {
|
||||
console.log(
|
||||
'"react-dom" not found in dependencies, adding to "package.json" "dependencies"',
|
||||
);
|
||||
packageJson.dependencies['react-dom'] = 'latest';
|
||||
}
|
||||
|
||||
// in case the user has `next` on their `dependencies`, we remove it
|
||||
delete packageJson.dependencies.next;
|
||||
|
||||
/**
|
||||
* Write package.json
|
||||
* @param {string} workPath
|
||||
* @param {Object} packageJson
|
||||
*/
|
||||
async function writePackageJson(workPath, packageJson) {
|
||||
await writeFile(
|
||||
path.join(workPath, 'package.json'),
|
||||
JSON.stringify(packageJson, null, 2),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Write .npmrc with npm auth token
|
||||
* @param {string} workPath
|
||||
* @param {string} token
|
||||
*/
|
||||
async function writeNpmRc(workPath, token) {
|
||||
await writeFile(
|
||||
path.join(workPath, '.npmrc'),
|
||||
`//registry.npmjs.org/:_authToken=${token}`,
|
||||
);
|
||||
}
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '5mb',
|
||||
};
|
||||
|
||||
/**
|
||||
* @param {BuildParamsType} buildParams
|
||||
* @returns {Promise<Files>}
|
||||
*/
|
||||
exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
validateEntrypoint(entrypoint);
|
||||
|
||||
console.log('downloading user files...');
|
||||
const entryDirectory = path.dirname(entrypoint);
|
||||
const filesOnlyEntryDirectory = includeOnlyEntryDirectory(
|
||||
files,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithEntryDirectoryRoot = moveEntryDirectoryToRoot(
|
||||
filesOnlyEntryDirectory,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithoutStaticDirectory = excludeStaticDirectory(
|
||||
filesWithEntryDirectoryRoot,
|
||||
);
|
||||
const downloadedFiles = await download(filesWithoutStaticDirectory, workPath);
|
||||
|
||||
const pkg = await readPackageJson(downloadedFiles);
|
||||
|
||||
let nextVersion;
|
||||
if (pkg.dependencies && pkg.dependencies.next) {
|
||||
nextVersion = pkg.dependencies.next;
|
||||
} else if (pkg.devDependencies && pkg.devDependencies.next) {
|
||||
nextVersion = pkg.devDependencies.next;
|
||||
}
|
||||
|
||||
if (!nextVersion) {
|
||||
throw new Error(
|
||||
'No Next.js version could be detected in "package.json". Make sure `"next"` is installed in "dependencies" or "devDependencies"',
|
||||
);
|
||||
}
|
||||
|
||||
const isLegacy = (() => {
|
||||
// If version is using the dist-tag instead of a version range
|
||||
if (nextVersion === 'canary' || nextVersion === 'latest') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the version is an exact match with the legacy versions
|
||||
if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
|
||||
// When the version can't be matched with legacy versions, it must be a newer version
|
||||
if (maxSatisfying === null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
})();
|
||||
|
||||
console.log(`MODE: ${isLegacy ? 'legacy' : 'serverless'}`);
|
||||
|
||||
if (isLegacy) {
|
||||
try {
|
||||
await unlink(path.join(workPath, 'yarn.lock'));
|
||||
} catch (err) {
|
||||
console.log('no yarn.lock removed');
|
||||
}
|
||||
|
||||
try {
|
||||
await unlink(path.join(workPath, 'package-lock.json'));
|
||||
} catch (err) {
|
||||
console.log('no package-lock.json removed');
|
||||
}
|
||||
|
||||
console.warn(
|
||||
"WARNING: your application is being deployed in @now/next's legacy mode.",
|
||||
);
|
||||
console.log('normalizing package.json');
|
||||
const packageJson = normalizePackageJson(pkg);
|
||||
console.log('normalized package.json result: ', packageJson);
|
||||
await writePackageJson(workPath, packageJson);
|
||||
} else if (!pkg.scripts || !pkg.scripts['now-build']) {
|
||||
console.warn(
|
||||
'WARNING: "now-build" script not found. Adding \'"now-build": "next build"\' to "package.json" automatically',
|
||||
);
|
||||
pkg.scripts = {
|
||||
'now-build': 'next build',
|
||||
...(pkg.scripts || {}),
|
||||
};
|
||||
console.log('normalized package.json result: ', pkg);
|
||||
await writePackageJson(workPath, pkg);
|
||||
}
|
||||
|
||||
if (process.env.NPM_AUTH_TOKEN) {
|
||||
console.log('found NPM_AUTH_TOKEN in environment, creating .npmrc');
|
||||
await writeFile(
|
||||
path.join(workPath, '.npmrc'),
|
||||
`//registry.npmjs.org/:_authToken=${process.env.NPM_AUTH_TOKEN}`,
|
||||
);
|
||||
await writeNpmRc(workPath, process.env.NPM_AUTH_TOKEN);
|
||||
}
|
||||
downloadedFiles = await glob('**', workPath);
|
||||
|
||||
console.log('running npm install...');
|
||||
console.log('installing dependencies...');
|
||||
await runNpmInstall(workPath, ['--prefer-offline']);
|
||||
console.log('running user script...');
|
||||
await runPackageJsonScript(workPath, 'now-build');
|
||||
console.log('running npm install --production...');
|
||||
await runNpmInstall(workPath, ['--prefer-offline', '--production']);
|
||||
|
||||
if (isLegacy) {
|
||||
console.log('running npm install --production...');
|
||||
await runNpmInstall(workPath, ['--prefer-offline', '--production']);
|
||||
}
|
||||
|
||||
if (process.env.NPM_AUTH_TOKEN) {
|
||||
await unlink(path.join(workPath, '.npmrc'));
|
||||
}
|
||||
downloadedFiles = await glob('**', workPath);
|
||||
|
||||
console.log('preparing lambda files...');
|
||||
let buildId;
|
||||
try {
|
||||
buildId = await readFile(path.join(workPath, '.next', 'BUILD_ID'), 'utf8');
|
||||
} catch (err) {
|
||||
console.error(
|
||||
'BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"',
|
||||
);
|
||||
throw new Error('Missing BUILD_ID');
|
||||
}
|
||||
const dotNextRootFiles = await glob('.next/*', workPath);
|
||||
const dotNextServerRootFiles = await glob('.next/server/*', workPath);
|
||||
const nodeModules = excludeFiles(
|
||||
await glob('node_modules/**', workPath),
|
||||
file => file.startsWith('node_modules/.cache'),
|
||||
);
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
};
|
||||
const nextFiles = {
|
||||
...nodeModules,
|
||||
...dotNextRootFiles,
|
||||
...dotNextServerRootFiles,
|
||||
...launcherFiles,
|
||||
};
|
||||
if (downloadedFiles['next.config.js']) {
|
||||
nextFiles['next.config.js'] = downloadedFiles['next.config.js'];
|
||||
}
|
||||
const pages = await glob(
|
||||
'**/*.js',
|
||||
path.join(workPath, '.next', 'server', 'static', buildId, 'pages'),
|
||||
);
|
||||
const launcherPath = path.join(__dirname, 'launcher.js');
|
||||
const launcherData = await readFile(launcherPath, 'utf8');
|
||||
|
||||
const lambdas = {};
|
||||
await Promise.all(
|
||||
Object.keys(pages).map(async (page) => {
|
||||
// These default pages don't have to be handled as they'd always 404
|
||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pathname = page.replace(/\.js$/, '');
|
||||
const launcher = launcherData.replace(
|
||||
'PATHNAME_PLACEHOLDER',
|
||||
`/${pathname.replace(/(^|\/)index$/, '')}`,
|
||||
if (isLegacy) {
|
||||
const filesAfterBuild = await glob('**', workPath);
|
||||
|
||||
console.log('preparing lambda files...');
|
||||
let buildId;
|
||||
try {
|
||||
buildId = await readFile(
|
||||
path.join(workPath, '.next', 'BUILD_ID'),
|
||||
'utf8',
|
||||
);
|
||||
} catch (err) {
|
||||
console.error(
|
||||
'BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"',
|
||||
);
|
||||
throw new Error('Missing BUILD_ID');
|
||||
}
|
||||
const dotNextRootFiles = await glob('.next/*', workPath);
|
||||
const dotNextServerRootFiles = await glob('.next/server/*', workPath);
|
||||
const nodeModules = excludeFiles(
|
||||
await glob('node_modules/**', workPath),
|
||||
file => file.startsWith('node_modules/.cache'),
|
||||
);
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
};
|
||||
const nextFiles = {
|
||||
...nodeModules,
|
||||
...dotNextRootFiles,
|
||||
...dotNextServerRootFiles,
|
||||
...launcherFiles,
|
||||
};
|
||||
if (filesAfterBuild['next.config.js']) {
|
||||
nextFiles['next.config.js'] = filesAfterBuild['next.config.js'];
|
||||
}
|
||||
const pages = await glob(
|
||||
'**/*.js',
|
||||
path.join(workPath, '.next', 'server', 'static', buildId, 'pages'),
|
||||
);
|
||||
const launcherPath = path.join(__dirname, 'legacy-launcher.js');
|
||||
const launcherData = await readFile(launcherPath, 'utf8');
|
||||
|
||||
const pageFiles = {
|
||||
[`.next/server/static/${buildId}/pages/_document.js`]: downloadedFiles[
|
||||
`.next/server/static/${buildId}/pages/_document.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_app.js`]: downloadedFiles[
|
||||
`.next/server/static/${buildId}/pages/_app.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_error.js`]: downloadedFiles[
|
||||
`.next/server/static/${buildId}/pages/_error.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/${page}`]: downloadedFiles[
|
||||
`.next/server/static/${buildId}/pages/${page}`
|
||||
],
|
||||
};
|
||||
await Promise.all(
|
||||
Object.keys(pages).map(async (page) => {
|
||||
// These default pages don't have to be handled as they'd always 404
|
||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||
return;
|
||||
}
|
||||
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
...nextFiles,
|
||||
...pageFiles,
|
||||
'now__launcher.js': new FileBlob({ data: launcher }),
|
||||
},
|
||||
handler: 'now__launcher.launcher',
|
||||
runtime: 'nodejs8.10',
|
||||
});
|
||||
}),
|
||||
);
|
||||
const pathname = page.replace(/\.js$/, '');
|
||||
const launcher = launcherData.replace(
|
||||
'PATHNAME_PLACEHOLDER',
|
||||
`/${pathname.replace(/(^|\/)index$/, '')}`,
|
||||
);
|
||||
|
||||
const pageFiles = {
|
||||
[`.next/server/static/${buildId}/pages/_document.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_document.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_app.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_app.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/_error.js`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/_error.js`
|
||||
],
|
||||
[`.next/server/static/${buildId}/pages/${page}`]: filesAfterBuild[
|
||||
`.next/server/static/${buildId}/pages/${page}`
|
||||
],
|
||||
};
|
||||
|
||||
console.log(`Creating lambda for page: "${page}"...`);
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
...nextFiles,
|
||||
...pageFiles,
|
||||
'now__launcher.js': new FileBlob({ data: launcher }),
|
||||
},
|
||||
handler: 'now__launcher.launcher',
|
||||
runtime: 'nodejs8.10',
|
||||
});
|
||||
console.log(`Created lambda for page: "${page}"`);
|
||||
}),
|
||||
);
|
||||
} else {
|
||||
console.log('preparing lambda files...');
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
'now__launcher.js': new FileFsRef({
|
||||
fsPath: path.join(__dirname, 'launcher.js'),
|
||||
}),
|
||||
};
|
||||
const pages = await glob(
|
||||
'**/*.js',
|
||||
path.join(workPath, '.next', 'serverless', 'pages'),
|
||||
);
|
||||
|
||||
const pageKeys = Object.keys(pages);
|
||||
|
||||
if (pageKeys.length === 0) {
|
||||
throw new Error(
|
||||
'No serverless pages were built. https://err.sh/zeit/now-builders/now-next-no-serverless-pages-built',
|
||||
);
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
pageKeys.map(async (page) => {
|
||||
// These default pages don't have to be handled as they'd always 404
|
||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pathname = page.replace(/\.js$/, '');
|
||||
|
||||
console.log(`Creating lambda for page: "${page}"...`);
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
...launcherFiles,
|
||||
'page.js': pages[page],
|
||||
},
|
||||
handler: 'now__launcher.launcher',
|
||||
runtime: 'nodejs8.10',
|
||||
});
|
||||
console.log(`Created lambda for page: "${page}"`);
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
const nextStaticFiles = await glob(
|
||||
'**',
|
||||
@@ -227,28 +322,14 @@ exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
{},
|
||||
);
|
||||
|
||||
return { ...lambdas, ...staticFiles };
|
||||
};
|
||||
|
||||
exports.prepareCache = async ({ files, cachePath, workPath }) => {
|
||||
console.log('downloading user files...');
|
||||
await download(files, cachePath);
|
||||
await download(await glob('.next/**', workPath), cachePath);
|
||||
await download(await glob('node_modules/**', workPath), cachePath);
|
||||
|
||||
console.log('.next folder contents', await glob('.next/**', cachePath));
|
||||
console.log(
|
||||
'.cache folder contents',
|
||||
await glob('node_modules/.cache/**', cachePath),
|
||||
const nextStaticDirectory = onlyStaticDirectory(filesWithEntryDirectoryRoot);
|
||||
const staticDirectoryFiles = Object.keys(nextStaticDirectory).reduce(
|
||||
(mappedFiles, file) => ({
|
||||
...mappedFiles,
|
||||
[path.join(entryDirectory, file)]: nextStaticDirectory[file],
|
||||
}),
|
||||
{},
|
||||
);
|
||||
|
||||
console.log('running npm install...');
|
||||
await runNpmInstall(cachePath);
|
||||
|
||||
return {
|
||||
...(await glob('.next/records.json', cachePath)),
|
||||
...(await glob('.next/server/records.json', cachePath)),
|
||||
...(await glob('node_modules/**', cachePath)),
|
||||
...(await glob('yarn.lock', cachePath)),
|
||||
};
|
||||
return { ...lambdas, ...staticFiles, ...staticDirectoryFiles };
|
||||
};
|
||||
|
||||
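The legacy/serverless decision above reduces to three rules: a dist-tag ("canary" or "latest") is always modern, an exact match against the known legacy releases is legacy, and a version range is legacy only if it can still be satisfied by a legacy release. A standalone sketch of that check, using a deliberately tiny stand-in for legacy-versions.js:

const semver = require('semver');

// stand-in for packages/now-next/legacy-versions.js, trimmed for illustration
const nextLegacyVersions = ['6.1.2', '7.0.2'];

function isLegacyNext(nextVersion) {
  // dist-tags always point at a modern, serverless-capable release
  if (nextVersion === 'canary' || nextVersion === 'latest') return false;
  // an exact legacy version means legacy mode
  if (nextLegacyVersions.indexOf(nextVersion) !== -1) return true;
  // a range that can still resolve to a legacy release is treated as legacy
  return semver.maxSatisfying(nextLegacyVersions, nextVersion) !== null;
}

console.log(isLegacyNext('canary'));  // false
console.log(isLegacyNext('7.0.2'));   // true
console.log(isLegacyNext('^8.0.0'));  // false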
@@ -1,19 +1,13 @@
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
const { Server } = require('http');
|
||||
const next = require('next-server');
|
||||
const url = require('url');
|
||||
const { Bridge } = require('./now__bridge.js');
|
||||
const page = require('./page.js');
|
||||
|
||||
const bridge = new Bridge();
|
||||
bridge.port = 3000;
|
||||
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
const app = next({});
|
||||
|
||||
const server = new Server((req, res) => {
|
||||
const parsedUrl = url.parse(req.url, true);
|
||||
app.render(req, res, 'PATHNAME_PLACEHOLDER', parsedUrl.query, parsedUrl);
|
||||
});
|
||||
const server = new Server(page.render);
|
||||
server.listen(bridge.port);
|
||||
|
||||
exports.launcher = bridge.launcher;
|
||||
|
||||
19 packages/now-next/legacy-launcher.js Normal file
@@ -0,0 +1,19 @@
|
||||
const { Server } = require('http');
|
||||
const next = require('next-server');
|
||||
const url = require('url');
|
||||
const { Bridge } = require('./now__bridge.js');
|
||||
|
||||
const bridge = new Bridge();
|
||||
bridge.port = 3000;
|
||||
|
||||
process.env.NODE_ENV = 'production';
|
||||
|
||||
const app = next({});
|
||||
|
||||
const server = new Server((req, res) => {
|
||||
const parsedUrl = url.parse(req.url, true);
|
||||
app.render(req, res, 'PATHNAME_PLACEHOLDER', parsedUrl.query, parsedUrl);
|
||||
});
|
||||
server.listen(bridge.port);
|
||||
|
||||
exports.launcher = bridge.launcher;
|
||||
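Each page gets its own copy of the launcher: the builder swaps PATHNAME_PLACEHOLDER for the page's route, stripping a trailing "index" segment so pages/blog/index.js maps to /blog. A small illustration of that substitution (the template string below is only a stand-in for the launcher source read from disk):

// stand-in for the launcher source
const launcherTemplate =
  "app.render(req, res, 'PATHNAME_PLACEHOLDER', parsedUrl.query, parsedUrl);";

function routeFor(page) {
  const pathname = page.replace(/\.js$/, '');
  return `/${pathname.replace(/(^|\/)index$/, '')}`;
}

console.log(routeFor('index.js'));       // "/"
console.log(routeFor('blog/index.js'));  // "/blog"
console.log(routeFor('about.js'));       // "/about"
console.log(launcherTemplate.replace('PATHNAME_PLACEHOLDER', routeFor('blog/index.js')));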
336 packages/now-next/legacy-versions.js Normal file
@@ -0,0 +1,336 @@
|
||||
module.exports = [
|
||||
'0.1.0',
|
||||
'0.1.1',
|
||||
'0.2.0',
|
||||
'0.2.1',
|
||||
'0.2.2',
|
||||
'0.2.3',
|
||||
'0.2.4',
|
||||
'0.2.5',
|
||||
'0.2.6',
|
||||
'0.2.7',
|
||||
'0.2.8',
|
||||
'0.2.9',
|
||||
'0.2.10',
|
||||
'0.2.11',
|
||||
'0.2.12',
|
||||
'0.2.13',
|
||||
'0.2.14',
|
||||
'0.3.0',
|
||||
'0.3.1',
|
||||
'0.3.2',
|
||||
'0.3.3',
|
||||
'0.4.0',
|
||||
'0.4.1',
|
||||
'0.9.9',
|
||||
'0.9.10',
|
||||
'0.9.11',
|
||||
'1.0.0',
|
||||
'1.0.1',
|
||||
'1.0.2',
|
||||
'1.1.0',
|
||||
'1.1.1',
|
||||
'1.1.2',
|
||||
'1.2.0',
|
||||
'1.2.1',
|
||||
'1.2.2',
|
||||
'1.2.3',
|
||||
'2.0.0-beta.0',
|
||||
'2.0.0-beta.1',
|
||||
'2.0.0-beta.2',
|
||||
'2.0.0-beta.3',
|
||||
'2.0.0-beta.4',
|
||||
'2.0.0-beta.5',
|
||||
'2.0.0-beta.6',
|
||||
'2.0.0-beta.7',
|
||||
'2.0.0-beta.8',
|
||||
'2.0.0-beta.9',
|
||||
'2.0.0-beta.10',
|
||||
'2.0.0-beta.11',
|
||||
'2.0.0-beta.12',
|
||||
'2.0.0-beta.13',
|
||||
'2.0.0-beta.14',
|
||||
'2.0.0-beta.15',
|
||||
'2.0.0-beta.16',
|
||||
'2.0.0-beta.17',
|
||||
'2.0.0-beta.18',
|
||||
'2.0.0-beta.19',
|
||||
'2.0.0-beta.20',
|
||||
'2.0.0-beta.21',
|
||||
'2.0.0-beta.22',
|
||||
'2.0.0-beta.23',
|
||||
'2.0.0-beta.24',
|
||||
'2.0.0-beta.25',
|
||||
'2.0.0-beta.26',
|
||||
'2.0.0-beta.27',
|
||||
'2.0.0-beta.28',
|
||||
'2.0.0-beta.29',
|
||||
'2.0.0-beta.30',
|
||||
'2.0.0-beta.31',
|
||||
'2.0.0-beta.32',
|
||||
'2.0.0-beta.33',
|
||||
'2.0.0-beta.34',
|
||||
'2.0.0-beta.35',
|
||||
'2.0.0-beta.36',
|
||||
'2.0.0-beta.37',
|
||||
'2.0.0-beta.38',
|
||||
'2.0.0-beta.39',
|
||||
'2.0.0-beta.40',
|
||||
'2.0.0-beta.41',
|
||||
'2.0.0-beta.42',
|
||||
'2.0.0',
|
||||
'2.0.1',
|
||||
'2.1.0',
|
||||
'2.1.1',
|
||||
'2.2.0',
|
||||
'2.3.0-alpha1',
|
||||
'2.3.0',
|
||||
'2.3.1',
|
||||
'2.4.0',
|
||||
'2.4.1',
|
||||
'2.4.2',
|
||||
'2.4.3',
|
||||
'2.4.4',
|
||||
'2.4.5',
|
||||
'2.4.6',
|
||||
'2.4.7',
|
||||
'2.4.8',
|
||||
'2.4.9',
|
||||
'3.0.0-beta1',
|
||||
'3.0.0-beta10',
|
||||
'3.0.0-beta11',
|
||||
'3.0.0-beta12',
|
||||
'3.0.0-beta13',
|
||||
'3.0.0-beta14',
|
||||
'3.0.0-beta15',
|
||||
'3.0.0-beta16',
|
||||
'3.0.0-beta2',
|
||||
'3.0.0-beta3',
|
||||
'3.0.0-beta4',
|
||||
'3.0.0-beta5',
|
||||
'3.0.0-beta6',
|
||||
'3.0.0-beta7',
|
||||
'3.0.0-beta8',
|
||||
'3.0.0-beta9',
|
||||
'3.0.1-beta.1',
|
||||
'3.0.1-beta.2',
|
||||
'3.0.1-beta.3',
|
||||
'3.0.1-beta.4',
|
||||
'3.0.1-beta.5',
|
||||
'3.0.1-beta.6',
|
||||
'3.0.1-beta.7',
|
||||
'3.0.1-beta.8',
|
||||
'3.0.1-beta.9',
|
||||
'3.0.1-beta.10',
|
||||
'3.0.1-beta.11',
|
||||
'3.0.1-beta.12',
|
||||
'3.0.1-beta.13',
|
||||
'3.0.1-beta.14',
|
||||
'3.0.1-beta.15',
|
||||
'3.0.1-beta.16',
|
||||
'3.0.1-beta.17',
|
||||
'3.0.1-beta.18',
|
||||
'3.0.1-beta.19',
|
||||
'3.0.1-beta.20',
|
||||
'3.0.1-beta.21',
|
||||
'3.0.1',
|
||||
'3.0.2',
|
||||
'3.0.3',
|
||||
'3.0.4',
|
||||
'3.0.5',
|
||||
'3.0.6',
|
||||
'3.1.0',
|
||||
'3.2.0',
|
||||
'3.2.1',
|
||||
'3.2.2',
|
||||
'3.2.3',
|
||||
'4.0.0-beta.1',
|
||||
'4.0.0-beta.2',
|
||||
'4.0.0-beta.3',
|
||||
'4.0.0-beta.4',
|
||||
'4.0.0-beta.5',
|
||||
'4.0.0-beta.6',
|
||||
'4.0.0',
|
||||
'4.0.1',
|
||||
'4.0.2',
|
||||
'4.0.3',
|
||||
'4.0.4',
|
||||
'4.0.5',
|
||||
'4.1.0',
|
||||
'4.1.1',
|
||||
'4.1.2',
|
||||
'4.1.3',
|
||||
'4.1.4-canary.1',
|
||||
'4.1.4-canary.2',
|
||||
'4.1.4',
|
||||
'4.2.0-canary.1',
|
||||
'4.2.0-zones.2',
|
||||
'4.2.0',
|
||||
'4.2.1',
|
||||
'4.2.2',
|
||||
'4.2.3',
|
||||
'4.3.0-canary.1',
|
||||
'4.3.0-universal-alpha.1',
|
||||
'4.3.0-universal-alpha.2',
|
||||
'4.3.0-universal-alpha.3',
|
||||
'4.3.0-universal-alpha.4',
|
||||
'4.3.0-zones.1',
|
||||
'4.4.0-canary.2',
|
||||
'4.4.0-canary.3',
|
||||
'5.0.0-universal-alpha.1',
|
||||
'5.0.0-universal-alpha.2',
|
||||
'5.0.0-universal-alpha.3',
|
||||
'5.0.0-universal-alpha.4',
|
||||
'5.0.0-universal-alpha.5',
|
||||
'5.0.0-universal-alpha.6',
|
||||
'5.0.0-universal-alpha.7',
|
||||
'5.0.0-universal-alpha.8',
|
||||
'5.0.0-universal-alpha.9',
|
||||
'5.0.0-universal-alpha.10',
|
||||
'5.0.0-universal-alpha.11',
|
||||
'5.0.0-universal-alpha.12',
|
||||
'5.0.0-universal-alpha.13',
|
||||
'5.0.0-universal-alpha.14',
|
||||
'5.0.0-universal-alpha.15',
|
||||
'5.0.0-universal-alpha.16',
|
||||
'5.0.0-universal-alpha.17',
|
||||
'5.0.0-universal-alpha.18',
|
||||
'5.0.0-universal-alpha.19',
|
||||
'5.0.0-universal-alpha.20',
|
||||
'5.0.0-universal-alpha.21',
|
||||
'5.0.0-universal-alpha.22',
|
||||
'5.0.0-universal-alpha.23',
|
||||
'5.0.0-zones.1',
|
||||
'5.0.0',
|
||||
'5.0.1-canary.1',
|
||||
'5.0.1-canary.2',
|
||||
'5.0.1-canary.3',
|
||||
'5.0.1-canary.4',
|
||||
'5.0.1-canary.5',
|
||||
'5.0.1-canary.6',
|
||||
'5.0.1-canary.7',
|
||||
'5.0.1-canary.8',
|
||||
'5.0.1-canary.9',
|
||||
'5.0.1-canary.10',
|
||||
'5.0.1-canary.11',
|
||||
'5.0.1-canary.12',
|
||||
'5.0.1-canary.13',
|
||||
'5.0.1-canary.14',
|
||||
'5.0.1-canary.15',
|
||||
'5.0.1-canary.16',
|
||||
'5.0.1-canary.17',
|
||||
'5.1.0',
|
||||
'6.0.0-canary.1',
|
||||
'6.0.0-canary.2',
|
||||
'6.0.0-canary.3',
|
||||
'6.0.0-canary.4',
|
||||
'6.0.0-canary.5',
|
||||
'6.0.0-canary.6',
|
||||
'6.0.0-canary.7',
|
||||
'6.0.0',
|
||||
'6.0.1-canary.0',
|
||||
'6.0.1-canary.1',
|
||||
'6.0.1-canary.2',
|
||||
'6.0.1',
|
||||
'6.0.2-canary.0',
|
||||
'6.0.2',
|
||||
'6.0.3-canary.0',
|
||||
'6.0.3-canary.1',
|
||||
'6.0.3',
|
||||
'6.0.4-canary.0',
|
||||
'6.0.4-canary.1',
|
||||
'6.0.4-canary.2',
|
||||
'6.0.4-canary.3',
|
||||
'6.0.4-canary.4',
|
||||
'6.0.4-canary.5',
|
||||
'6.0.4-canary.6',
|
||||
'6.0.4-canary.7',
|
||||
'6.0.4-canary.8',
|
||||
'6.0.4-canary.9',
|
||||
'6.1.0-canary.0',
|
||||
'6.1.0',
|
||||
'6.1.1-canary.0',
|
||||
'6.1.1-canary.1',
|
||||
'6.1.1-canary.2',
|
||||
'6.1.1-canary.3',
|
||||
'6.1.1-canary.4',
|
||||
'6.1.1-canary.5',
|
||||
'6.1.1',
|
||||
'6.1.2',
|
||||
'7.0.0-canary.0',
|
||||
'7.0.0-canary.1',
|
||||
'7.0.0-canary.2',
|
||||
'7.0.0-canary.3',
|
||||
'7.0.0-canary.4',
|
||||
'7.0.0-canary.5',
|
||||
'7.0.0-canary.6',
|
||||
'7.0.0-canary.7',
|
||||
'7.0.0-canary.8',
|
||||
'7.0.0-canary.9',
|
||||
'7.0.0-canary.10',
|
||||
'7.0.0-canary.11',
|
||||
'7.0.0-canary.12',
|
||||
'7.0.0-canary.13',
|
||||
'7.0.0-canary.14',
|
||||
'7.0.0-canary.15',
|
||||
'7.0.0-canary.16',
|
||||
'7.0.0-canary.18',
|
||||
'7.0.0-canary.19',
|
||||
'7.0.0-canary.20',
|
||||
'7.0.0',
|
||||
'7.0.1-canary.0',
|
||||
'7.0.1-canary.1',
|
||||
'7.0.1-canary.2',
|
||||
'7.0.1-canary.3',
|
||||
'7.0.1-canary.4',
|
||||
'7.0.1-canary.5',
|
||||
'7.0.1-canary.6',
|
||||
'7.0.1',
|
||||
'7.0.2-alpha.1',
|
||||
'7.0.2-alpha.3',
|
||||
'7.0.2-canary.5',
|
||||
'7.0.2-canary.6',
|
||||
'7.0.2-canary.7',
|
||||
'7.0.2-canary.8',
|
||||
'7.0.2-canary.9',
|
||||
'7.0.2-canary.10',
|
||||
'7.0.2-canary.11',
|
||||
'7.0.2-canary.12',
|
||||
'7.0.2-canary.13',
|
||||
'7.0.2-canary.14',
|
||||
'7.0.2-canary.15',
|
||||
'7.0.2-canary.16',
|
||||
'7.0.2-canary.17',
|
||||
'7.0.2-canary.18',
|
||||
'7.0.2-canary.19',
|
||||
'7.0.2-canary.20',
|
||||
'7.0.2-canary.21',
|
||||
'7.0.2-canary.22',
|
||||
'7.0.2-canary.23',
|
||||
'7.0.2-canary.24',
|
||||
'7.0.2-canary.25',
|
||||
'7.0.2-canary.26',
|
||||
'7.0.2-canary.27',
|
||||
'7.0.2-canary.28',
|
||||
'7.0.2-canary.29',
|
||||
'7.0.2-canary.31',
|
||||
'7.0.2-canary.33',
|
||||
'7.0.2-canary.34',
|
||||
'7.0.2-canary.35',
|
||||
'7.0.2-canary.36',
|
||||
'7.0.2-canary.37',
|
||||
'7.0.2-canary.38',
|
||||
'7.0.2-canary.39',
|
||||
'7.0.2-canary.40',
|
||||
'7.0.2-canary.41',
|
||||
'7.0.2-canary.42',
|
||||
'7.0.2-canary.43',
|
||||
'7.0.2-canary.44',
|
||||
'7.0.2-canary.45',
|
||||
'7.0.2-canary.46',
|
||||
'7.0.2-canary.47',
|
||||
'7.0.2-canary.48',
|
||||
'7.0.2-canary.49',
|
||||
'7.0.2-canary.50',
|
||||
'7.0.2',
|
||||
];
|
||||
@@ -1,10 +1,12 @@
|
||||
{
|
||||
"name": "@now/next",
|
||||
"version": "0.0.78-canary.1",
|
||||
"version": "0.0.84",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "0.1.4",
|
||||
"execa": "^1.0.0",
|
||||
"fs.promised": "^3.0.0"
|
||||
"fs.promised": "^3.0.0",
|
||||
"semver": "^5.6.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
|
||||
181 packages/now-next/utils.js Normal file
@@ -0,0 +1,181 @@
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
|
||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
||||
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
|
||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
||||
|
||||
/**
|
||||
* Validate if the entrypoint is allowed to be used
|
||||
* @param {string} entrypoint
|
||||
* @throws {Error}
|
||||
*/
|
||||
function validateEntrypoint(entrypoint) {
|
||||
if (
|
||||
!/package\.json$/.exec(entrypoint)
|
||||
&& !/next\.config\.js$/.exec(entrypoint)
|
||||
) {
|
||||
throw new Error(
|
||||
'Specified "src" for "@now/next" has to be "package.json" or "next.config.js"',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* A matcher callback decides whether a given file path should be excluded.
|
||||
*
|
||||
* @callback matcher
|
||||
* @param {string} filePath
|
||||
* @returns {boolean}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Exclude certain files from the files object
|
||||
* @param {Files} files
|
||||
* @param {matcher} matcher
|
||||
* @returns {Files}
|
||||
*/
|
||||
function excludeFiles(files, matcher) {
|
||||
return Object.keys(files).reduce((newFiles, filePath) => {
|
||||
if (matcher(filePath)) {
|
||||
return newFiles;
|
||||
}
|
||||
return {
|
||||
...newFiles,
|
||||
[filePath]: files[filePath],
|
||||
};
|
||||
}, {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Files object holding only the entrypoint files
|
||||
* @param {Files} files
|
||||
* @param {string} entryDirectory
|
||||
* @returns {Files}
|
||||
*/
|
||||
function includeOnlyEntryDirectory(files, entryDirectory) {
|
||||
if (entryDirectory === '.') {
|
||||
return files;
|
||||
}
|
||||
|
||||
function matcher(filePath) {
|
||||
return !filePath.startsWith(entryDirectory);
|
||||
}
|
||||
|
||||
return excludeFiles(files, matcher);
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves all files under the entry directory to the root directory
|
||||
* @param {Files} files
|
||||
* @param {string} entryDirectory
|
||||
* @returns {Files}
|
||||
*/
|
||||
function moveEntryDirectoryToRoot(files, entryDirectory) {
|
||||
if (entryDirectory === '.') {
|
||||
return files;
|
||||
}
|
||||
|
||||
function delegate(filePath) {
|
||||
return filePath.replace(new RegExp(`^${entryDirectory}/`), '');
|
||||
}
|
||||
|
||||
return rename(files, delegate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude package manager lockfiles from files
|
||||
* @param {Files} files
|
||||
* @returns {Files}
|
||||
*/
|
||||
function excludeLockFiles(files) {
|
||||
const newFiles = files;
|
||||
if (newFiles['package-lock.json']) {
|
||||
delete newFiles['package-lock.json'];
|
||||
}
|
||||
if (newFiles['yarn.lock']) {
|
||||
delete newFiles['yarn.lock'];
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude the static directory from files
|
||||
* @param {Files} files
|
||||
* @returns {Files}
|
||||
*/
|
||||
function excludeStaticDirectory(files) {
|
||||
function matcher(filePath) {
|
||||
return filePath.startsWith('static');
|
||||
}
|
||||
|
||||
return excludeFiles(files, matcher);
|
||||
}
|
||||
|
||||
/**
|
||||
* Include only the static directory from files
|
||||
* @param {Files} files
|
||||
* @returns {Files}
|
||||
*/
|
||||
function onlyStaticDirectory(files) {
|
||||
function matcher(filePath) {
|
||||
return !filePath.startsWith('static');
|
||||
}
|
||||
|
||||
return excludeFiles(files, matcher);
|
||||
}
|
||||
|
||||
/**
|
||||
* Enforce specific package.json configuration for smallest possible lambda
|
||||
* @param {{dependencies?: any, devDependencies?: any, scripts?: any}} defaultPackageJson
|
||||
*/
|
||||
function normalizePackageJson(defaultPackageJson = {}) {
|
||||
const dependencies = {};
|
||||
const devDependencies = {
|
||||
...defaultPackageJson.dependencies,
|
||||
...defaultPackageJson.devDependencies,
|
||||
};
|
||||
|
||||
if (devDependencies.react) {
|
||||
dependencies.react = devDependencies.react;
|
||||
delete devDependencies.react;
|
||||
}
|
||||
|
||||
if (devDependencies['react-dom']) {
|
||||
dependencies['react-dom'] = devDependencies['react-dom'];
|
||||
delete devDependencies['react-dom'];
|
||||
}
|
||||
|
||||
return {
|
||||
...defaultPackageJson,
|
||||
dependencies: {
|
||||
// react and react-dom can be overwritten
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
...dependencies, // override react if user provided it
|
||||
// next-server is forced to canary
|
||||
'next-server': 'v7.0.2-canary.49',
|
||||
},
|
||||
devDependencies: {
|
||||
...devDependencies,
|
||||
// next is forced to canary
|
||||
next: 'v7.0.2-canary.49',
|
||||
// next-server is a dependency here
|
||||
'next-server': undefined,
|
||||
},
|
||||
scripts: {
|
||||
...defaultPackageJson.scripts,
|
||||
'now-build': 'NODE_OPTIONS=--max_old_space_size=3000 next build --lambdas',
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
moveEntryDirectoryToRoot,
|
||||
excludeLockFiles,
|
||||
normalizePackageJson,
|
||||
excludeStaticDirectory,
|
||||
onlyStaticDirectory,
|
||||
};
|
||||
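These helpers are composed at the top of the builder's non-legacy path: restrict the Files object to the entry directory, move that directory to the root, then split off the static directory (which is served directly rather than bundled). A short sketch of that composition, assuming it is called with the builder's files and entrypoint exactly as in index.js above:

const path = require('path');
const {
  includeOnlyEntryDirectory,
  moveEntryDirectoryToRoot,
  excludeStaticDirectory,
  onlyStaticDirectory,
} = require('./utils');

function prepareNextFiles(files, entrypoint) {
  const entryDirectory = path.dirname(entrypoint);
  const onlyEntry = includeOnlyEntryDirectory(files, entryDirectory);
  const atRoot = moveEntryDirectoryToRoot(onlyEntry, entryDirectory);
  return {
    // everything the build itself needs, minus static/
    buildFiles: excludeStaticDirectory(atRoot),
    // static/ assets, to be remapped under the entry directory in the output
    staticFiles: onlyStaticDirectory(atRoot),
  };
}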
@@ -1,7 +1,10 @@
|
||||
const http = require('http');
|
||||
|
||||
function normalizeEvent(event) {
|
||||
let isApiGateway = true;
|
||||
|
||||
if (event.Action === 'Invoke') {
|
||||
isApiGateway = false;
|
||||
const invokeEvent = JSON.parse(event.body);
|
||||
|
||||
const {
|
||||
@@ -21,10 +24,7 @@ function normalizeEvent(event) {
|
||||
}
|
||||
|
||||
return {
|
||||
method,
|
||||
path,
|
||||
headers,
|
||||
body,
|
||||
isApiGateway, method, path, headers, body,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -33,10 +33,7 @@ function normalizeEvent(event) {
|
||||
} = event;
|
||||
|
||||
return {
|
||||
method,
|
||||
path,
|
||||
headers,
|
||||
body,
|
||||
isApiGateway, method, path, headers, body,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -58,7 +55,7 @@ class Bridge {
|
||||
}
|
||||
|
||||
const {
|
||||
method, path, headers, body,
|
||||
isApiGateway, method, path, headers, body,
|
||||
} = normalizeEvent(event);
|
||||
|
||||
const opts = {
|
||||
@@ -73,20 +70,35 @@ class Bridge {
|
||||
const response = res;
|
||||
const respBodyChunks = [];
|
||||
response.on('data', chunk => respBodyChunks.push(Buffer.from(chunk)));
|
||||
response.on('error', error => reject(error));
|
||||
response.on('error', reject);
|
||||
response.on('end', () => {
|
||||
const bodyBuffer = Buffer.concat(respBodyChunks);
|
||||
delete response.headers.connection;
|
||||
delete response.headers['content-length'];
|
||||
|
||||
if (isApiGateway) {
|
||||
delete response.headers['content-length'];
|
||||
} else
|
||||
if (response.headers['content-length']) {
|
||||
response.headers['content-length'] = bodyBuffer.length;
|
||||
}
|
||||
|
||||
resolve({
|
||||
statusCode: response.statusCode,
|
||||
headers: response.headers,
|
||||
body: Buffer.concat(respBodyChunks).toString('base64'),
|
||||
body: bodyBuffer.toString('base64'),
|
||||
encoding: 'base64',
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
req.on('error', (error) => {
|
||||
setTimeout(() => {
|
||||
// this lets express print the true error of why the connection was closed.
|
||||
// it is probably 'Cannot set headers after they are sent to the client'
|
||||
reject(error);
|
||||
}, 2);
|
||||
});
|
||||
|
||||
if (body) req.write(body);
|
||||
req.end();
|
||||
});
|
||||
|
||||
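Whichever invocation style triggers it, the bridge resolves with the same normalized response: a status code, filtered headers, and a base64-encoded body. A small sketch of decoding that result on the consuming side (the sample object is illustrative, but matches the shape resolved above):

// illustrative bridge result, matching the { statusCode, headers, body, encoding } shape above
const bridgeResult = {
  statusCode: 200,
  headers: { 'content-type': 'text/plain' },
  body: Buffer.from('hello from the lambda').toString('base64'),
  encoding: 'base64',
};

const text = bridgeResult.encoding === 'base64'
  ? Buffer.from(bridgeResult.body, 'base64').toString()
  : bridgeResult.body;

console.log(bridgeResult.statusCode, text); // 200 'hello from the lambda'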
@@ -1,6 +1,7 @@
|
||||
{
|
||||
"name": "@now/node-bridge",
|
||||
"version": "0.1.8-canary.1",
|
||||
"version": "0.1.10",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
|
||||
1 packages/now-node-server/.npmignore Normal file
@@ -0,0 +1 @@
|
||||
/test
|
||||
@@ -5,31 +5,41 @@ const FileFsRef = require('@now/build-utils/file-fs-ref.js');
|
||||
const fs = require('fs-extra');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const path = require('path');
|
||||
const { promisify } = require('util');
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
const {
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
|
||||
const fsp = {
|
||||
readFile: promisify(fs.readFile),
|
||||
};
|
||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
||||
/** @typedef {{[filePath: string]: FileRef}} Files */
|
||||
|
||||
async function commonForTwo({
|
||||
files, entrypoint, workPath, cachePath,
|
||||
}) {
|
||||
const xPath = workPath || cachePath;
|
||||
const preferOfflineArgument = workPath ? ['--prefer-offline'] : [];
|
||||
/**
|
||||
* @typedef {Object} BuildParamsType
|
||||
* @property {Files} files - Files object
|
||||
* @property {string} entrypoint - Entrypoint specified for the builder
|
||||
* @property {Object} config - User-passed config from now.json
|
||||
* @property {string} workPath - Working directory for this build
|
||||
*/
|
||||
|
||||
const xUserPath = path.join(xPath, 'user');
|
||||
const xNccPath = path.join(xPath, 'ncc');
|
||||
/**
|
||||
* @param {BuildParamsType} buildParams
|
||||
* @param {Object} [options]
|
||||
* @param {string[]} [options.npmArguments]
|
||||
*/
|
||||
async function downloadInstallAndBundle(
|
||||
{ files, entrypoint, workPath },
|
||||
{ npmArguments = [] } = {},
|
||||
) {
|
||||
const userPath = path.join(workPath, 'user');
|
||||
const nccPath = path.join(workPath, 'ncc');
|
||||
|
||||
console.log('downloading user files...');
|
||||
const filesOnDisk = await download(files, xUserPath);
|
||||
const downloadedFiles = await download(files, userPath);
|
||||
|
||||
console.log('running npm install for user...');
|
||||
const entrypointFsDirname = path.join(xUserPath, path.dirname(entrypoint));
|
||||
await runNpmInstall(entrypointFsDirname, preferOfflineArgument);
|
||||
console.log('installing dependencies for user\'s code...');
|
||||
const entrypointFsDirname = path.join(userPath, path.dirname(entrypoint));
|
||||
await runNpmInstall(entrypointFsDirname, npmArguments);
|
||||
|
||||
console.log('writing ncc package.json...');
|
||||
await download(
|
||||
@@ -37,48 +47,78 @@ async function commonForTwo({
|
||||
'package.json': new FileBlob({
|
||||
data: JSON.stringify({
|
||||
dependencies: {
|
||||
'@zeit/ncc': '0.1.3-webpack',
|
||||
'@zeit/ncc': '0.11.0',
|
||||
},
|
||||
}),
|
||||
}),
|
||||
},
|
||||
xNccPath,
|
||||
nccPath,
|
||||
);
|
||||
|
||||
console.log('running npm install for ncc...');
|
||||
await runNpmInstall(xNccPath, preferOfflineArgument);
|
||||
return [filesOnDisk, xNccPath, entrypointFsDirname];
|
||||
console.log('installing dependencies for ncc...');
|
||||
await runNpmInstall(nccPath, npmArguments);
|
||||
return [downloadedFiles, userPath, nccPath, entrypointFsDirname];
|
||||
}
|
||||
|
||||
async function compile(workNccPath, input) {
|
||||
async function compile(workNccPath, downloadedFiles, entrypoint) {
|
||||
const input = downloadedFiles[entrypoint].fsPath;
|
||||
const ncc = require(path.join(workNccPath, 'node_modules/@zeit/ncc'));
|
||||
return ncc(input);
|
||||
const { code, assets } = await ncc(input);
|
||||
|
||||
const preparedFiles = {};
|
||||
const blob = new FileBlob({ data: code });
|
||||
// move all user code to 'user' subdirectory
|
||||
preparedFiles[path.join('user', entrypoint)] = blob;
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const assetName of Object.keys(assets)) {
|
||||
const { source: data, permissions: mode } = assets[assetName];
|
||||
const blob2 = new FileBlob({ data, mode });
|
||||
preparedFiles[
|
||||
path.join('user', path.dirname(entrypoint), assetName)
|
||||
] = blob2;
|
||||
}
|
||||
|
||||
return preparedFiles;
|
||||
}
|
||||
|
||||
exports.config = {
  maxLambdaSize: '15mb',
};

- exports.build = async ({ files, entrypoint, workPath }) => {
-   const [filesOnDisk, workNccPath, entrypointFsDirname] = await commonForTwo({
-     files,
-     entrypoint,
-     workPath,
-   });
+ /**
+  * @param {BuildParamsType} buildParams
+  * @returns {Promise<Files>}
+  */
+ exports.build = async ({
+   files, entrypoint, config, workPath,
+ }) => {
+   const [
+     downloadedFiles,
+     workUserPath,
+     workNccPath,
+     entrypointFsDirname,
+   ] = await downloadInstallAndBundle(
+     { files, entrypoint, workPath },
+     { npmArguments: ['--prefer-offline'] },
+   );

    console.log('running user script...');
    await runPackageJsonScript(entrypointFsDirname, 'now-build');

-   console.log('compiling entrypoint with ncc...');
-   const data = await compile(workNccPath, filesOnDisk[entrypoint].fsPath);
-   const blob = new FileBlob({ data });

    console.log('preparing lambda files...');
-   // move all user code to 'user' subdirectory
-   const compiledFiles = { [path.join('user', entrypoint)]: blob };
-   const launcherPath = path.join(__dirname, 'launcher.js');
-   let launcherData = await fsp.readFile(launcherPath, 'utf8');
+   let preparedFiles;

+   if (config && config.bundle === false) {
+     // move all user code to 'user' subdirectory
+     preparedFiles = await glob('**', workUserPath);
+     preparedFiles = rename(preparedFiles, name => path.join('user', name));
+   } else {
+     console.log('compiling entrypoint with ncc...');
+     preparedFiles = await compile(workNccPath, downloadedFiles, entrypoint);
+   }

+   const launcherPath = path.join(__dirname, 'launcher.js');
+   let launcherData = await fs.readFile(launcherPath, 'utf8');
    launcherData = launcherData.replace(
      '// PLACEHOLDER',
      [
@@ -93,7 +133,7 @@ exports.build = async ({ files, entrypoint, workPath }) => {
  };

  const lambda = await createLambda({
-   files: { ...compiledFiles, ...launcherFiles },
+   files: { ...preparedFiles, ...launcherFiles },
    handler: 'launcher.launcher',
    runtime: 'nodejs8.10',
  });

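The new `bundle: false` branch skips ncc entirely: it globs everything that was downloaded and installed under `workUserPath` and re-keys it under `user/`. Both `glob` and `rename` come from `@now/build-utils`; the sketch below is only an assumed reading of what `rename(files, fn)` does, based on this call site:

```js
// Assumed shape of rename(): map every key of a Files object through fn,
// keeping the FileBlob/FileFsRef values untouched.
function renameSketch(files, fn) {
  const out = {};
  for (const name of Object.keys(files)) {
    out[fn(name)] = files[name];
  }
  return out;
}

// e.g. renameSketch({ 'index.js': blob, 'node_modules/x/y.js': blob2 },
//                   name => path.join('user', name))
// -> { 'user/index.js': blob, 'user/node_modules/x/y.js': blob2 }
```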
@@ -105,7 +145,7 @@ exports.prepareCache = async ({
  files, entrypoint, workPath, cachePath,
}) => {
  await fs.remove(workPath);
- await commonForTwo({ files, entrypoint, cachePath });
+ await downloadInstallAndBundle({ files, entrypoint, workPath: cachePath });

  return {
    ...(await glob('user/node_modules/**', cachePath)),

@@ -12,7 +12,10 @@ Server.prototype.listen = function listen(...args) {
};

try {
- process.env.NODE_ENV = 'production';
+ if (!process.env.NODE_ENV) {
+   process.env.NODE_ENV = 'production';
+ }

  // PLACEHOLDER
} catch (error) {
  console.error(error);

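The launcher change is small but behavioural: `NODE_ENV` is now only defaulted to `'production'` when nothing set it beforehand, instead of being overwritten unconditionally. In isolation the guard behaves like this (the `'development'` assignment here merely stands in for a value already provided for the deployment or by user code, as in the cowsay fixtures below):

```js
// Value already present before the guard runs.
process.env.NODE_ENV = 'development';

if (!process.env.NODE_ENV) {
  process.env.NODE_ENV = 'production'; // now only a fallback
}

console.log(process.env.NODE_ENV); // 'development', no longer forced to 'production'
```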
@@ -1,11 +1,15 @@
{
  "name": "@now/node-server",
- "version": "0.4.23-canary.3",
+ "version": "0.4.26",
  "license": "MIT",
  "dependencies": {
-   "@now/node-bridge": "^0.1.8-canary.1",
+   "@now/node-bridge": "^0.1.10",
    "fs-extra": "7.0.1"
  },
  "peerDependencies": {
    "@now/build-utils": ">=0.0.1"
  },
  "scripts": {
    "test": "jest"
  }
}

packages/now-node-server/test/fixtures/01-cowsay/index.js (vendored, new file)
@@ -0,0 +1,11 @@
const cowsay = require('cowsay').say;
const http = require('http');

// test that process.env is not replaced by webpack
process.env.NODE_ENV = 'development';

const server = http.createServer((req, resp) => {
  resp.end(cowsay({ text: 'cow:RANDOMNESS_PLACEHOLDER' }));
});

server.listen();

packages/now-node-server/test/fixtures/01-cowsay/now.json (vendored, new file)
@@ -0,0 +1,11 @@
{
  "version": 2,
  "builds": [
    { "src": "index.js", "use": "@now/node-server" },
    { "src": "subdirectory/index.js", "use": "@now/node-server" }
  ],
  "probes": [
    { "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
    { "path": "/subdirectory/", "mustContain": "yoda:RANDOMNESS_PLACEHOLDER" }
  ]
}

packages/now-node-server/test/fixtures/01-cowsay/package.json (vendored, new file)
@@ -0,0 +1,5 @@
{
  "dependencies": {
    "cowsay": "1.3.1"
  }
}

packages/now-node-server/test/fixtures/01-cowsay/subdirectory/index.js (vendored, new file)
@@ -0,0 +1,11 @@
const yodasay = require('yodasay').say;
const http = require('http');

// test that process.env is not replaced by webpack
process.env.NODE_ENV = 'development';

const server = http.createServer((req, resp) => {
  resp.end(yodasay({ text: 'yoda:RANDOMNESS_PLACEHOLDER' }));
});

server.listen();

packages/now-node-server/test/fixtures/01-cowsay/subdirectory/package.json (vendored, new file)
@@ -0,0 +1,5 @@
{
  "dependencies": {
    "yodasay": "1.1.9"
  }
}

packages/now-node-server/test/fixtures/03-env-vars/build-env/index.js (vendored, new file)
@@ -0,0 +1,10 @@
const assert = require('assert');
const http = require('http');

const server = http.createServer((req, resp) => {
  assert(!process.env.RANDOMNESS_BUILD_ENV_VAR);
  assert(process.env.RANDOMNESS_ENV_VAR);
  resp.end('BUILD_TIME_PLACEHOLDER:build-env');
});

server.listen();

packages/now-node-server/test/fixtures/03-env-vars/build-env/now-build.js (vendored, new file)
@@ -0,0 +1,12 @@
const assert = require('assert');
const fs = require('fs');

assert(process.env.RANDOMNESS_BUILD_ENV_VAR);
assert(!process.env.RANDOMNESS_ENV_VAR);

fs.writeFileSync(
  'index.js',
  fs
    .readFileSync('index.js', 'utf8')
    .replace('BUILD_TIME_PLACEHOLDER', process.env.RANDOMNESS_BUILD_ENV_VAR),
);

packages/now-node-server/test/fixtures/03-env-vars/build-env/package.json (vendored, new file)
@@ -0,0 +1,5 @@
{
  "scripts": {
    "now-build": "node now-build.js"
  }
}

packages/now-node-server/test/fixtures/03-env-vars/env/index.js (vendored, new file)
@@ -0,0 +1,10 @@
const assert = require('assert');
const http = require('http');

const server = http.createServer((req, resp) => {
  assert(!process.env.RANDOMNESS_BUILD_ENV_VAR);
  assert(process.env.RANDOMNESS_ENV_VAR);
  resp.end(`${process.env.RANDOMNESS_ENV_VAR}:env`);
});

server.listen();

packages/now-node-server/test/fixtures/03-env-vars/now.json (vendored, new file)
@@ -0,0 +1,11 @@
{
  "version": 2,
  "builds": [
    { "src": "build-env/index.js", "use": "@now/node-server" },
    { "src": "env/index.js", "use": "@now/node-server" }
  ],
  "probes": [
    { "path": "/build-env", "mustContain": "RANDOMNESS_PLACEHOLDER:build-env" },
    { "path": "/env", "mustContain": "RANDOMNESS_PLACEHOLDER:env" }
  ]
}

packages/now-node-server/test/fixtures/06-content-type/index.js (vendored, new file)
@@ -0,0 +1,7 @@
const http = require('http');

const server = http.createServer((req, resp) => {
  resp.end('RANDOMNESS_PLACEHOLDER');
});

server.listen();

packages/now-node-server/test/fixtures/06-content-type/now.json (vendored, new file)
@@ -0,0 +1,6 @@
{
  "version": 2,
  "builds": [
    { "src": "index.js", "use": "@now/node-server" }
  ]
}

packages/now-node-server/test/fixtures/06-content-type/probe.js (vendored, new file)
@@ -0,0 +1,7 @@
const assert = require('assert');

module.exports = async ({ deploymentUrl, fetch, randomness }) => {
  const resp = await fetch(`https://${deploymentUrl}/`);
  assert.equal(resp.headers.get('content-type'), null);
  assert.equal(await resp.text(), randomness);
};

packages/now-node-server/test/fixtures/08-assets/index.js (vendored, new file)
@@ -0,0 +1,15 @@
const fs = require('fs');
const http = require('http');
const path = require('path');

const server = http.createServer((req, resp) => {
  const asset1 = fs.readFileSync(
    path.join(__dirname, 'subdirectory1/asset.txt'),
  );
  const asset2 = fs.readFileSync(
    path.join(__dirname, 'subdirectory2/asset.txt'),
  );
  resp.end(`${asset1},${asset2}`);
});

server.listen();

packages/now-node-server/test/fixtures/08-assets/now.json (vendored, new file)
@@ -0,0 +1,9 @@
{
  "version": 2,
  "builds": [
    { "src": "index.js", "use": "@now/node-server" }
  ],
  "probes": [
    { "path": "/", "mustContain": "asset1:RANDOMNESS_PLACEHOLDER,asset2:RANDOMNESS_PLACEHOLDER" }
  ]
}

packages/now-node-server/test/fixtures/08-assets/subdirectory1/asset.txt (vendored, new file)
@@ -0,0 +1 @@
asset1:RANDOMNESS_PLACEHOLDER

packages/now-node-server/test/fixtures/08-assets/subdirectory2/asset.txt (vendored, new file)
@@ -0,0 +1 @@
asset2:RANDOMNESS_PLACEHOLDER

packages/now-node-server/test/fixtures/09-no-bundle/now.json (vendored, new file)
@@ -0,0 +1,11 @@
{
  "version": 2,
  "builds": [
    { "src": "with-bundle/index.js", "use": "@now/node-server" },
    { "src": "without-bundle/index.js", "use": "@now/node-server", "config": { "bundle": false } }
  ],
  "probes": [
    { "path": "/with-bundle", "mustContain": "RANDOMNESS_PLACEHOLDER:with-bundle" },
    { "path": "/without-bundle", "mustContain": "RANDOMNESS_PLACEHOLDER:without-bundle" }
  ]
}

packages/now-node-server/test/fixtures/09-no-bundle/with-bundle/index.js (vendored, new file)
@@ -0,0 +1,8 @@
const http = require('http');
const isBundled = require('./is-bundled.js');

const server = http.createServer((req, resp) => {
  resp.end(isBundled() ? 'RANDOMNESS_PLACEHOLDER:with-bundle' : 'WITHOUT-BUNDLE-THAT-IS-WRONG');
});

server.listen();

packages/now-node-server/test/fixtures/09-no-bundle/with-bundle/is-bundled.js (vendored, new file)
@@ -0,0 +1,4 @@
const path = require('path');

// eslint-disable-next-line no-eval
module.exports = () => path.basename(eval('__filename')) === 'index.js';

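The `is-bundled.js` trick works because ncc inlines required modules into a single output file, so at runtime the executing filename is the bundle entrypoint; wrapping `__filename` in `eval` keeps the lookup dynamic, so the bundler cannot replace it with a compile-time string. Assuming the lambda layout produced above (user code under `user/`, with the ncc output named after the entrypoint), the check resolves roughly like this:

```js
const path = require('path');

// eval keeps __filename a runtime lookup that ncc/webpack cannot rewrite.
// eslint-disable-next-line no-eval
const runtimeFile = path.basename(eval('__filename'));

// Bundled build:  this module runs from the ncc output, e.g. user/index.js -> 'index.js'
// bundle: false:  this module is shipped as-is                             -> 'is-bundled.js'
console.log(runtimeFile === 'index.js' ? 'bundled' : 'not bundled');
```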
packages/now-node-server/test/fixtures/09-no-bundle/without-bundle/index.js (vendored, new file)
@@ -0,0 +1,8 @@
const http = require('http');
const isBundled = require('./is-bundled.js');

const server = http.createServer((req, resp) => {
  resp.end(isBundled() ? 'WITH-BUNDLE-THAT-IS-WRONG' : 'RANDOMNESS_PLACEHOLDER:without-bundle');
});

server.listen();

packages/now-node-server/test/fixtures/09-no-bundle/without-bundle/is-bundled.js (vendored, new file)
@@ -0,0 +1,4 @@
const path = require('path');

// eslint-disable-next-line no-eval
module.exports = () => path.basename(eval('__filename')) === 'index.js';

Some files were not shown because too many files have changed in this diff.