mirror of
https://github.com/LukeHagar/vercel.git
synced 2025-12-11 12:57:46 +00:00
Compare commits
393 Commits
@now/lambd
...
@now/pytho
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
99410a0c06 | ||
|
|
9df2e3a62d | ||
|
|
3cc786412d | ||
|
|
973229e02e | ||
|
|
1493b0657b | ||
|
|
66df087faa | ||
|
|
4401799eb3 | ||
|
|
313cad8e20 | ||
|
|
3a51773693 | ||
|
|
ddab628034 | ||
|
|
2c10cdcbca | ||
|
|
c30eac53f1 | ||
|
|
2d3e32e95b | ||
|
|
bd8d41cadc | ||
|
|
0a6e7d8e23 | ||
|
|
e0a8cb5011 | ||
|
|
bcdc27139f | ||
|
|
8cfaef7e6c | ||
|
|
79c096b80e | ||
|
|
2cacb95c7d | ||
|
|
0b54093ca2 | ||
|
|
8c7371e093 | ||
|
|
e59c0f59f5 | ||
|
|
21b115a788 | ||
|
|
c5df4f7f9e | ||
|
|
2429451115 | ||
|
|
21789cdbf1 | ||
|
|
f74ed2aff1 | ||
|
|
2f83e6375a | ||
|
|
99a7b8f1f7 | ||
|
|
3fa7b80bde | ||
|
|
d012e73166 | ||
|
|
d6c46da37f | ||
|
|
1508932ad6 | ||
|
|
873b099f53 | ||
|
|
6546d3b67d | ||
|
|
34ad4ac33a | ||
|
|
01d0b017af | ||
|
|
db88ad4b32 | ||
|
|
4b846c3c88 | ||
|
|
2731435e3b | ||
|
|
f18e1a6bd4 | ||
|
|
167d7bedec | ||
|
|
cd0b1d61d1 | ||
|
|
86dc3708d2 | ||
|
|
1e79eae029 | ||
|
|
d0c954210a | ||
|
|
67c3481779 | ||
|
|
67473afd5b | ||
|
|
3d52610acf | ||
|
|
729ba010c4 | ||
|
|
a470e563dc | ||
|
|
15f674b8b7 | ||
|
|
1e4e6b68e0 | ||
|
|
2332100cff | ||
|
|
9cb6f500b0 | ||
|
|
79a2bfde35 | ||
|
|
86a659e5c5 | ||
|
|
0f00110db7 | ||
|
|
03ca6975ed | ||
|
|
001813c529 | ||
|
|
570ef4824b | ||
|
|
ae9a43a0bd | ||
|
|
ffaa5eaa17 | ||
|
|
7f41a23a43 | ||
|
|
e80a1a5340 | ||
|
|
7eadaf889a | ||
|
|
0823cc1005 | ||
|
|
11be2bf349 | ||
|
|
e8f31aebeb | ||
|
|
162d27a38f | ||
|
|
355c007dbb | ||
|
|
f946463cab | ||
|
|
a1e768ecc1 | ||
|
|
539f9135dd | ||
|
|
abd3d84d4c | ||
|
|
0274893d31 | ||
|
|
0ec767e0d0 | ||
|
|
e6296fa06b | ||
|
|
35def42263 | ||
|
|
08641ab804 | ||
|
|
e7f87ceba7 | ||
|
|
e9632699e1 | ||
|
|
a9427bbe76 | ||
|
|
cd700b01cc | ||
|
|
12244133f8 | ||
|
|
55acea648c | ||
|
|
48a6b77fed | ||
|
|
b0e20c043c | ||
|
|
88ceaf12d0 | ||
|
|
0f94a523a6 | ||
|
|
0b3a01d07b | ||
|
|
c03d9479e1 | ||
|
|
8d987243c6 | ||
|
|
23d877de75 | ||
|
|
8855813697 | ||
|
|
5ca59332ed | ||
|
|
860a678b74 | ||
|
|
ecb3fd25da | ||
|
|
5cb95bd226 | ||
|
|
47bc8c04ad | ||
|
|
2879041a65 | ||
|
|
2e7e403725 | ||
|
|
30eede64ec | ||
|
|
70f3d6f7de | ||
|
|
e1f7191f67 | ||
|
|
a8f4b67aa2 | ||
|
|
d64a53cbb1 | ||
|
|
4300882d12 | ||
|
|
0711e094f3 | ||
|
|
4ec1883262 | ||
|
|
cb53db4157 | ||
|
|
49aea85638 | ||
|
|
3971b5a8cf | ||
|
|
958946d01a | ||
|
|
f3d284476f | ||
|
|
6076b12067 | ||
|
|
5c7eba6049 | ||
|
|
3ed99a5b25 | ||
|
|
f8b08926f2 | ||
|
|
354e80b3e3 | ||
|
|
0cfb4a8466 | ||
|
|
8facd2845f | ||
|
|
9dae464fa6 | ||
|
|
6ff4b25d79 | ||
|
|
6766e9a099 | ||
|
|
d327426c93 | ||
|
|
47f73c856f | ||
|
|
9fdd247773 | ||
|
|
fe59cabf15 | ||
|
|
da37a9bc06 | ||
|
|
bad779be1f | ||
|
|
5eb5deb8eb | ||
|
|
f9afee7dba | ||
|
|
547e9ed684 | ||
|
|
68d2927cbf | ||
|
|
ca7f716432 | ||
|
|
9d15c35623 | ||
|
|
a55ce5da8f | ||
|
|
f0a06b797e | ||
|
|
0a77f43832 | ||
|
|
4ce0d31688 | ||
|
|
280802615f | ||
|
|
c7db281065 | ||
|
|
ca00739041 | ||
|
|
11f8b17599 | ||
|
|
970ab7d5c5 | ||
|
|
71c082dccd | ||
|
|
76a185eb90 | ||
|
|
8c0f3d107d | ||
|
|
5bfbd63e13 | ||
|
|
f7bc3b3490 | ||
|
|
d83b09ffbd | ||
|
|
0c120f6202 | ||
|
|
48a01415f8 | ||
|
|
9198b72382 | ||
|
|
79048b83de | ||
|
|
f798e2cf19 | ||
|
|
be5057a738 | ||
|
|
059d44fde7 | ||
|
|
11ad481546 | ||
|
|
4061ed2eb7 | ||
|
|
b54c79e619 | ||
|
|
2203ae1a20 | ||
|
|
5e58c1738b | ||
|
|
290577ddfb | ||
|
|
a5e651403e | ||
|
|
df2769717b | ||
|
|
880ef77b7b | ||
|
|
af2616c283 | ||
|
|
4300f4d797 | ||
|
|
4921e541af | ||
|
|
6ce00eda8c | ||
|
|
060e952f6c | ||
|
|
1639127b24 | ||
|
|
ebbb62eaea | ||
|
|
66954e84fe | ||
|
|
15a21bb28c | ||
|
|
96ca1e1d8c | ||
|
|
587cb52191 | ||
|
|
95422ffd46 | ||
|
|
391a883799 | ||
|
|
43d6960df4 | ||
|
|
5c128003d8 | ||
|
|
2f8fd1b14b | ||
|
|
625553c146 | ||
|
|
3b0ce7bad3 | ||
|
|
489ec1dfa5 | ||
|
|
d3f92d7143 | ||
|
|
3072b044ef | ||
|
|
3b4968657f | ||
|
|
cafae4c800 | ||
|
|
64952d24f1 | ||
|
|
72758b6e0d | ||
|
|
4f80bc74d5 | ||
|
|
a6e62ed61c | ||
|
|
d8eecd6172 | ||
|
|
0e70608511 | ||
|
|
da0de150df | ||
|
|
a58c35fb9e | ||
|
|
fe88a69ab7 | ||
|
|
9767682006 | ||
|
|
3285b31721 | ||
|
|
70353c7fc0 | ||
|
|
f85cf99325 | ||
|
|
8b14a46d04 | ||
|
|
383cbfd82f | ||
|
|
81e268a3c9 | ||
|
|
ac8b33213b | ||
|
|
de12e7b8c8 | ||
|
|
b9346603f0 | ||
|
|
0b793dfc35 | ||
|
|
9dd672c383 | ||
|
|
1b743aeea8 | ||
|
|
d4af4b9f5c | ||
|
|
b734ca3e01 | ||
|
|
f81d753104 | ||
|
|
db31b9a207 | ||
|
|
b80b5182e6 | ||
|
|
268a7c2b81 | ||
|
|
667a16c996 | ||
|
|
7b851f81c0 | ||
|
|
80fbbcd194 | ||
|
|
3108332043 | ||
|
|
7509c82c32 | ||
|
|
c4f5a5b48d | ||
|
|
05314da810 | ||
|
|
5f1cf714c1 | ||
|
|
2623e2e799 | ||
|
|
bac1da09d4 | ||
|
|
5b57f1a3ac | ||
|
|
2e95dd5329 | ||
|
|
215f6367d6 | ||
|
|
e8cd348a79 | ||
|
|
168f373641 | ||
|
|
8c3174be29 | ||
|
|
898de78b63 | ||
|
|
26e33c1c4b | ||
|
|
c2f95de3ec | ||
|
|
6a7de860db | ||
|
|
acb8cadafe | ||
|
|
1a8df7080d | ||
|
|
5a92826eb0 | ||
|
|
e083aa3750 | ||
|
|
941f675657 | ||
|
|
6fad726abb | ||
|
|
dd22051d6b | ||
|
|
7e86cb403f | ||
|
|
d19d557738 | ||
|
|
e4281f698c | ||
|
|
86ff681c6d | ||
|
|
ba97a7cf19 | ||
|
|
0a94397700 | ||
|
|
5c8e2f2ccc | ||
|
|
35d56a34cb | ||
|
|
9dfd37e135 | ||
|
|
6f815f2645 | ||
|
|
e186f89cfd | ||
|
|
50cade8bba | ||
|
|
13866e61f6 | ||
|
|
b72f902271 | ||
|
|
159cfe99dd | ||
|
|
1d9a96d104 | ||
|
|
245f846d3e | ||
|
|
c5ef7f3f35 | ||
|
|
ccba15a5aa | ||
|
|
f49aefa8e4 | ||
|
|
d6b36df4ce | ||
|
|
3e4dd10a79 | ||
|
|
73956706bd | ||
|
|
bd8da5360d | ||
|
|
6d5a2a4438 | ||
|
|
c88dc78e33 | ||
|
|
63ac11e9f7 | ||
|
|
1840632729 | ||
|
|
00d8eb0f65 | ||
|
|
3db58ac373 | ||
|
|
92a1720eea | ||
|
|
9abbfbe3f3 | ||
|
|
11ef8aa816 | ||
|
|
3a122ea950 | ||
|
|
737e50630a | ||
|
|
fb27b7b9be | ||
|
|
d1a4aecd2f | ||
|
|
5ef7014ed8 | ||
|
|
0ff2c9950e | ||
|
|
ddcdcdf3e2 | ||
|
|
bfc99f19d2 | ||
|
|
de2c08cfe8 | ||
|
|
9679f07124 | ||
|
|
6ce24d6a4e | ||
|
|
e3e029f5f6 | ||
|
|
89172a6e89 | ||
|
|
e8f1dbaa46 | ||
|
|
16b5b6fdf3 | ||
|
|
3bab29ff76 | ||
|
|
d675d2e668 | ||
|
|
2dda88e676 | ||
|
|
5a0090eb1f | ||
|
|
d438b4ec4e | ||
|
|
f8810fd7e6 | ||
|
|
a642cfea96 | ||
|
|
2daa20a9f2 | ||
|
|
4d5c0c40f0 | ||
|
|
29051681df | ||
|
|
96d5e81538 | ||
|
|
9ba9dd6949 | ||
|
|
b362d57270 | ||
|
|
4ff95e1718 | ||
|
|
ef02bedd4d | ||
|
|
ed68a09c3e | ||
|
|
ac7ae5fc5d | ||
|
|
9727b1f020 | ||
|
|
2dc454f15f | ||
|
|
4463af5c7a | ||
|
|
c00fb37cf6 | ||
|
|
4deb426f9c | ||
|
|
008b04413a | ||
|
|
f177ba46e9 | ||
|
|
c030fce589 | ||
|
|
50a5150bb5 | ||
|
|
0578ccf47e | ||
|
|
e32cd36ded | ||
|
|
6ac0ab121c | ||
|
|
05db2e6a73 | ||
|
|
0b89d30d6c | ||
|
|
8a021c9417 | ||
|
|
f218771382 | ||
|
|
17309291ed | ||
|
|
86300577ae | ||
|
|
f9594e0d61 | ||
|
|
20fd4b2e12 | ||
|
|
718e4d0e0c | ||
|
|
dc3584cd08 | ||
|
|
b41788b241 | ||
|
|
af9a2f9792 | ||
|
|
f8b8e760de | ||
|
|
93d6ec8024 | ||
|
|
7ed6b84056 | ||
|
|
31da488365 | ||
|
|
8eaf05f782 | ||
|
|
9311e90f27 | ||
|
|
c0de970de2 | ||
|
|
465ac2093d | ||
|
|
19ab0e8698 | ||
|
|
02fa98e5e3 | ||
|
|
4aef9d48b0 | ||
|
|
bd2d05344e | ||
|
|
edc7696623 | ||
|
|
e2f91094bc | ||
|
|
38dba57378 | ||
|
|
be6a6ba1d7 | ||
|
|
31fb5d9ec8 | ||
|
|
6c8f946a48 | ||
|
|
d59e1b9789 | ||
|
|
2852d3fbc3 | ||
|
|
d0292eb751 | ||
|
|
17bbf69346 | ||
|
|
4fb4229c90 | ||
|
|
03b7586b50 | ||
|
|
a1427866ca | ||
|
|
5f787b8146 | ||
|
|
b03405a665 | ||
|
|
4393dad15a | ||
|
|
b4d604b2e9 | ||
|
|
5fb6e5c0ba | ||
|
|
9d7dd3a713 | ||
|
|
4f867b320d | ||
|
|
c153690104 | ||
|
|
8c1b96edf7 | ||
|
|
15c83a69f7 | ||
|
|
0986de85ee | ||
|
|
94c5d83ccc | ||
|
|
ff49b9d32d | ||
|
|
ec5290dab1 | ||
|
|
4f758ec84e | ||
|
|
7951be156a | ||
|
|
1bafc1d7b7 | ||
|
|
1493101325 | ||
|
|
824b044a96 | ||
|
|
0978be4c3d | ||
|
|
dc832aa6c3 | ||
|
|
8df77fe4fa | ||
|
|
ff413b45fa | ||
|
|
e7befb5dc1 | ||
|
|
b898f82771 | ||
|
|
e6b22cb0df | ||
|
|
cbfe4a133d | ||
|
|
823b78c626 | ||
|
|
03e1255043 | ||
|
|
3373cbca4e | ||
|
|
4fba4b5f67 | ||
|
|
9fcf6da3c1 |
@@ -29,14 +29,8 @@ jobs:
|
|||||||
- run:
|
- run:
|
||||||
name: Tests and Coverage
|
name: Tests and Coverage
|
||||||
command: yarn test-coverage
|
command: yarn test-coverage
|
||||||
- run:
|
|
||||||
name: Potentially save npm token
|
|
||||||
command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
|
|
||||||
- run:
|
|
||||||
name: Potentially publish releases to npm
|
|
||||||
command: ./.circleci/publish.sh
|
|
||||||
workflows:
|
workflows:
|
||||||
version: 2
|
version: 2
|
||||||
build-and-deploy:
|
build-and-test:
|
||||||
jobs:
|
jobs:
|
||||||
- build
|
- build
|
||||||
|
|||||||
@@ -1,6 +1,13 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
|
||||||
|
if [ -z "$NPM_TOKEN" ]; then
|
||||||
|
echo "NPM_TOKEN not found. Did you forget to assign the GitHub Action secret?"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||||
|
|
||||||
if [ ! -e ~/.npmrc ]; then
|
if [ ! -e ~/.npmrc ]; then
|
||||||
echo "~/.npmrc file does not exist, skipping publish"
|
echo "~/.npmrc file does not exist, skipping publish"
|
||||||
exit 0
|
exit 0
|
||||||
@@ -21,4 +28,7 @@ else
|
|||||||
echo "Publishing stable release"
|
echo "Publishing stable release"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Sometimes this is a false alarm and blocks publish
|
||||||
|
git checkout yarn.lock
|
||||||
|
|
||||||
yarn run lerna publish from-git $npm_tag --yes
|
yarn run lerna publish from-git $npm_tag --yes
|
||||||
|
|||||||
@@ -1,30 +1,41 @@
|
|||||||
root = true
|
root = true
|
||||||
|
|
||||||
[*]
|
[*]
|
||||||
indent_style = tab
|
|
||||||
indent_size = 4
|
|
||||||
tab_width = 4
|
|
||||||
end_of_line = lf
|
end_of_line = lf
|
||||||
charset = utf-8
|
charset = utf-8
|
||||||
trim_trailing_whitespace = true
|
trim_trailing_whitespace = true
|
||||||
insert_final_newline = true
|
insert_final_newline = true
|
||||||
|
|
||||||
[{*.json,*.json.example,*.gyp,*.yml}]
|
[{*.json,*.json.example,*.gyp,*.yml,*.yaml,*.workflow}]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 2
|
indent_size = 2
|
||||||
|
|
||||||
[*.py]
|
[{*.py,*.asm}]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
|
|
||||||
|
[*.py]
|
||||||
indent_size = 4
|
indent_size = 4
|
||||||
|
|
||||||
|
[*.asm]
|
||||||
|
indent_size = 8
|
||||||
|
|
||||||
[*.md]
|
[*.md]
|
||||||
trim_trailing_whitespace = false
|
trim_trailing_whitespace = false
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
# Ideal settings - some plugins might support these.
|
# Ideal settings - some plugins might support these
|
||||||
[*.js]
|
[*.js,*.jsx,*.ts,*.tsx]
|
||||||
quote_type = single
|
quote_type = single
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}]
|
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.java,*.go,*.rs,*.php,*.ng,*.d,*.cs,*.swift}]
|
||||||
|
indent_style = tab
|
||||||
|
indent_size = 4
|
||||||
|
tab_width = 4
|
||||||
|
|
||||||
|
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.tsx,*.d,*.cs,*.swift}]
|
||||||
curly_bracket_next_line = false
|
curly_bracket_next_line = false
|
||||||
spaces_around_operators = true
|
spaces_around_operators = true
|
||||||
spaces_around_brackets = outside
|
spaces_around_brackets = outside
|
||||||
@@ -3,8 +3,14 @@
|
|||||||
/**/node_modules/*
|
/**/node_modules/*
|
||||||
/packages/now-go/go/*
|
/packages/now-go/go/*
|
||||||
/packages/now-build-utils/dist/*
|
/packages/now-build-utils/dist/*
|
||||||
|
/packages/now-build-utils/src/*.js
|
||||||
|
/packages/now-build-utils/src/fs/*.js
|
||||||
/packages/now-node/dist/*
|
/packages/now-node/dist/*
|
||||||
/packages/now-next/dist/*
|
/packages/now-next/dist/*
|
||||||
/packages/now-node-bridge/*
|
/packages/now-node-bridge/*
|
||||||
/packages/now-python/*
|
/packages/now-python/dist/*
|
||||||
/packages/now-optipng/dist/*
|
/packages/now-go/*
|
||||||
|
/packages/now-ruby/dist/*
|
||||||
|
/packages/now-static-build/dist/*
|
||||||
|
/packages/now-static-build/test/fixtures/**
|
||||||
|
/packages/now-routing-utils/dist/*
|
||||||
|
|||||||
13
.github/CODEOWNERS
vendored
Normal file
13
.github/CODEOWNERS
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
# Documentation
|
||||||
|
# https://help.github.com/en/articles/about-code-owners
|
||||||
|
|
||||||
|
* @styfle
|
||||||
|
/packages/now-build-utils @styfle @AndyBitz
|
||||||
|
/packages/now-node @styfle @tootallnate @lucleray
|
||||||
|
/packages/now-node-bridge @styfle @tootallnate @lucleray
|
||||||
|
/packages/now-next @timer
|
||||||
|
/packages/now-go @styfle @sophearak
|
||||||
|
/packages/now-python @styfle @sophearak
|
||||||
|
/packages/now-ruby @styfle @coetry @nathancahill
|
||||||
|
/packages/now-static-build @styfle @AndyBitz
|
||||||
|
/packages/now-routing-utils @dav-is
|
||||||
76
.github/main.workflow
vendored
Normal file
76
.github/main.workflow
vendored
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
workflow "Canary publish" {
|
||||||
|
on = "push"
|
||||||
|
resolves = ["3. Canary yarn run publish"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Canary filter" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
args = "branch canary"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Canary PR not deleted" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
needs = ["0. Canary filter"]
|
||||||
|
args = "not deleted"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "1. Canary yarn install" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["0. Canary PR not deleted"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "--pure-lockfile install"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "2. Canary yarn run build" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["1. Canary yarn install"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "--pure-lockfile run build"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "3. Canary yarn run publish" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["2. Canary yarn run build"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "--pure-lockfile run publish-from-github"
|
||||||
|
secrets = ["NPM_TOKEN"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
workflow "Master publish" {
|
||||||
|
on = "push"
|
||||||
|
resolves = ["3. Master yarn run publish"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Master filter" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
args = "branch master"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Master PR not deleted" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
needs = ["0. Master filter"]
|
||||||
|
args = "not deleted"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "1. Master yarn install" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["0. Master PR not deleted"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "--pure-lockfile install"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "2. Master yarn run build" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["1. Master yarn install"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "--pure-lockfile run build"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "3. Master yarn run publish" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["2. Master yarn run build"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "--pure-lockfile run publish-from-github"
|
||||||
|
secrets = ["NPM_TOKEN"]
|
||||||
|
}
|
||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,4 +1,6 @@
|
|||||||
node_modules
|
node_modules
|
||||||
tmp
|
tmp
|
||||||
target/
|
target/
|
||||||
.next
|
.next
|
||||||
|
coverage
|
||||||
|
*.tgz
|
||||||
|
|||||||
@@ -1,4 +0,0 @@
|
|||||||
{
|
|
||||||
"singleQuote": true,
|
|
||||||
"trailingComma": "es5"
|
|
||||||
}
|
|
||||||
3
.vscode/settings.json
vendored
3
.vscode/settings.json
vendored
@@ -1,3 +0,0 @@
|
|||||||
{
|
|
||||||
"eslint.enable": false
|
|
||||||
}
|
|
||||||
74
CODE_OF_CONDUCT.md
Normal file
74
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
## Code of Conduct
|
||||||
|
|
||||||
|
### Our Pledge
|
||||||
|
|
||||||
|
In the interest of fostering an open and welcoming environment, we as
|
||||||
|
contributors and maintainers pledge to making participation in our project and
|
||||||
|
our community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||||
|
nationality, personal appearance, race, religion, or sexual identity and
|
||||||
|
orientation.
|
||||||
|
|
||||||
|
### Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to creating a positive environment
|
||||||
|
include:
|
||||||
|
|
||||||
|
- Using welcoming and inclusive language
|
||||||
|
- Being respectful of differing viewpoints and experiences
|
||||||
|
- Gracefully accepting constructive criticism
|
||||||
|
- Focusing on what is best for the community
|
||||||
|
- Showing empathy towards other community members
|
||||||
|
|
||||||
|
Examples of unacceptable behavior by participants include:
|
||||||
|
|
||||||
|
- The use of sexualized language or imagery and unwelcome sexual attention or
|
||||||
|
advances
|
||||||
|
- Trolling, insulting/derogatory comments, and personal or political attacks
|
||||||
|
- Public or private harassment
|
||||||
|
- Publishing others' private information, such as a physical or electronic
|
||||||
|
address, without explicit permission
|
||||||
|
- Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
### Our Responsibilities
|
||||||
|
|
||||||
|
Project maintainers are responsible for clarifying the standards of acceptable
|
||||||
|
behavior and are expected to take appropriate and fair corrective action in
|
||||||
|
response to any instances of unacceptable behavior.
|
||||||
|
|
||||||
|
Project maintainers have the right and responsibility to remove, edit, or
|
||||||
|
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||||
|
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||||
|
permanently any contributor for other behaviors that they deem inappropriate,
|
||||||
|
threatening, offensive, or harmful.
|
||||||
|
|
||||||
|
### Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies both within project spaces and in public spaces
|
||||||
|
when an individual is representing the project or its community. Examples of
|
||||||
|
representing a project or community include using an official project e-mail
|
||||||
|
address, posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event. Representation of a project may be
|
||||||
|
further defined and clarified by project maintainers.
|
||||||
|
|
||||||
|
### Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported by contacting the project team at [abuse@zeit.co](mailto:abuse@zeit.co). All
|
||||||
|
complaints will be reviewed and investigated and will result in a response that
|
||||||
|
is deemed necessary and appropriate to the circumstances. The project team is
|
||||||
|
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||||
|
Further details of specific enforcement policies may be posted separately.
|
||||||
|
|
||||||
|
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||||
|
faith may face temporary or permanent repercussions as determined by other
|
||||||
|
members of the project's leadership.
|
||||||
|
|
||||||
|
### Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||||
|
available at [http://contributor-covenant.org/version/1/4][version]
|
||||||
|
|
||||||
|
[homepage]: http://contributor-covenant.org
|
||||||
|
[version]: http://contributor-covenant.org/version/1/4/
|
||||||
85
CONTRIBUTING.md
Normal file
85
CONTRIBUTING.md
Normal file
@@ -0,0 +1,85 @@
|
|||||||
|
# Contributing
|
||||||
|
|
||||||
|
When contributing to this repository, please first discuss the change you wish to make via issue or [spectrum](https://spectrum.chat/zeit) with the owners of this repository before submitting a Pull Request.
|
||||||
|
|
||||||
|
Please read our [code of conduct](CODE_OF_CONDUCT.md) and follow it in all your interactions with the project.
|
||||||
|
|
||||||
|
## Local development
|
||||||
|
|
||||||
|
This project is configured in a monorepo pattern where one repo contains multiple npm packages. Dependencies are installed and managed with `yarn`, not `npm` CLI.
|
||||||
|
|
||||||
|
To get started, execute the following:
|
||||||
|
|
||||||
|
```
|
||||||
|
git clone https://github.com/zeit/now-builders
|
||||||
|
yarn install
|
||||||
|
yarn bootstrap
|
||||||
|
yarn build
|
||||||
|
yarn lint
|
||||||
|
yarn test
|
||||||
|
```
|
||||||
|
|
||||||
|
Make sure all the tests pass before making changes.
|
||||||
|
|
||||||
|
## Verifying your change
|
||||||
|
|
||||||
|
Once you are done with your changes (we even suggest doing it along the way ), make sure all the test still run by running
|
||||||
|
|
||||||
|
```
|
||||||
|
yarn build && yarn test
|
||||||
|
```
|
||||||
|
|
||||||
|
from the root of the project.
|
||||||
|
|
||||||
|
If any test fails, make sure to fix it along with your changes. See [Interpreting test errors](#Interpreting-test-errors) for more information about how the tests are executed, especially the integration tests.
|
||||||
|
|
||||||
|
## Pull Request Process
|
||||||
|
|
||||||
|
Once you are confident that your changes work properly, open a pull request on the main repository.
|
||||||
|
|
||||||
|
The pull request will be reviewed by the maintainers and the tests will be checked by our continuous integration platform.
|
||||||
|
|
||||||
|
## Interpreting test errors
|
||||||
|
|
||||||
|
There are 2 kinds of tests in this repository – Unit tests and Integration tests.
|
||||||
|
|
||||||
|
Unit tests are run locally with `jest` and execute quickly because they are testing the smallest units of code.
|
||||||
|
|
||||||
|
### Integration tests
|
||||||
|
|
||||||
|
Integration tests create deployments to your ZEIT account using the `test` project name. After each test is deployed, the `probes` key is used to check if the response is the expected value. If the value doesn't match, you'll see a message explaining the difference. If the deployment failed to build, you'll see a more generic message like the following:
|
||||||
|
|
||||||
|
```
|
||||||
|
[Error: Fetched page https://test-8ashcdlew.now.sh/root.js does not contain hello Root!. Instead it contains An error occurred with this application.
|
||||||
|
|
||||||
|
NO_STATUS_CODE_FRO Response headers:
|
||||||
|
cache-control=s-maxage=0
|
||||||
|
connection=close
|
||||||
|
content-type=text/plain; charset=utf-8
|
||||||
|
date=Wed, 19 Jun 2019 18:01:37 GMT
|
||||||
|
server=now
|
||||||
|
strict-transport-security=max-age=63072000
|
||||||
|
transfer-encoding=chunked
|
||||||
|
x-now-id=iad1:hgtzj-1560967297876-44ae12559f95
|
||||||
|
x-now-trace=iad1]
|
||||||
|
```
|
||||||
|
|
||||||
|
In such cases you can visit the URL of the failed deployment and append `/_logs` so see the build error. In the case above, that would be https://test-8ashcdlew.now.sh/_logs
|
||||||
|
|
||||||
|
The logs of this deployment will contain the actual error which may help you to understand what went wrong.
|
||||||
|
|
||||||
|
### @zeit/ncc integration
|
||||||
|
|
||||||
|
Some of the builders use `@zeit/ncc` to bundle files before deployment. If you suspect an error with the bundling mechanism, you can run the `ncc` CLI with a couple modifications to the test.
|
||||||
|
|
||||||
|
For example if an error occurred in `now-node/test/fixtures/08-assets`
|
||||||
|
|
||||||
|
```
|
||||||
|
cd packages/now-node/test/fixtures/08-assets
|
||||||
|
yarn install
|
||||||
|
echo 'require("http").createServer(module.exports).listen(3000)' >> index.js
|
||||||
|
npx @zeit/ncc@0.20.1 build index.js --source-map
|
||||||
|
node dist
|
||||||
|
```
|
||||||
|
|
||||||
|
This will compile the test with the specific version of `ncc` and run the resulting file. If it fails here, then there is likely a bug in `ncc` and not the Builder.
|
||||||
447
DEVELOPING_A_BUILDER.md
Normal file
447
DEVELOPING_A_BUILDER.md
Normal file
@@ -0,0 +1,447 @@
|
|||||||
|
# Builders Developer Reference
|
||||||
|
|
||||||
|
The following page is a reference for how to create a Builder using the available Builder's API.
|
||||||
|
|
||||||
|
A Builder is an npm module that exposes a `build` function and optionally an `analyze` function and `prepareCache` function.
|
||||||
|
Official Builders are published to [npmjs.com](https://npmjs.com) as a package and referenced in the `use` property of the `now.json` configuration file.
|
||||||
|
However, the `use` property will work with any [npm install argument](https://docs.npmjs.com/cli/install) such as a git repo url which is useful for testing your Builder.
|
||||||
|
|
||||||
|
See the [Builders Documentation](https://zeit.co/docs/v2/advanced/builders) to view example usage.
|
||||||
|
|
||||||
|
## Builder Exports
|
||||||
|
|
||||||
|
### `version`
|
||||||
|
|
||||||
|
A **required** exported constant that decides which version of the Builder API to use.
|
||||||
|
|
||||||
|
The latest and suggested version is `2`.
|
||||||
|
|
||||||
|
### `analyze`
|
||||||
|
|
||||||
|
An **optional** exported function that returns a unique fingerprint used for the purpose of [build de-duplication](https://zeit.co/docs/v2/advanced/concepts/immutability#deduplication-algorithm). If the `analyze` function is not supplied, a random fingerprint is assigned to each build.
|
||||||
|
|
||||||
|
```js
|
||||||
|
export analyze({
|
||||||
|
files: Files,
|
||||||
|
entrypoint: String,
|
||||||
|
workPath: String,
|
||||||
|
config: Object
|
||||||
|
}) : String fingerprint
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are using TypeScript, you should use the following types:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { AnalyzeOptions } from '@now/build-utils'
|
||||||
|
|
||||||
|
export analyze(options: AnalyzeOptions) {
|
||||||
|
return 'fingerprint goes here'
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `build`
|
||||||
|
|
||||||
|
A **required** exported function that returns a [Files](#files) data structure that contains the Build outputs, which can be a [Static File](#file) or a [Serverless Function](#serverless-function).
|
||||||
|
|
||||||
|
What's a Serverless Function? Read about [Serverless Function concepts](https://zeit.co/docs/v2/deployments/concepts/lambdas) to learn more.
|
||||||
|
|
||||||
|
```js
|
||||||
|
build({
|
||||||
|
files: Files,
|
||||||
|
entrypoint: String,
|
||||||
|
workPath: String,
|
||||||
|
config: Object,
|
||||||
|
meta?: {
|
||||||
|
isDev?: Boolean,
|
||||||
|
requestPath?: String,
|
||||||
|
filesChanged?: Array<String>,
|
||||||
|
filesRemoved?: Array<String>
|
||||||
|
}
|
||||||
|
}) : {
|
||||||
|
watch: Array<String>,
|
||||||
|
output: Files output,
|
||||||
|
routes: Object
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are using TypeScript, you should use the following types:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { BuildOptions } from '@now/build-utils'
|
||||||
|
|
||||||
|
export build(options: BuildOptions) {
|
||||||
|
// Build the code here
|
||||||
|
|
||||||
|
return {
|
||||||
|
output: {
|
||||||
|
'path-to-file': File,
|
||||||
|
'path-to-lambda': Lambda
|
||||||
|
},
|
||||||
|
watch: [],
|
||||||
|
routes: {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `prepareCache`
|
||||||
|
|
||||||
|
An **optional** exported function that is equivalent to [`build`](#build), but it executes the instructions necessary to prepare a cache for the next run.
|
||||||
|
|
||||||
|
```js
|
||||||
|
prepareCache({
|
||||||
|
files: Files,
|
||||||
|
entrypoint: String,
|
||||||
|
workPath: String,
|
||||||
|
cachePath: String,
|
||||||
|
config: Object
|
||||||
|
}) : Files cacheOutput
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are using TypeScript, you can import the types for each of these functions by using the following:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { PrepareCacheOptions } from '@now/build-utils'
|
||||||
|
|
||||||
|
export prepareCache(options: PrepareCacheOptions) {
|
||||||
|
return { 'path-to-file': File }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `shouldServe`
|
||||||
|
|
||||||
|
An **optional** exported function that is only used by `now dev` in [Now CLI](https:///download) and indicates whether a [Builder](https://zeit.co/docs/v2/advanced/builders) wants to be responsible for building a certain request path.
|
||||||
|
|
||||||
|
```js
|
||||||
|
shouldServe({
|
||||||
|
entrypoint: String,
|
||||||
|
files: Files,
|
||||||
|
config: Object,
|
||||||
|
requestPath: String,
|
||||||
|
workPath: String
|
||||||
|
}) : Boolean
|
||||||
|
```
|
||||||
|
|
||||||
|
If you are using TypeScript, you can import the types for each of these functions by using the following:
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { ShouldServeOptions } from '@now/build-utils'
|
||||||
|
|
||||||
|
export function shouldServe(options: ShouldServeOptions) {
|
||||||
|
return Boolean
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
If this method is not defined, Now CLI will default to [this function](https://github.com/zeit/now-builders/blob/52994bfe26c5f4f179bdb49783ee57ce19334631/packages/now-build-utils/src/should-serve.ts).
|
||||||
|
|
||||||
|
### Builder Options
|
||||||
|
|
||||||
|
The exported functions [`analyze`](#analyze), [`build`](#build), and [`prepareCache`](#preparecache) receive one argument with the following properties.
|
||||||
|
|
||||||
|
**Properties:**
|
||||||
|
|
||||||
|
- `files`: All source files of the project as a [Files](#files) data structure.
|
||||||
|
- `entrypoint`: Name of entrypoint file for this particular build job. Value `files[entrypoint]` is guaranteed to exist and be a valid [File](#files) reference. `entrypoint` is always a discrete file and never a glob, since globs are expanded into separate builds at deployment time.
|
||||||
|
- `workPath`: A writable temporary directory where you are encouraged to perform your build process. This directory will be populated with the restored cache from the previous run (if any) for [`analyze`](#analyze) and [`build`](#build).
|
||||||
|
- `cachePath`: A writable temporary directory where you can build a cache for the next run. This is only passed to `prepareCache`.
|
||||||
|
- `config`: An arbitrary object passed by the user in the [Build definition](#defining-the-build-step) in `now.json`.
|
||||||
|
|
||||||
|
## Example: html-minifier
|
||||||
|
|
||||||
|
Let's walk through what it takes to create a simple builder that takes in an HTML source file and yields a minified HTML static file as its build output.
|
||||||
|
|
||||||
|
While this is a very simple builder, the approach demonstrated here can be used to return anything: one or more static files and/or one or more lambdas.
|
||||||
|
|
||||||
|
## Setting up the module
|
||||||
|
|
||||||
|
### Defining the analyze step
|
||||||
|
|
||||||
|
The `analyze` hook is optional. Its goal is to give the developer a tool to avoid wasting time _re-computing a build_ that has already occurred.
|
||||||
|
|
||||||
|
The return value of `analyze` is a _fingerprint_: a simple string that uniquely identifies the build process.
|
||||||
|
|
||||||
|
If `analyze` is not specified, its behavior is to use as the fingerprint the combined checksums of **all the files in the same directory level as the entrypoint**. This is a default that errs on making sure that we re-execute builds when files _other than the entrypoint_ (like dependencies, manifest files, etc) have changed.
|
||||||
|
|
||||||
|
For our `html-minifier` example, we know that HTML files don't have dependencies. Therefore, our analyze step can just return the `digest` of the entrypoint.
|
||||||
|
|
||||||
|
Our `index.js` file looks as follows:
|
||||||
|
|
||||||
|
```js
|
||||||
|
exports.analyze = function({ files, entrypoint }) {
|
||||||
|
return files[entrypoint].digest
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
This means that we will re-minify and re-create the build output _only if the file contents (and therefore its digest) change._
|
||||||
|
|
||||||
|
### Defining the build step
|
||||||
|
|
||||||
|
Your module will need some utilities to manipulate the data structures we pass you, create new ones and alter the filesystem.
|
||||||
|
|
||||||
|
To that end, we expose our API as part of a `@now/build-utils` package. This package is always loaded on your behalf, so make sure it's only included as `peerDependencies` in your `package.json`.
|
||||||
|
|
||||||
|
Builders can include dependencies of their liking:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const htmlMinifier = require('html-minifier')
|
||||||
|
|
||||||
|
exports.version = 2
|
||||||
|
|
||||||
|
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest
|
||||||
|
|
||||||
|
exports.build = async ({ files, entrypoint, config }) => {
|
||||||
|
const stream = files[entrypoint].toStream()
|
||||||
|
const options = Object.assign({}, config || {})
|
||||||
|
const { data } = await FileBlob.fromStream({ stream })
|
||||||
|
const content = data.toString()
|
||||||
|
const minified = htmlMinifier(content, options)
|
||||||
|
const result = new FileBlob({ data: minified })
|
||||||
|
|
||||||
|
return {
|
||||||
|
output: {
|
||||||
|
[entrypoint]: result
|
||||||
|
},
|
||||||
|
watch: [],
|
||||||
|
routes: {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Defining a `prepareCache` step
|
||||||
|
|
||||||
|
If our builder had performed work that could be re-used in the next build invocation, we could define a `prepareCache` step.
|
||||||
|
|
||||||
|
In this case, there are no intermediate artifacts that we can cache, and our `analyze` step already takes care of caching the full output based on the fingerprint of the input.
|
||||||
|
|
||||||
|
## Technical Details
|
||||||
|
|
||||||
|
### Execution Context
|
||||||
|
|
||||||
|
A [Serverless Function](https://zeit.co/docs/v2/advanced/concepts/lambdas) is created where the builder logic is executed. The lambda is run using the Node.js 8 runtime. A brand new sandbox is created for each deployment, for security reasons. The sandbox is cleaned up between executions to ensure no lingering temporary files are shared from build to build.
|
||||||
|
|
||||||
|
All the APIs you export ([`analyze`](#analyze), [`build`](#build) and [`prepareCache`](#preparecache)) are not guaranteed to be run in the same process, but the filesystem we expose (e.g.: `workPath` and the results of calling [`getWriteableDirectory`](#getWriteableDirectory) ) is retained.
|
||||||
|
|
||||||
|
If you need to share state between those steps, use the filesystem.
|
||||||
|
|
||||||
|
### Directory and Cache Lifecycle
|
||||||
|
|
||||||
|
When a new build is created, we pre-populate the `workPath` supplied to `analyze` with the results of the `prepareCache` step of the previous build.
|
||||||
|
|
||||||
|
The `analyze` step can modify that directory, and it will not be re-created when it's supplied to `build` and `prepareCache`.
|
||||||
|
|
||||||
|
To learn how the cache key is computed and invalidated, refer to the [overview](https://zeit.co/docs/v2/advanced/builders#technical-details).
|
||||||
|
|
||||||
|
### Accessing Environment and Secrets
|
||||||
|
|
||||||
|
The env and secrets specified by the user as `build.env` are passed to the builder process. This means you can access user env via `process.env` in Node.js.
|
||||||
|
|
||||||
|
### Utilities as peerDependencies
|
||||||
|
|
||||||
|
When you publish your builder to npm, make sure to not specify `@now/build-utils` (as seen below in the API definitions) as a dependency, but rather as part of `peerDependencies`.
|
||||||
|
|
||||||
|
## Types
|
||||||
|
|
||||||
|
### `Files`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { File } from '@now/build-utils'
|
||||||
|
type Files = { [filePath: string]: File }
|
||||||
|
```
|
||||||
|
|
||||||
|
This is an abstract type that is implemented as a plain [JavaScript Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object). It's helpful to think of it as a virtual filesystem representation.
|
||||||
|
|
||||||
|
When used as an input, the `Files` object will only contain `FileRefs`. When `Files` is an output, it may consist of `Lambda` (Serverless Functions) types as well as `FileRefs`.
|
||||||
|
|
||||||
|
An example of a valid output `Files` object is:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"index.html": FileRef,
|
||||||
|
"api/index.js": Lambda
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `File`
|
||||||
|
|
||||||
|
This is an abstract type that can be imported if you are using TypeScript.
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { File } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
Valid `File` types include:
|
||||||
|
|
||||||
|
- [`FileRef`](#fileref)
|
||||||
|
- [`FileFsRef`](#filefsref)
|
||||||
|
- [`FileBlob`](#fileblob)
|
||||||
|
|
||||||
|
### `FileRef`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { FileRef } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes) that represents an abstract file instance stored in our platform, based on the file identifier string (its checksum). When a `Files` object is passed as an input to `analyze` or `build`, all its values will be instances of `FileRef`.
|
||||||
|
|
||||||
|
**Properties:**
|
||||||
|
|
||||||
|
- `mode : Number` file mode
|
||||||
|
- `digest : String` a checksum that represents the file
|
||||||
|
|
||||||
|
**Methods:**
|
||||||
|
|
||||||
|
- `toStream() : Stream` creates a [Stream](https://nodejs.org/api/stream.html) of the file body
|
||||||
|
|
||||||
|
### `FileFsRef`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { FileFsRef } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes) that represents an abstract instance of a file present in the filesystem that the build process is executing in.
|
||||||
|
|
||||||
|
**Properties:**
|
||||||
|
|
||||||
|
- `mode : Number` file mode
|
||||||
|
- `fsPath : String` the absolute path of the file in file system
|
||||||
|
|
||||||
|
**Methods:**
|
||||||
|
|
||||||
|
- `static async fromStream({ mode : Number, stream : Stream, fsPath : String }) : FileFsRef` creates an instance of a [FileFsRef](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) from `Stream`, placing file at `fsPath` with `mode`
|
||||||
|
- `toStream() : Stream` creates a [Stream](https://nodejs.org/api/stream.html) of the file body
|
||||||
|
|
||||||
|
### `FileBlob`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { FileBlob } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes) that represents an abstract instance of a file present in memory.
|
||||||
|
|
||||||
|
**Properties:**
|
||||||
|
|
||||||
|
- `mode : Number` file mode
|
||||||
|
- `data : String | Buffer` the body of the file
|
||||||
|
|
||||||
|
**Methods:**
|
||||||
|
|
||||||
|
- `static async fromStream({ mode : Number, stream : Stream }) : FileBlob` creates an instance of a [FileBlob](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object) from [`Stream`](https://nodejs.org/api/stream.html) with `mode`
|
||||||
|
- `toStream() : Stream` creates a [Stream](https://nodejs.org/api/stream.html) of the file body
|
||||||
|
|
||||||
|
### `Lambda`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { Lambda } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes), called a Serverless Function, that can be created by supplying `files`, `handler`, `runtime`, and `environment` as an object to the [`createLambda`](#createlambda) helper. The instances of this class should not be created directly. Instead use a call to [`createLambda`](#createlambda).
|
||||||
|
|
||||||
|
**Properties:**
|
||||||
|
|
||||||
|
- `files : Files` the internal filesystem of the lambda
|
||||||
|
- `handler : String` path to handler file and (optionally) a function name it exports
|
||||||
|
- `runtime : LambdaRuntime` the name of the lambda runtime
|
||||||
|
- `environment : Object` key-value map of handler-related (aside of those passed by user) environment variables
|
||||||
|
|
||||||
|
### `LambdaRuntime`
|
||||||
|
|
||||||
|
This is an abstract enumeration type that is implemented by one of the following possible `String` values:
|
||||||
|
|
||||||
|
- `nodejs10.x`
|
||||||
|
- `nodejs8.10`
|
||||||
|
- `go1.x`
|
||||||
|
- `java-1.8.0-openjdk`
|
||||||
|
- `python3.6`
|
||||||
|
- `python2.7`
|
||||||
|
- `dotnetcore2.1`
|
||||||
|
- `dotnetcore2.0`
|
||||||
|
- `dotnetcore1.0`
|
||||||
|
|
||||||
|
## JavaScript API
|
||||||
|
|
||||||
|
The following is exposed by `@now/build-utils` to simplify the process of writing Builders, manipulating the file system, using the above types, etc.
|
||||||
|
|
||||||
|
### `createLambda`
|
||||||
|
|
||||||
|
Signature: `createLambda(Object spec) : Lambda`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { createLambda } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
Constructor for the [`Lambda`](#lambda) type.
|
||||||
|
|
||||||
|
```js
|
||||||
|
const { createLambda, FileBlob } = require('@now/build-utils')
|
||||||
|
await createLambda({
|
||||||
|
runtime: 'nodejs8.10',
|
||||||
|
handler: 'index.main',
|
||||||
|
files: {
|
||||||
|
'index.js': new FileBlob({ data: 'exports.main = () => {}' })
|
||||||
|
}
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
### `download`
|
||||||
|
|
||||||
|
Signature: `download() : Files`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { download } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
This utility allows you to download the contents of a [`Files`](#files) data structure, therefore creating the filesystem represented in it.
|
||||||
|
|
||||||
|
Since `Files` is an abstract way of representing files, you can think of `download` as a way of making that virtual filesystem _real_.
|
||||||
|
|
||||||
|
If the **optional** `meta` property is passed (the argument for [build](#build)), only the files that have changed are downloaded. This is decided using `filesRemoved` and `filesChanged` inside that object.
|
||||||
|
|
||||||
|
```js
|
||||||
|
await download(files, workPath, meta)
|
||||||
|
```
|
||||||
|
|
||||||
|
### `glob`
|
||||||
|
|
||||||
|
Signature: `glob() : Files`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { glob } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
This utility allows you to _scan_ the filesystem and return a [`Files`](#files) representation of the matched glob search string. It can be thought of as the reverse of [`download`](#download).
|
||||||
|
|
||||||
|
The following trivial example downloads everything to the filesystem, only to return it back (therefore just re-creating the passed-in [`Files`](#files)):
|
||||||
|
|
||||||
|
```js
|
||||||
|
const { glob, download } = require('@now/build-utils')
|
||||||
|
|
||||||
|
exports.build = async ({ files, workPath }) => {
|
||||||
|
await download(files, workPath)
|
||||||
|
return glob('**', workPath)
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `getWriteableDirectory`
|
||||||
|
|
||||||
|
Signature: `getWriteableDirectory() : String`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { getWriteableDirectory } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
In some occasions, you might want to write to a temporary directory.
|
||||||
|
|
||||||
|
### `rename`
|
||||||
|
|
||||||
|
Signature: `rename(Files) : Files`
|
||||||
|
|
||||||
|
```ts
|
||||||
|
import { rename } from '@now/build-utils'
|
||||||
|
```
|
||||||
|
|
||||||
|
Renames the keys of the [`Files`](#files) object, which represent the paths. For example, to remove the `*.go` suffix you can use:
|
||||||
|
|
||||||
|
```js
|
||||||
|
const { rename } = require('@now/build-utils')
|
||||||
|
const originalFiles = { 'one.go': fileFsRef1, 'two.go': fileFsRef2 }
|
||||||
|
const renamedFiles = rename(originalFiles, path => path.replace(/\.go$/, ''))
|
||||||
|
```
|
||||||
66
README.md
66
README.md
@@ -1,27 +1,65 @@
|
|||||||
# now-builders
|
# now-builders
|
||||||
|
|
||||||
This is the full list of official Builders provided by the ZEIT team.
|
This is a monorepo containing the [Official Builders](https://zeit.co/docs/v2/advanced/builders) provided by the ZEIT team.
|
||||||
|
|
||||||
More details here: https://zeit.co/docs/v2/deployments/builders/overview/
|
## Channels
|
||||||
|
|
||||||
|
There are two Channels:
|
||||||
|
|
||||||
|
| Channel | Git Branch | npm dist-tag | use example |
|
||||||
|
| ------- | ------------------------------------------------------------- | ------------ | ------------------ |
|
||||||
|
| Canary | [canary](https://github.com/zeit/now-builders/commits/canary) | `@canary` | `@now/node@canary` |
|
||||||
|
| Stable | [master](https://github.com/zeit/now-builders/commits/master) | `@latest` | `@now/node@latest` |
|
||||||
|
|
||||||
|
All PRs should be submitted to the `canary` branch.
|
||||||
|
|
||||||
|
Once a PR is merged into the `canary` branch, it should be published to npm immediately using the Canary Channel.
|
||||||
|
|
||||||
### Publishing to npm
|
### Publishing to npm
|
||||||
|
|
||||||
Run the following command to publish modified builders to npm:
|
For the Canary Channel, publish the modified Builders to npm with the following:
|
||||||
|
|
||||||
For the stable channel use:
|
|
||||||
|
|
||||||
```
|
|
||||||
yarn publish-stable
|
|
||||||
```
|
|
||||||
|
|
||||||
For the canary channel use:
|
|
||||||
|
|
||||||
```
|
```
|
||||||
yarn publish-canary
|
yarn publish-canary
|
||||||
```
|
```
|
||||||
|
|
||||||
CircleCI will take care of publishing the updated packages to npm from there.
|
For the Stable Channel, you must do the following:
|
||||||
|
|
||||||
If for some reason CircleCI fails to publish the npm package, you may do so
|
- Cherry pick each commit from canary to master
|
||||||
|
- Verify that you are _in-sync_ with canary (with the exception of the `version` line in `package.json`)
|
||||||
|
- Deploy the modified Builders
|
||||||
|
|
||||||
|
```
|
||||||
|
# View differences excluding "Publish" commits
|
||||||
|
git checkout canary && git pull
|
||||||
|
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/canary.txt
|
||||||
|
git checkout master && git pull
|
||||||
|
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/master.txt
|
||||||
|
diff ~/Desktop/canary.txt ~/Desktop/master.txt
|
||||||
|
|
||||||
|
# Cherry pick all PRs from canary into master ...
|
||||||
|
git cherry-pick <PR501_COMMIT_SHA>
|
||||||
|
git cherry-pick <PR502_COMMIT_SHA>
|
||||||
|
git cherry-pick <PR503_COMMIT_SHA>
|
||||||
|
git cherry-pick <PR504_COMMIT_SHA>
|
||||||
|
|
||||||
|
# Verify the only difference is "version" in package.json
|
||||||
|
git diff origin/canary
|
||||||
|
|
||||||
|
# Ship it
|
||||||
|
yarn publish-stable
|
||||||
|
```
|
||||||
|
|
||||||
|
After running this publish step, GitHub Actions will take care of publishing the modified Builder packages to npm.
|
||||||
|
|
||||||
|
If for some reason GitHub Actions fails to publish the npm package, you may do so
|
||||||
manually by running `npm publish` from the package directory. Make sure to
|
manually by running `npm publish` from the package directory. Make sure to
|
||||||
include the `--tag canary` parameter if you are publishing a canary release!
|
use `npm publish --tag canary` if you are publishing a canary release!
|
||||||
|
|
||||||
|
### Contributing
|
||||||
|
|
||||||
|
See the [Contribution guidelines for this project](CONTRIBUTING.md), it also contains guidance on interpreting tests failures.
|
||||||
|
|
||||||
|
### Creating Your Own Builder
|
||||||
|
|
||||||
|
To create your own Builder, see [the Builder's Developer Reference](DEVELOPING_A_BUILDER.md).
|
||||||
|
|||||||
@@ -29,12 +29,11 @@ Serverless:
|
|||||||
- No runtime dependencies, meaning smaller lambda functions
|
- No runtime dependencies, meaning smaller lambda functions
|
||||||
- Optimized for fast [cold start](https://zeit.co/blog/serverless-ssr#cold-start)
|
- Optimized for fast [cold start](https://zeit.co/blog/serverless-ssr#cold-start)
|
||||||
|
|
||||||
|
|
||||||
#### Possible Ways to Fix It
|
#### Possible Ways to Fix It
|
||||||
|
|
||||||
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
||||||
|
|
||||||
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
||||||
|
|
||||||
```
|
```
|
||||||
npm install next --save
|
npm install next --save
|
||||||
@@ -46,7 +45,7 @@ npm install next --save
|
|||||||
{
|
{
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"now-build": "next build"
|
"now-build": "next build"
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -54,9 +53,9 @@ npm install next --save
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
module.exports = {
|
module.exports = {
|
||||||
target: 'serverless'
|
target: 'serverless',
|
||||||
// Other options are still valid
|
// Other options are still valid
|
||||||
}
|
};
|
||||||
```
|
```
|
||||||
|
|
||||||
4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
|
4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
|
||||||
@@ -70,4 +69,4 @@ module.exports = {
|
|||||||
|
|
||||||
### Useful Links
|
### Useful Links
|
||||||
|
|
||||||
- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
|
- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ npm install next --save
|
|||||||
{
|
{
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"now-build": "next build"
|
"now-build": "next build"
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -28,9 +28,9 @@ npm install next --save
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
module.exports = {
|
module.exports = {
|
||||||
target: 'serverless'
|
target: 'serverless',
|
||||||
// Other options
|
// Other options
|
||||||
}
|
};
|
||||||
```
|
```
|
||||||
|
|
||||||
4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`
|
4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`
|
||||||
|
|||||||
38
errors/now-static-build-failed-to-detect-a-server.md
Normal file
38
errors/now-static-build-failed-to-detect-a-server.md
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# `@now/static-build` Failed to detect a server running
|
||||||
|
|
||||||
|
#### Why This Warning Occurred
|
||||||
|
|
||||||
|
When running `now dev`, the `@now/static-build` builder proxies relevant HTTP
|
||||||
|
requests to the server that is created by the `now-dev` script in the
|
||||||
|
`package.json` file.
|
||||||
|
|
||||||
|
In order for `now dev` to know which port the server is running on, the builder
|
||||||
|
is provided a `$PORT` environment variable that the server _must_ bind to. The
|
||||||
|
error "Failed to detect a server running on port" is printed if the builder fails
|
||||||
|
to detect a server listening on that specific port within five minutes.
|
||||||
|
|
||||||
|
#### Possible Ways to Fix It
|
||||||
|
|
||||||
|
Please ensure that your `now-dev` script binds the spawned development server on
|
||||||
|
the provided `$PORT` that the builder expects the server to bind to.
|
||||||
|
|
||||||
|
For example, if you are using Gatsby, your `now-dev` script must use the `-p`
|
||||||
|
(port) option to bind to the `$PORT` specified from the builder:
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
...
|
||||||
|
"scripts": {
|
||||||
|
...
|
||||||
|
"now-dev": "gatsby develop -p $PORT"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Consult your static builder program's `--help` or documentation to figure out what
|
||||||
|
the command line flag to bind to a specific port is (in many cases, it is one of:
|
||||||
|
`-p` / `-P` / `--port`).
|
||||||
|
|
||||||
|
### Useful Links
|
||||||
|
|
||||||
|
- [`@now/static-build` Local Development Documentation](https://zeit.co/docs/v2/deployments/official-builders/static-build-now-static-build#local-development)
|
||||||
@@ -1,5 +1,35 @@
|
|||||||
|
const { execSync } = require('child_process');
|
||||||
|
const { relative } = require('path');
|
||||||
|
|
||||||
|
const branch = execSync('git branch | grep "*" | cut -d " " -f2')
|
||||||
|
.toString()
|
||||||
|
.trim();
|
||||||
|
console.log(`Running tests on branch "${branch}"`);
|
||||||
|
const gitPath = branch === 'master' ? 'HEAD~1' : 'origin/canary...HEAD';
|
||||||
|
const diff = execSync(`git diff ${gitPath} --name-only`).toString();
|
||||||
|
|
||||||
|
const changed = diff
|
||||||
|
.split('\n')
|
||||||
|
.filter(item => Boolean(item) && item.includes('packages/'))
|
||||||
|
.map(item => relative('packages', item).split('/')[0]);
|
||||||
|
|
||||||
|
const matches = Array.from(new Set(changed));
|
||||||
|
|
||||||
|
if (matches.length === 0) {
|
||||||
|
matches.push('now-node');
|
||||||
|
console.log(`No packages changed, defaulting to ${matches[0]}`);
|
||||||
|
} else {
|
||||||
|
console.log('The following packages have changed:');
|
||||||
|
console.log(matches.join('\n'));
|
||||||
|
}
|
||||||
|
|
||||||
|
const testMatch = matches.map(
|
||||||
|
item => `**/${item}/**/?(*.)+(spec|test).[jt]s?(x)`,
|
||||||
|
);
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
testEnvironment: 'node',
|
testEnvironment: 'node',
|
||||||
|
testMatch,
|
||||||
collectCoverageFrom: [
|
collectCoverageFrom: [
|
||||||
'packages/(!test)/**/*.{js,jsx}',
|
'packages/(!test)/**/*.{js,jsx}',
|
||||||
'!**/node_modules/**',
|
'!**/node_modules/**',
|
||||||
|
|||||||
@@ -1,9 +1,7 @@
|
|||||||
{
|
{
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"useWorkspaces": true,
|
"useWorkspaces": true,
|
||||||
"packages": [
|
"packages": ["packages/*"],
|
||||||
"packages/*"
|
|
||||||
],
|
|
||||||
"command": {
|
"command": {
|
||||||
"publish": {
|
"publish": {
|
||||||
"npmClient": "npm",
|
"npmClient": "npm",
|
||||||
|
|||||||
46
package.json
46
package.json
@@ -12,8 +12,9 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"lerna": "lerna",
|
"lerna": "lerna",
|
||||||
"bootstrap": "lerna bootstrap",
|
"bootstrap": "lerna bootstrap",
|
||||||
"publish-stable": "lerna version",
|
"publish-stable": "git checkout master && git pull && lerna version --exact",
|
||||||
"publish-canary": "lerna version prerelease --preid canary",
|
"publish-canary": "git checkout canary && git pull && lerna version prerelease --preid canary --exact",
|
||||||
|
"publish-from-github": "./.circleci/publish.sh",
|
||||||
"build": "./.circleci/build.sh",
|
"build": "./.circleci/build.sh",
|
||||||
"lint": "eslint .",
|
"lint": "eslint .",
|
||||||
"codecov": "codecov",
|
"codecov": "codecov",
|
||||||
@@ -31,26 +32,35 @@
|
|||||||
"*.ts": [
|
"*.ts": [
|
||||||
"prettier --write",
|
"prettier --write",
|
||||||
"git add"
|
"git add"
|
||||||
|
],
|
||||||
|
"*.json": [
|
||||||
|
"prettier --write",
|
||||||
|
"git add"
|
||||||
|
],
|
||||||
|
"*.md": [
|
||||||
|
"prettier --write",
|
||||||
|
"git add"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/fs-extra": "^5.0.4",
|
"@types/node": "*",
|
||||||
"@types/glob": "^7.1.1",
|
"@zeit/ncc": "0.20.4",
|
||||||
"@types/multistream": "^2.1.1",
|
|
||||||
"@types/node": "^10.12.8",
|
|
||||||
"async-retry": "1.2.3",
|
"async-retry": "1.2.3",
|
||||||
"buffer-replace": "^1.0.0",
|
"buffer-replace": "1.0.0",
|
||||||
"codecov": "^3.2.0",
|
"codecov": "3.2.0",
|
||||||
"eslint": "^5.9.0",
|
"eslint": "5.9.0",
|
||||||
"eslint-config-airbnb-base": "^13.1.0",
|
"eslint-config-airbnb-base": "13.1.0",
|
||||||
"eslint-config-prettier": "^3.1.0",
|
"eslint-config-prettier": "3.3.0",
|
||||||
"eslint-plugin-import": "^2.14.0",
|
"eslint-plugin-import": "2.14.0",
|
||||||
"fs-extra": "^7.0.1",
|
"fs-extra": "^7.0.1",
|
||||||
"glob": "^7.1.3",
|
"jest": "24.7.1",
|
||||||
"jest": "^23.6.0",
|
"lint-staged": "8.1.0",
|
||||||
"lint-staged": "^8.0.4",
|
"node-fetch": "2.6.0",
|
||||||
"node-fetch": "^2.3.0",
|
"pre-commit": "1.2.2",
|
||||||
"pre-commit": "^1.2.2",
|
"prettier": "1.17.1"
|
||||||
"prettier": "^1.15.2"
|
},
|
||||||
|
"prettier": {
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "es5"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -euo pipefail
|
|
||||||
cd "$LAMBDA_TASK_ROOT"
|
|
||||||
|
|
||||||
# Configure `import`
|
|
||||||
export IMPORT_CACHE="$LAMBDA_TASK_ROOT/.import-cache"
|
|
||||||
export PATH="$IMPORT_CACHE/bin:$PATH"
|
|
||||||
|
|
||||||
# Load `import` and runtime
|
|
||||||
# shellcheck disable=SC1090
|
|
||||||
. "$(which import)"
|
|
||||||
# shellcheck disable=SC1090
|
|
||||||
. "$IMPORT_CACHE/runtime.sh"
|
|
||||||
|
|
||||||
# Load user code and process events in a loop forever
|
|
||||||
_lambda_runtime_init
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
set -euo pipefail
|
|
||||||
|
|
||||||
# `import` debug logs are always enabled during build
|
|
||||||
export IMPORT_DEBUG=1
|
|
||||||
|
|
||||||
# Install `import`
|
|
||||||
IMPORT_BIN="$IMPORT_CACHE/bin/import"
|
|
||||||
mkdir -p "$(dirname "$IMPORT_BIN")"
|
|
||||||
curl -sfLS https://import.pw > "$IMPORT_BIN"
|
|
||||||
chmod +x "$IMPORT_BIN"
|
|
||||||
|
|
||||||
# For now only the entrypoint file is copied into the lambda
|
|
||||||
mkdir -p "$(dirname "$ENTRYPOINT")"
|
|
||||||
cp "$SRC/$ENTRYPOINT" "$ENTRYPOINT"
|
|
||||||
|
|
||||||
# Copy in the runtime
|
|
||||||
cp "$BUILDER/runtime.sh" "$IMPORT_CACHE"
|
|
||||||
cp "$BUILDER/bootstrap" .
|
|
||||||
|
|
||||||
# Load `import`
|
|
||||||
. "$(which import)"
|
|
||||||
|
|
||||||
# Cache runtime and user dependencies
|
|
||||||
echo "Caching imports in \"$ENTRYPOINT\"…"
|
|
||||||
. "$IMPORT_CACHE/runtime.sh"
|
|
||||||
. "$ENTRYPOINT"
|
|
||||||
echo "Done caching imports"
|
|
||||||
|
|
||||||
# Run user build script
|
|
||||||
if declare -f build > /dev/null; then
|
|
||||||
echo "Running \`build\` function in \"$ENTRYPOINT\"…"
|
|
||||||
build "$@"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Ensure the entrypoint defined a `handler` function
|
|
||||||
if ! declare -f handler > /dev/null; then
|
|
||||||
echo "ERROR: A \`handler\` function must be defined in \"$ENTRYPOINT\"!" >&2
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
@@ -1,57 +0,0 @@
|
|||||||
const execa = require('execa');
|
|
||||||
const { join } = require('path');
|
|
||||||
const snakeCase = require('snake-case');
|
|
||||||
const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
|
|
||||||
exports.config = {
|
|
||||||
maxLambdaSize: '10mb',
|
|
||||||
};
|
|
||||||
|
|
||||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
|
||||||
|
|
||||||
exports.build = async ({
|
|
||||||
workPath, files, entrypoint, config,
|
|
||||||
}) => {
|
|
||||||
const srcDir = await getWritableDirectory();
|
|
||||||
|
|
||||||
console.log('downloading files...');
|
|
||||||
await download(files, srcDir);
|
|
||||||
|
|
||||||
const configEnv = Object.keys(config).reduce((o, v) => {
|
|
||||||
o[`IMPORT_${snakeCase(v).toUpperCase()}`] = config[v]; // eslint-disable-line no-param-reassign
|
|
||||||
return o;
|
|
||||||
}, {});
|
|
||||||
|
|
||||||
const IMPORT_CACHE = `${workPath}/.import-cache`;
|
|
||||||
const env = Object.assign({}, process.env, configEnv, {
|
|
||||||
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
|
|
||||||
IMPORT_CACHE,
|
|
||||||
SRC: srcDir,
|
|
||||||
BUILDER: __dirname,
|
|
||||||
ENTRYPOINT: entrypoint,
|
|
||||||
});
|
|
||||||
|
|
||||||
const builderPath = join(__dirname, 'builder.sh');
|
|
||||||
|
|
||||||
await execa(builderPath, [entrypoint], {
|
|
||||||
env,
|
|
||||||
cwd: workPath,
|
|
||||||
stdio: 'inherit',
|
|
||||||
});
|
|
||||||
|
|
||||||
const lambda = await createLambda({
|
|
||||||
files: await glob('**', workPath),
|
|
||||||
handler: entrypoint, // not actually used in `bootstrap`
|
|
||||||
runtime: 'provided',
|
|
||||||
environment: Object.assign({}, configEnv, {
|
|
||||||
SCRIPT_FILENAME: entrypoint,
|
|
||||||
}),
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
[entrypoint]: lambda,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@now/bash",
|
|
||||||
"version": "0.2.0",
|
|
||||||
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
|
|
||||||
"main": "index.js",
|
|
||||||
"author": "Nathan Rajlich <nate@zeit.co>",
|
|
||||||
"license": "MIT",
|
|
||||||
"repository": {
|
|
||||||
"type": "git",
|
|
||||||
"url": "https://github.com/zeit/now-builders.git",
|
|
||||||
"directory": "packages/now-bash"
|
|
||||||
},
|
|
||||||
"files": [
|
|
||||||
"builder.sh",
|
|
||||||
"runtime.sh",
|
|
||||||
"bootstrap",
|
|
||||||
"index.js",
|
|
||||||
"package.json"
|
|
||||||
],
|
|
||||||
"dependencies": {
|
|
||||||
"execa": "^1.0.0",
|
|
||||||
"snake-case": "^2.1.0"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,119 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
import "static-binaries@1.0.0"
|
|
||||||
static_binaries jq
|
|
||||||
|
|
||||||
# These get reset upon each request
|
|
||||||
_STATUS_CODE="$(mktemp)"
|
|
||||||
_HEADERS="$(mktemp)"
|
|
||||||
|
|
||||||
_lambda_runtime_api() {
|
|
||||||
local endpoint="$1"
|
|
||||||
shift
|
|
||||||
curl -sfLS "http://$AWS_LAMBDA_RUNTIME_API/2018-06-01/runtime/$endpoint" "$@"
|
|
||||||
}
|
|
||||||
|
|
||||||
_lambda_runtime_init() {
|
|
||||||
# Initialize user code
|
|
||||||
# shellcheck disable=SC1090
|
|
||||||
. "$SCRIPT_FILENAME" || {
|
|
||||||
local exit_code="$?"
|
|
||||||
local error_message="Initialization failed for '$SCRIPT_FILENAME' (exit code $exit_code)"
|
|
||||||
echo "$error_message" >&2
|
|
||||||
local error='{"errorMessage":"'"$error_message"'"}'
|
|
||||||
_lambda_runtime_api "init/error" -X POST -d "$error"
|
|
||||||
exit "$exit_code"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Process events
|
|
||||||
while true; do _lambda_runtime_next; done
|
|
||||||
}
|
|
||||||
|
|
||||||
_lambda_runtime_next() {
|
|
||||||
echo 200 > "$_STATUS_CODE"
|
|
||||||
echo '{"content-type":"text/plain; charset=utf8"}' > "$_HEADERS"
|
|
||||||
|
|
||||||
local headers
|
|
||||||
headers="$(mktemp)"
|
|
||||||
|
|
||||||
# Get an event
|
|
||||||
local event
|
|
||||||
event="$(mktemp)"
|
|
||||||
_lambda_runtime_api invocation/next -D "$headers" | jq --raw-output --monochrome-output '.body' > "$event"
|
|
||||||
|
|
||||||
local request_id
|
|
||||||
request_id="$(grep -Fi Lambda-Runtime-Aws-Request-Id "$headers" | tr -d '[:space:]' | cut -d: -f2)"
|
|
||||||
rm -f "$headers"
|
|
||||||
|
|
||||||
# Execute the handler function from the script
|
|
||||||
local body
|
|
||||||
body="$(mktemp)"
|
|
||||||
|
|
||||||
# Stdin of the `handler` function is the HTTP request body.
|
|
||||||
# Need to use a fifo here instead of bash <() because Lambda
|
|
||||||
# errors with "/dev/fd/63 not found" for some reason :/
|
|
||||||
local stdin
|
|
||||||
stdin="$(mktemp -u)"
|
|
||||||
mkfifo "$stdin"
|
|
||||||
_lambda_runtime_body < "$event" > "$stdin" &
|
|
||||||
|
|
||||||
local exit_code=0
|
|
||||||
handler "$event" < "$stdin" > "$body" || exit_code="$?"
|
|
||||||
|
|
||||||
rm -f "$event" "$stdin"
|
|
||||||
|
|
||||||
if [ "$exit_code" -eq 0 ]; then
|
|
||||||
# Send the response
|
|
||||||
jq --raw-input --raw-output --compact-output --slurp --monochrome-output \
|
|
||||||
--arg statusCode "$(cat "$_STATUS_CODE")" \
|
|
||||||
--argjson headers "$(cat "$_HEADERS")" \
|
|
||||||
'{statusCode:$statusCode|tonumber, headers:$headers, encoding:"base64", body:.|@base64}' < "$body" \
|
|
||||||
| _lambda_runtime_api "invocation/$request_id/response" -X POST -d @- > /dev/null
|
|
||||||
rm -f "$body" "$_HEADERS"
|
|
||||||
else
|
|
||||||
local error_message="Invocation failed for 'handler' function in '$SCRIPT_FILENAME' (exit code $exit_code)"
|
|
||||||
echo "$error_message" >&2
|
|
||||||
_lambda_runtime_api "invocation/$request_id/error" -X POST -d '{"errorMessage":"'"$error_message"'"}' > /dev/null
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
_lambda_runtime_body() {
|
|
||||||
local event
|
|
||||||
event="$(cat)"
|
|
||||||
if [ "$(jq --raw-output '.body | type' <<< "$event")" = "string" ]; then
|
|
||||||
if [ "$(jq --raw-output '.encoding' <<< "$event")" = "base64" ]; then
|
|
||||||
jq --raw-output '.body' <<< "$event" | base64 --decode
|
|
||||||
else
|
|
||||||
# assume plain-text body
|
|
||||||
jq --raw-output '.body' <<< "$event"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
# Set the response status code.
|
|
||||||
http_response_code() {
|
|
||||||
echo "$1" > "$_STATUS_CODE"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Sets a response header.
|
|
||||||
# Overrides existing header if it has already been set.
|
|
||||||
http_response_header() {
|
|
||||||
local name="$1"
|
|
||||||
local value="$2"
|
|
||||||
local tmp
|
|
||||||
tmp="$(mktemp)"
|
|
||||||
jq \
|
|
||||||
--arg name "$name" \
|
|
||||||
--arg value "$value" \
|
|
||||||
'.[$name] = $value' < "$_HEADERS" > "$tmp"
|
|
||||||
mv -f "$tmp" "$_HEADERS"
|
|
||||||
}
|
|
||||||
|
|
||||||
http_response_redirect() {
|
|
||||||
http_response_code "${2:-302}"
|
|
||||||
http_response_header "location" "$1"
|
|
||||||
}
|
|
||||||
|
|
||||||
http_response_json() {
|
|
||||||
http_response_header "content-type" "application/json; charset=utf8"
|
|
||||||
}
|
|
||||||
@@ -1,146 +0,0 @@
|
|||||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
|
||||||
# yarn lockfile v1
|
|
||||||
|
|
||||||
|
|
||||||
cross-spawn@^6.0.0:
|
|
||||||
version "6.0.5"
|
|
||||||
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
|
|
||||||
integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
|
|
||||||
dependencies:
|
|
||||||
nice-try "^1.0.4"
|
|
||||||
path-key "^2.0.1"
|
|
||||||
semver "^5.5.0"
|
|
||||||
shebang-command "^1.2.0"
|
|
||||||
which "^1.2.9"
|
|
||||||
|
|
||||||
end-of-stream@^1.1.0:
|
|
||||||
version "1.4.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43"
|
|
||||||
integrity sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==
|
|
||||||
dependencies:
|
|
||||||
once "^1.4.0"
|
|
||||||
|
|
||||||
execa@^1.0.0:
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
|
|
||||||
integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
|
|
||||||
dependencies:
|
|
||||||
cross-spawn "^6.0.0"
|
|
||||||
get-stream "^4.0.0"
|
|
||||||
is-stream "^1.1.0"
|
|
||||||
npm-run-path "^2.0.0"
|
|
||||||
p-finally "^1.0.0"
|
|
||||||
signal-exit "^3.0.0"
|
|
||||||
strip-eof "^1.0.0"
|
|
||||||
|
|
||||||
get-stream@^4.0.0:
|
|
||||||
version "4.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
|
|
||||||
integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
|
|
||||||
dependencies:
|
|
||||||
pump "^3.0.0"
|
|
||||||
|
|
||||||
is-stream@^1.1.0:
|
|
||||||
version "1.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
|
|
||||||
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
|
|
||||||
|
|
||||||
isexe@^2.0.0:
|
|
||||||
version "2.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
|
||||||
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
|
|
||||||
|
|
||||||
lower-case@^1.1.1:
|
|
||||||
version "1.1.4"
|
|
||||||
resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac"
|
|
||||||
integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw=
|
|
||||||
|
|
||||||
nice-try@^1.0.4:
|
|
||||||
version "1.0.5"
|
|
||||||
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
|
|
||||||
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
|
|
||||||
|
|
||||||
no-case@^2.2.0:
|
|
||||||
version "2.3.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac"
|
|
||||||
integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ==
|
|
||||||
dependencies:
|
|
||||||
lower-case "^1.1.1"
|
|
||||||
|
|
||||||
npm-run-path@^2.0.0:
|
|
||||||
version "2.0.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
|
|
||||||
integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
|
|
||||||
dependencies:
|
|
||||||
path-key "^2.0.0"
|
|
||||||
|
|
||||||
once@^1.3.1, once@^1.4.0:
|
|
||||||
version "1.4.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
|
|
||||||
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
|
|
||||||
dependencies:
|
|
||||||
wrappy "1"
|
|
||||||
|
|
||||||
p-finally@^1.0.0:
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
|
|
||||||
integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
|
|
||||||
|
|
||||||
path-key@^2.0.0, path-key@^2.0.1:
|
|
||||||
version "2.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
|
|
||||||
integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
|
|
||||||
|
|
||||||
pump@^3.0.0:
|
|
||||||
version "3.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
|
|
||||||
integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
|
|
||||||
dependencies:
|
|
||||||
end-of-stream "^1.1.0"
|
|
||||||
once "^1.3.1"
|
|
||||||
|
|
||||||
semver@^5.5.0:
|
|
||||||
version "5.6.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/semver/-/semver-5.6.0.tgz#7e74256fbaa49c75aa7c7a205cc22799cac80004"
|
|
||||||
integrity sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==
|
|
||||||
|
|
||||||
shebang-command@^1.2.0:
|
|
||||||
version "1.2.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
|
|
||||||
integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
|
|
||||||
dependencies:
|
|
||||||
shebang-regex "^1.0.0"
|
|
||||||
|
|
||||||
shebang-regex@^1.0.0:
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
|
|
||||||
integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
|
|
||||||
|
|
||||||
signal-exit@^3.0.0:
|
|
||||||
version "3.0.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
|
|
||||||
integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=
|
|
||||||
|
|
||||||
snake-case@^2.1.0:
|
|
||||||
version "2.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-2.1.0.tgz#41bdb1b73f30ec66a04d4e2cad1b76387d4d6d9f"
|
|
||||||
integrity sha1-Qb2xtz8w7GagTU4srRt2OH1NbZ8=
|
|
||||||
dependencies:
|
|
||||||
no-case "^2.2.0"
|
|
||||||
|
|
||||||
strip-eof@^1.0.0:
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
|
|
||||||
integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
|
|
||||||
|
|
||||||
which@^1.2.9:
|
|
||||||
version "1.3.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
|
|
||||||
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
|
|
||||||
dependencies:
|
|
||||||
isexe "^2.0.0"
|
|
||||||
|
|
||||||
wrappy@1:
|
|
||||||
version "1.0.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
|
||||||
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
|
|
||||||
6
packages/now-build-utils/build.sh
Executable file
6
packages/now-build-utils/build.sh
Executable file
@@ -0,0 +1,6 @@
|
|||||||
|
tsc
|
||||||
|
|
||||||
|
rm dist/index.js
|
||||||
|
ncc build src/index.ts -o dist/main
|
||||||
|
mv dist/main/index.js dist/index.js
|
||||||
|
rm -rf dist/main
|
||||||
@@ -1 +1 @@
|
|||||||
module.exports = require('../dist/fs/download').default;
|
module.exports = require('../dist/index').download;
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
module.exports = require('../dist/fs/get-writable-directory').default;
|
module.exports = require('../dist/index').getWriteableDirectory;
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
module.exports = require('../dist/fs/glob').default;
|
module.exports = require('../dist/index').glob;
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
module.exports = require('../dist/fs/rename').default;
|
module.exports = require('../dist/index').rename;
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
module.exports = require('../dist/fs/run-user-scripts');
|
module.exports = require('../dist/index');
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
module.exports = require('../dist/fs/stream-to-buffer').default;
|
module.exports = require('../dist/index').streamToBuffer;
|
||||||
|
|||||||
@@ -1,39 +1,43 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/build-utils",
|
"name": "@now/build-utils",
|
||||||
"version": "0.5.0",
|
"version": "0.9.11",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"main": "./dist/index.js",
|
"main": "./dist/index.js",
|
||||||
"types": "./dist/index.d.js",
|
"types": "./dist/index.d.js",
|
||||||
|
"homepage": "https://zeit.co/docs/v2/deployments/builders/developer-guide",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/zeit/now-builders.git",
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
"directory": "packages/now-build-utils"
|
"directory": "packages/now-build-utils"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
|
||||||
"async-retry": "1.2.3",
|
|
||||||
"async-sema": "2.1.4",
|
|
||||||
"end-of-stream": "1.4.1",
|
|
||||||
"fs-extra": "7.0.0",
|
|
||||||
"glob": "7.1.3",
|
|
||||||
"into-stream": "5.0.0",
|
|
||||||
"memory-fs": "0.4.1",
|
|
||||||
"multistream": "2.1.1",
|
|
||||||
"node-fetch": "2.2.0",
|
|
||||||
"yazl": "2.4.3"
|
|
||||||
},
|
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "tsc",
|
"build": "./build.sh",
|
||||||
"test": "tsc && jest",
|
"test": "./build.sh && jest",
|
||||||
"prepublish": "tsc"
|
"prepublishOnly": "./build.sh"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/async-retry": "^1.2.1",
|
"@types/async-retry": "^1.2.1",
|
||||||
|
"@types/cross-spawn": "6.0.0",
|
||||||
"@types/end-of-stream": "^1.4.0",
|
"@types/end-of-stream": "^1.4.0",
|
||||||
"@types/fs-extra": "^5.0.5",
|
"@types/fs-extra": "^5.0.5",
|
||||||
"@types/glob": "^7.1.1",
|
"@types/glob": "^7.1.1",
|
||||||
|
"@types/multistream": "2.1.1",
|
||||||
"@types/node-fetch": "^2.1.6",
|
"@types/node-fetch": "^2.1.6",
|
||||||
|
"@types/semver": "6.0.0",
|
||||||
"@types/yazl": "^2.4.1",
|
"@types/yazl": "^2.4.1",
|
||||||
|
"async-retry": "1.2.3",
|
||||||
|
"async-sema": "2.1.4",
|
||||||
|
"cross-spawn": "6.0.5",
|
||||||
|
"end-of-stream": "1.4.1",
|
||||||
"execa": "^1.0.0",
|
"execa": "^1.0.0",
|
||||||
"typescript": "3.3.4000"
|
"fs-extra": "7.0.0",
|
||||||
|
"glob": "7.1.3",
|
||||||
|
"into-stream": "5.0.0",
|
||||||
|
"minimatch": "3.0.4",
|
||||||
|
"multistream": "2.1.1",
|
||||||
|
"node-fetch": "2.2.0",
|
||||||
|
"semver": "6.1.1",
|
||||||
|
"typescript": "3.5.2",
|
||||||
|
"yazl": "2.4.3"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
7
packages/now-build-utils/src/debug.ts
Normal file
7
packages/now-build-utils/src/debug.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
export default function debug(message: string, ...additional: any[]) {
|
||||||
|
if (process.env.NOW_BUILDER_DEBUG) {
|
||||||
|
console.log(message, ...additional);
|
||||||
|
} else if (process.env.NOW_BUILDER_ANNOTATE) {
|
||||||
|
console.log(`[now-builder-debug] ${message}`, ...additional);
|
||||||
|
}
|
||||||
|
}
|
||||||
173
packages/now-build-utils/src/detect-builders.ts
Normal file
173
packages/now-build-utils/src/detect-builders.ts
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
import { PackageJson, Builder, Config } from './types';
|
||||||
|
import minimatch from 'minimatch';
|
||||||
|
|
||||||
|
interface ErrorResponse {
|
||||||
|
code: string;
|
||||||
|
message: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface Options {
|
||||||
|
tag?: 'canary' | 'latest';
|
||||||
|
}
|
||||||
|
|
||||||
|
const src: string = 'package.json';
|
||||||
|
const config: Config = { zeroConfig: true };
|
||||||
|
|
||||||
|
// Static builders are special cased in `@now/static-build`
|
||||||
|
const BUILDERS = new Map<string, Builder>([
|
||||||
|
['next', { src, use: '@now/next', config }],
|
||||||
|
]);
|
||||||
|
|
||||||
|
const API_BUILDERS: Builder[] = [
|
||||||
|
{ src: 'api/**/*.js', use: '@now/node', config },
|
||||||
|
{ src: 'api/**/*.ts', use: '@now/node', config },
|
||||||
|
{ src: 'api/**/*.go', use: '@now/go', config },
|
||||||
|
{ src: 'api/**/*.py', use: '@now/python', config },
|
||||||
|
{ src: 'api/**/*.rb', use: '@now/ruby', config },
|
||||||
|
];
|
||||||
|
|
||||||
|
const MISSING_BUILD_SCRIPT_ERROR: ErrorResponse = {
|
||||||
|
code: 'missing_build_script',
|
||||||
|
message:
|
||||||
|
'Your `package.json` file is missing a `build` property inside the `script` property.' +
|
||||||
|
'\nMore details: https://zeit.co/docs/v2/advanced/platform/frequently-asked-questions#missing-build-script',
|
||||||
|
};
|
||||||
|
|
||||||
|
function hasPublicDirectory(files: string[]) {
|
||||||
|
return files.some(name => name.startsWith('public/'));
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasBuildScript(pkg: PackageJson | undefined) {
|
||||||
|
const { scripts = {} } = pkg || {};
|
||||||
|
return Boolean(scripts && scripts['build']);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function detectBuilder(pkg: PackageJson): Promise<Builder> {
|
||||||
|
for (const [dependency, builder] of BUILDERS) {
|
||||||
|
const deps = Object.assign({}, pkg.dependencies, pkg.devDependencies);
|
||||||
|
|
||||||
|
// Return the builder when a dependency matches
|
||||||
|
if (deps[dependency]) {
|
||||||
|
return builder;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// By default we'll choose the `static-build` builder
|
||||||
|
return { src, use: '@now/static-build', config };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Files that match a specific pattern will get ignored
|
||||||
|
export function ignoreApiFilter(file: string) {
|
||||||
|
if (file.includes('/.')) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.includes('/_')) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.endsWith('.d.ts')) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If the file does not match any builder we also
|
||||||
|
// don't want to create a route e.g. `package.json`
|
||||||
|
if (API_BUILDERS.every(({ src }) => !minimatch(file, src))) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// We need to sort the file paths by alphabet to make
|
||||||
|
// sure the routes stay in the same order e.g. for deduping
|
||||||
|
export function sortFiles(fileA: string, fileB: string) {
|
||||||
|
return fileA.localeCompare(fileB);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function detectApiBuilders(files: string[]): Promise<Builder[]> {
|
||||||
|
const builds = files
|
||||||
|
.sort(sortFiles)
|
||||||
|
.filter(ignoreApiFilter)
|
||||||
|
.map(file => {
|
||||||
|
const result = API_BUILDERS.find(
|
||||||
|
({ src }): boolean => minimatch(file, src)
|
||||||
|
);
|
||||||
|
|
||||||
|
return result ? { ...result, src: file } : null;
|
||||||
|
});
|
||||||
|
|
||||||
|
const finishedBuilds = builds.filter(Boolean);
|
||||||
|
return finishedBuilds as Builder[];
|
||||||
|
}
|
||||||
|
|
||||||
|
// When zero config is used we can call this function
|
||||||
|
// to determine what builders to use
|
||||||
|
export async function detectBuilders(
|
||||||
|
files: string[],
|
||||||
|
pkg?: PackageJson | undefined | null,
|
||||||
|
options?: Options
|
||||||
|
): Promise<{
|
||||||
|
builders: Builder[] | null;
|
||||||
|
errors: ErrorResponse[] | null;
|
||||||
|
}> {
|
||||||
|
const errors: ErrorResponse[] = [];
|
||||||
|
|
||||||
|
// Detect all builders for the `api` directory before anything else
|
||||||
|
let builders = await detectApiBuilders(files);
|
||||||
|
|
||||||
|
if (pkg && hasBuildScript(pkg)) {
|
||||||
|
builders.push(await detectBuilder(pkg));
|
||||||
|
} else {
|
||||||
|
if (pkg && builders.length === 0) {
|
||||||
|
// We only show this error when there are no api builders
|
||||||
|
// since the dependencies of the pkg could be used for those
|
||||||
|
errors.push(MISSING_BUILD_SCRIPT_ERROR);
|
||||||
|
return { errors, builders: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
// We allow a `public` directory
|
||||||
|
// when there are no build steps
|
||||||
|
if (hasPublicDirectory(files)) {
|
||||||
|
builders.push({
|
||||||
|
use: '@now/static',
|
||||||
|
src: 'public/**/*',
|
||||||
|
config,
|
||||||
|
});
|
||||||
|
} else if (builders.length > 0) {
|
||||||
|
// We can't use pattern matching, since `!(api)` and `!(api)/**/*`
|
||||||
|
// won't give the correct results
|
||||||
|
builders.push(
|
||||||
|
...files
|
||||||
|
.filter(name => !name.startsWith('api/'))
|
||||||
|
.filter(name => !(name === 'package.json'))
|
||||||
|
.map(name => ({
|
||||||
|
use: '@now/static',
|
||||||
|
src: name,
|
||||||
|
config,
|
||||||
|
}))
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Change the tag for the builders
|
||||||
|
if (builders && builders.length) {
|
||||||
|
const tag = options && options.tag;
|
||||||
|
|
||||||
|
if (tag) {
|
||||||
|
builders = builders.map((builder: Builder) => {
|
||||||
|
// @now/static has no canary builder
|
||||||
|
if (builder.use !== '@now/static') {
|
||||||
|
builder.use = `${builder.use}@${tag}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return builder;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
builders: builders.length ? builders : null,
|
||||||
|
errors: errors.length ? errors : null,
|
||||||
|
};
|
||||||
|
}
|
||||||
286
packages/now-build-utils/src/detect-routes.ts
Normal file
286
packages/now-build-utils/src/detect-routes.ts
Normal file
@@ -0,0 +1,286 @@
|
|||||||
|
import { Route, Builder } from './types';
|
||||||
|
import { parse as parsePath } from 'path';
|
||||||
|
import { ignoreApiFilter, sortFiles } from './detect-builders';
|
||||||
|
|
||||||
|
function escapeName(name: string) {
|
||||||
|
const special = '[]^$.|?*+()'.split('');
|
||||||
|
|
||||||
|
for (const char of special) {
|
||||||
|
name = name.replace(new RegExp(`\\${char}`, 'g'), `\\${char}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
function joinPath(...segments: string[]) {
|
||||||
|
const joinedPath = segments.join('/');
|
||||||
|
return joinedPath.replace(/\/{2,}/g, '/');
|
||||||
|
}
|
||||||
|
|
||||||
|
function concatArrayOfText(texts: string[]): string {
|
||||||
|
if (texts.length <= 2) {
|
||||||
|
return texts.join(' and ');
|
||||||
|
}
|
||||||
|
|
||||||
|
const last = texts.pop();
|
||||||
|
return `${texts.join(', ')}, and ${last}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Takes a filename or foldername, strips the extension
|
||||||
|
// gets the part between the "[]" brackets.
|
||||||
|
// It will return `null` if there are no brackets
|
||||||
|
// and therefore no segment.
|
||||||
|
function getSegmentName(segment: string): string | null {
|
||||||
|
const { name } = parsePath(segment);
|
||||||
|
|
||||||
|
if (name.startsWith('[') && name.endsWith(']')) {
|
||||||
|
return name.slice(1, -1);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function createRouteFromPath(filePath: string): Route {
|
||||||
|
const parts = filePath.split('/');
|
||||||
|
|
||||||
|
let counter: number = 1;
|
||||||
|
const query: string[] = [];
|
||||||
|
|
||||||
|
const srcParts = parts.map(
|
||||||
|
(segment, index): string => {
|
||||||
|
const name = getSegmentName(segment);
|
||||||
|
const isLast = index === parts.length - 1;
|
||||||
|
|
||||||
|
if (name !== null) {
|
||||||
|
// We can't use `URLSearchParams` because `$` would get escaped
|
||||||
|
query.push(`${name}=$${counter++}`);
|
||||||
|
return `([^\\/]+)`;
|
||||||
|
} else if (isLast) {
|
||||||
|
const { name: fileName, ext } = parsePath(segment);
|
||||||
|
const isIndex = fileName === 'index';
|
||||||
|
|
||||||
|
// Either filename with extension, filename without extension
|
||||||
|
// or nothing when the filename is `index`
|
||||||
|
return `(${escapeName(fileName)}|${escapeName(fileName)}${escapeName(
|
||||||
|
ext
|
||||||
|
)})${isIndex ? '?' : ''}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return segment;
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
const src = `^/${srcParts.join('/')}$`;
|
||||||
|
const dest = `/${filePath}${query.length ? '?' : ''}${query.join('&')}`;
|
||||||
|
|
||||||
|
return { src, dest };
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if the path partially matches and has the same
|
||||||
|
// name for the path segment at the same position
|
||||||
|
function partiallyMatches(pathA: string, pathB: string): boolean {
|
||||||
|
const partsA = pathA.split('/');
|
||||||
|
const partsB = pathB.split('/');
|
||||||
|
|
||||||
|
const long = partsA.length > partsB.length ? partsA : partsB;
|
||||||
|
const short = long === partsA ? partsB : partsA;
|
||||||
|
|
||||||
|
let index = 0;
|
||||||
|
|
||||||
|
for (const segmentShort of short) {
|
||||||
|
const segmentLong = long[index];
|
||||||
|
|
||||||
|
const nameLong = getSegmentName(segmentLong);
|
||||||
|
const nameShort = getSegmentName(segmentShort);
|
||||||
|
|
||||||
|
// If there are no segments or the paths differ we
|
||||||
|
// return as they are not matching
|
||||||
|
if (segmentShort !== segmentLong && (!nameLong || !nameShort)) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (nameLong !== nameShort) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
index += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Counts how often a path occurres when all placeholders
|
||||||
|
// got resolved, so we can check if they have conflicts
|
||||||
|
function pathOccurrences(filePath: string, files: string[]): string[] {
|
||||||
|
const getAbsolutePath = (unresolvedPath: string): string => {
|
||||||
|
const { dir, name } = parsePath(unresolvedPath);
|
||||||
|
const parts = joinPath(dir, name).split('/');
|
||||||
|
return parts.map(part => part.replace(/\[.*\]/, '1')).join('/');
|
||||||
|
};
|
||||||
|
|
||||||
|
const currentAbsolutePath = getAbsolutePath(filePath);
|
||||||
|
|
||||||
|
return files.reduce((prev: string[], file: string): string[] => {
|
||||||
|
const absolutePath = getAbsolutePath(file);
|
||||||
|
|
||||||
|
if (absolutePath === currentAbsolutePath) {
|
||||||
|
prev.push(file);
|
||||||
|
} else if (partiallyMatches(filePath, file)) {
|
||||||
|
prev.push(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
return prev;
|
||||||
|
}, []);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Checks if a placeholder with the same name is used
|
||||||
|
// multiple times inside the same path
|
||||||
|
function getConflictingSegment(filePath: string): string | null {
|
||||||
|
const segments = new Set<string>();
|
||||||
|
|
||||||
|
for (const segment of filePath.split('/')) {
|
||||||
|
const name = getSegmentName(segment);
|
||||||
|
|
||||||
|
if (name !== null && segments.has(name)) {
|
||||||
|
return name;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (name) {
|
||||||
|
segments.add(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
function sortFilesBySegmentCount(fileA: string, fileB: string): number {
|
||||||
|
const lengthA = fileA.split('/').length;
|
||||||
|
const lengthB = fileB.split('/').length;
|
||||||
|
|
||||||
|
if (lengthA > lengthB) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (lengthA < lengthB) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Paths that have the same segment length but
|
||||||
|
// less placeholders are preferred
|
||||||
|
const countSegments = (prev: number, segment: string) =>
|
||||||
|
getSegmentName(segment) ? prev + 1 : 0;
|
||||||
|
const segmentLengthA = fileA.split('/').reduce(countSegments, 0);
|
||||||
|
const segmentLengthB = fileB.split('/').reduce(countSegments, 0);
|
||||||
|
|
||||||
|
if (segmentLengthA > segmentLengthB) {
|
||||||
|
return 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (segmentLengthA < segmentLengthB) {
|
||||||
|
return -1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface RoutesResult {
|
||||||
|
defaultRoutes: Route[] | null;
|
||||||
|
error: { [key: string]: string } | null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
|
||||||
|
if (!files || files.length === 0) {
|
||||||
|
return { defaultRoutes: null, error: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
// The deepest routes need to be
|
||||||
|
// the first ones to get handled
|
||||||
|
const sortedFiles = files
|
||||||
|
.filter(ignoreApiFilter)
|
||||||
|
.sort(sortFiles)
|
||||||
|
.sort(sortFilesBySegmentCount);
|
||||||
|
|
||||||
|
const defaultRoutes: Route[] = [];
|
||||||
|
|
||||||
|
for (const file of sortedFiles) {
|
||||||
|
// We only consider every file in the api directory
|
||||||
|
// as we will strip extensions as well as resolving "[segments]"
|
||||||
|
if (!file.startsWith('api/')) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const conflictingSegment = getConflictingSegment(file);
|
||||||
|
|
||||||
|
if (conflictingSegment) {
|
||||||
|
return {
|
||||||
|
defaultRoutes: null,
|
||||||
|
error: {
|
||||||
|
code: 'conflicting_path_segment',
|
||||||
|
message:
|
||||||
|
`The segment "${conflictingSegment}" occurres more than ` +
|
||||||
|
`one time in your path "${file}". Please make sure that ` +
|
||||||
|
`every segment in a path is unique`,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const occurrences = pathOccurrences(file, sortedFiles).filter(
|
||||||
|
name => name !== file
|
||||||
|
);
|
||||||
|
|
||||||
|
if (occurrences.length > 0) {
|
||||||
|
const messagePaths = concatArrayOfText(
|
||||||
|
occurrences.map(name => `"${name}"`)
|
||||||
|
);
|
||||||
|
|
||||||
|
return {
|
||||||
|
defaultRoutes: null,
|
||||||
|
error: {
|
||||||
|
code: 'conflicting_file_path',
|
||||||
|
message:
|
||||||
|
`Two or more files have conflicting paths or names. ` +
|
||||||
|
`Please make sure path segments and filenames, without their extension, are unique. ` +
|
||||||
|
`The path "${file}" has conflicts with ${messagePaths}`,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
defaultRoutes.push(createRouteFromPath(file));
|
||||||
|
}
|
||||||
|
|
||||||
|
// 404 Route to disable directory listing
|
||||||
|
if (defaultRoutes.length) {
|
||||||
|
defaultRoutes.push({
|
||||||
|
status: 404,
|
||||||
|
src: '/api(\\/.*)?$',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return { defaultRoutes, error: null };
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasPublicBuilder(builders: Builder[]): boolean {
|
||||||
|
return builders.some(
|
||||||
|
builder =>
|
||||||
|
builder.use === '@now/static' &&
|
||||||
|
builder.src === 'public/**/*' &&
|
||||||
|
builder.config &&
|
||||||
|
builder.config.zeroConfig === true
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function detectRoutes(
|
||||||
|
files: string[],
|
||||||
|
builders: Builder[]
|
||||||
|
): Promise<RoutesResult> {
|
||||||
|
const routesResult = await detectApiRoutes(files);
|
||||||
|
|
||||||
|
if (routesResult.defaultRoutes && hasPublicBuilder(builders)) {
|
||||||
|
routesResult.defaultRoutes.push({
|
||||||
|
src: '/(.*)',
|
||||||
|
dest: '/public/$1',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return routesResult;
|
||||||
|
}
|
||||||
@@ -31,7 +31,10 @@ class FileFsRef implements File {
|
|||||||
this.fsPath = fsPath;
|
this.fsPath = fsPath;
|
||||||
}
|
}
|
||||||
|
|
||||||
static async fromFsPath({ mode, fsPath }: FileFsRefOptions): Promise<FileFsRef> {
|
static async fromFsPath({
|
||||||
|
mode,
|
||||||
|
fsPath,
|
||||||
|
}: FileFsRefOptions): Promise<FileFsRef> {
|
||||||
let m = mode;
|
let m = mode;
|
||||||
if (!m) {
|
if (!m) {
|
||||||
const stat = await fs.lstat(fsPath);
|
const stat = await fs.lstat(fsPath);
|
||||||
@@ -40,7 +43,11 @@ class FileFsRef implements File {
|
|||||||
return new FileFsRef({ mode: m, fsPath });
|
return new FileFsRef({ mode: m, fsPath });
|
||||||
}
|
}
|
||||||
|
|
||||||
static async fromStream({ mode = 0o100644, stream, fsPath }: FromStreamOptions): Promise<FileFsRef> {
|
static async fromStream({
|
||||||
|
mode = 0o100644,
|
||||||
|
stream,
|
||||||
|
fsPath,
|
||||||
|
}: FromStreamOptions): Promise<FileFsRef> {
|
||||||
assert(typeof mode === 'number');
|
assert(typeof mode === 'number');
|
||||||
assert(typeof stream.pipe === 'function'); // is-stream
|
assert(typeof stream.pipe === 'function'); // is-stream
|
||||||
assert(typeof fsPath === 'string');
|
assert(typeof fsPath === 'string');
|
||||||
@@ -48,7 +55,7 @@ class FileFsRef implements File {
|
|||||||
|
|
||||||
await new Promise<void>((resolve, reject) => {
|
await new Promise<void>((resolve, reject) => {
|
||||||
const dest = fs.createWriteStream(fsPath, {
|
const dest = fs.createWriteStream(fsPath, {
|
||||||
mode: mode & 0o777
|
mode: mode & 0o777,
|
||||||
});
|
});
|
||||||
stream.pipe(dest);
|
stream.pipe(dest);
|
||||||
stream.on('error', reject);
|
stream.on('error', reject);
|
||||||
@@ -72,15 +79,15 @@ class FileFsRef implements File {
|
|||||||
let flag = false;
|
let flag = false;
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
// eslint-disable-next-line consistent-return
|
||||||
return multiStream((cb) => {
|
return multiStream(cb => {
|
||||||
if (flag) return cb(null, null);
|
if (flag) return cb(null, null);
|
||||||
flag = true;
|
flag = true;
|
||||||
|
|
||||||
this.toStreamAsync()
|
this.toStreamAsync()
|
||||||
.then((stream) => {
|
.then(stream => {
|
||||||
cb(null, stream);
|
cb(null, stream);
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch(error => {
|
||||||
cb(error, null);
|
cb(error, null);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ import { File } from './types';
|
|||||||
interface FileRefOptions {
|
interface FileRefOptions {
|
||||||
mode?: number;
|
mode?: number;
|
||||||
digest: string;
|
digest: string;
|
||||||
mutable?: boolean;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const semaToDownloadFromS3 = new Sema(5);
|
const semaToDownloadFromS3 = new Sema(5);
|
||||||
@@ -26,26 +25,29 @@ export default class FileRef implements File {
|
|||||||
public type: 'FileRef';
|
public type: 'FileRef';
|
||||||
public mode: number;
|
public mode: number;
|
||||||
public digest: string;
|
public digest: string;
|
||||||
public mutable: boolean;
|
|
||||||
|
|
||||||
constructor({ mode = 0o100644, digest, mutable = false }: FileRefOptions) {
|
constructor({ mode = 0o100644, digest }: FileRefOptions) {
|
||||||
assert(typeof mode === 'number');
|
assert(typeof mode === 'number');
|
||||||
assert(typeof digest === 'string');
|
assert(typeof digest === 'string');
|
||||||
assert(typeof mutable === 'boolean');
|
|
||||||
this.type = 'FileRef';
|
this.type = 'FileRef';
|
||||||
this.mode = mode;
|
this.mode = mode;
|
||||||
this.digest = digest;
|
this.digest = digest;
|
||||||
this.mutable = mutable;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async toStreamAsync(): Promise<NodeJS.ReadableStream> {
|
async toStreamAsync(): Promise<NodeJS.ReadableStream> {
|
||||||
let url = '';
|
let url = '';
|
||||||
// sha:24be087eef9fac01d61b30a725c1a10d7b45a256
|
// sha:24be087eef9fac01d61b30a725c1a10d7b45a256
|
||||||
const digestParts = this.digest.split(':');
|
const [digestType, digestHash] = this.digest.split(':');
|
||||||
if (digestParts[0] === 'sha') {
|
if (digestType === 'sha') {
|
||||||
url = this.mutable
|
// This CloudFront URL edge caches the `now-files` S3 bucket to prevent
|
||||||
? `https://s3.amazonaws.com/now-files/${digestParts[1]}`
|
// overloading it
|
||||||
: `https://dmmcy0pwk6bqi.cloudfront.net/${digestParts[1]}`;
|
// `https://now-files.s3.amazonaws.com/${digestHash}`
|
||||||
|
url = `https://dmmcy0pwk6bqi.cloudfront.net/${digestHash}`;
|
||||||
|
} else if (digestType === 'sha+ephemeral') {
|
||||||
|
// This URL is currently only used for cache files that constantly
|
||||||
|
// change. We shouldn't cache it on CloudFront because it'd always be a
|
||||||
|
// MISS.
|
||||||
|
url = `https://now-ephemeral-files.s3.amazonaws.com/${digestHash}`;
|
||||||
} else {
|
} else {
|
||||||
throw new Error('Expected digest to be sha');
|
throw new Error('Expected digest to be sha');
|
||||||
}
|
}
|
||||||
@@ -58,14 +60,14 @@ export default class FileRef implements File {
|
|||||||
const resp = await fetch(url);
|
const resp = await fetch(url);
|
||||||
if (!resp.ok) {
|
if (!resp.ok) {
|
||||||
const error = new BailableError(
|
const error = new BailableError(
|
||||||
`download: ${resp.status} ${resp.statusText} for ${url}`,
|
`download: ${resp.status} ${resp.statusText} for ${url}`
|
||||||
);
|
);
|
||||||
if (resp.status === 403) error.bail = true;
|
if (resp.status === 403) error.bail = true;
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
return resp.body;
|
return resp.body;
|
||||||
},
|
},
|
||||||
{ factor: 1, retries: 3 },
|
{ factor: 1, retries: 3 }
|
||||||
);
|
);
|
||||||
} finally {
|
} finally {
|
||||||
// console.timeEnd(`downloading ${url}`);
|
// console.timeEnd(`downloading ${url}`);
|
||||||
@@ -77,15 +79,15 @@ export default class FileRef implements File {
|
|||||||
let flag = false;
|
let flag = false;
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
// eslint-disable-next-line consistent-return
|
||||||
return multiStream((cb) => {
|
return multiStream(cb => {
|
||||||
if (flag) return cb(null, null);
|
if (flag) return cb(null, null);
|
||||||
flag = true;
|
flag = true;
|
||||||
|
|
||||||
this.toStreamAsync()
|
this.toStreamAsync()
|
||||||
.then((stream) => {
|
.then(stream => {
|
||||||
cb(null, stream);
|
cb(null, stream);
|
||||||
})
|
})
|
||||||
.catch((error) => {
|
.catch(error => {
|
||||||
cb(error, null);
|
cb(error, null);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -4,11 +4,11 @@ import { File, Files, Meta } from '../types';
|
|||||||
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
|
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
|
||||||
|
|
||||||
export interface DownloadedFiles {
|
export interface DownloadedFiles {
|
||||||
[filePath: string]: FileFsRef
|
[filePath: string]: FileFsRef;
|
||||||
}
|
}
|
||||||
|
|
||||||
const S_IFMT = 61440; /* 0170000 type of file */
|
const S_IFMT = 61440; /* 0170000 type of file */
|
||||||
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
||||||
|
|
||||||
export function isSymbolicLink(mode: number): boolean {
|
export function isSymbolicLink(mode: number): boolean {
|
||||||
return (mode & S_IFMT) === S_IFLNK;
|
return (mode & S_IFMT) === S_IFLNK;
|
||||||
@@ -17,9 +17,9 @@ export function isSymbolicLink(mode: number): boolean {
|
|||||||
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
|
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
|
||||||
const { mode } = file;
|
const { mode } = file;
|
||||||
if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
|
if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
|
||||||
const [ target ] = await Promise.all([
|
const [target] = await Promise.all([
|
||||||
readlink((file as FileFsRef).fsPath),
|
readlink((file as FileFsRef).fsPath),
|
||||||
mkdirp(path.dirname(fsPath))
|
mkdirp(path.dirname(fsPath)),
|
||||||
]);
|
]);
|
||||||
await symlink(target, fsPath);
|
await symlink(target, fsPath);
|
||||||
return FileFsRef.fromFsPath({ mode, fsPath });
|
return FileFsRef.fromFsPath({ mode, fsPath });
|
||||||
@@ -34,12 +34,25 @@ async function removeFile(basePath: string, fileMatched: string) {
|
|||||||
await remove(file);
|
await remove(file);
|
||||||
}
|
}
|
||||||
|
|
||||||
export default async function download(files: Files, basePath: string, meta?: Meta): Promise<DownloadedFiles> {
|
export default async function download(
|
||||||
|
files: Files,
|
||||||
|
basePath: string,
|
||||||
|
meta?: Meta
|
||||||
|
): Promise<DownloadedFiles> {
|
||||||
|
const { isDev = false, skipDownload = false, filesChanged = null, filesRemoved = null } =
|
||||||
|
meta || {};
|
||||||
|
|
||||||
|
if (isDev || skipDownload) {
|
||||||
|
// In `now dev`, the `download()` function is a no-op because
|
||||||
|
// the `basePath` matches the `cwd` of the dev server, so the
|
||||||
|
// source files are already available.
|
||||||
|
return files as DownloadedFiles;
|
||||||
|
}
|
||||||
|
|
||||||
const files2: DownloadedFiles = {};
|
const files2: DownloadedFiles = {};
|
||||||
const { filesChanged = null, filesRemoved = null } = meta || {};
|
|
||||||
|
|
||||||
await Promise.all(
|
await Promise.all(
|
||||||
Object.keys(files).map(async (name) => {
|
Object.keys(files).map(async name => {
|
||||||
// If the file does not exist anymore, remove it.
|
// If the file does not exist anymore, remove it.
|
||||||
if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) {
|
if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) {
|
||||||
await removeFile(basePath, name);
|
await removeFile(basePath, name);
|
||||||
@@ -55,7 +68,7 @@ export default async function download(files: Files, basePath: string, meta?: Me
|
|||||||
const fsPath = path.join(basePath, name);
|
const fsPath = path.join(basePath, name);
|
||||||
|
|
||||||
files2[name] = await downloadFile(file, fsPath);
|
files2[name] = await downloadFile(file, fsPath);
|
||||||
}),
|
})
|
||||||
);
|
);
|
||||||
|
|
||||||
return files2;
|
return files2;
|
||||||
|
|||||||
@@ -8,12 +8,16 @@ import FileFsRef from '../file-fs-ref';
|
|||||||
type GlobOptions = vanillaGlob_.IOptions;
|
type GlobOptions = vanillaGlob_.IOptions;
|
||||||
|
|
||||||
interface FsFiles {
|
interface FsFiles {
|
||||||
[filePath: string]: FileFsRef
|
[filePath: string]: FileFsRef;
|
||||||
}
|
}
|
||||||
|
|
||||||
const vanillaGlob = promisify(vanillaGlob_);
|
const vanillaGlob = promisify(vanillaGlob_);
|
||||||
|
|
||||||
export default async function glob(pattern: string, opts: GlobOptions | string, mountpoint?: string): Promise<FsFiles> {
|
export default async function glob(
|
||||||
|
pattern: string,
|
||||||
|
opts: GlobOptions | string,
|
||||||
|
mountpoint?: string
|
||||||
|
): Promise<FsFiles> {
|
||||||
let options: GlobOptions;
|
let options: GlobOptions;
|
||||||
if (typeof opts === 'string') {
|
if (typeof opts === 'string') {
|
||||||
options = { cwd: opts };
|
options = { cwd: opts };
|
||||||
@@ -23,7 +27,7 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
|
|||||||
|
|
||||||
if (!options.cwd) {
|
if (!options.cwd) {
|
||||||
throw new Error(
|
throw new Error(
|
||||||
'Second argument (basePath) must be specified for names of resulting files',
|
'Second argument (basePath) must be specified for names of resulting files'
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -41,11 +45,11 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
|
|||||||
const files = await vanillaGlob(pattern, options);
|
const files = await vanillaGlob(pattern, options);
|
||||||
|
|
||||||
for (const relativePath of files) {
|
for (const relativePath of files) {
|
||||||
const fsPath = path.join(options.cwd!, relativePath);
|
const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
|
||||||
let stat: Stats = options.statCache![fsPath] as Stats;
|
let stat: Stats = options.statCache![fsPath] as Stats;
|
||||||
assert(
|
assert(
|
||||||
stat,
|
stat,
|
||||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
|
||||||
);
|
);
|
||||||
if (stat.isFile()) {
|
if (stat.isFile()) {
|
||||||
const isSymlink = options.symlinks![fsPath];
|
const isSymlink = options.symlinks![fsPath];
|
||||||
|
|||||||
53
packages/now-build-utils/src/fs/node-version.ts
Normal file
53
packages/now-build-utils/src/fs/node-version.ts
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
import { intersects } from 'semver';
|
||||||
|
import { NodeVersion } from '../types';
|
||||||
|
|
||||||
|
// Node.js versions that may be selected for a build, ordered newest
// first. `range` is the semver range matched against
// `package.json#engines.node`; `runtime` is the Lambda runtime id.
const supportedOptions: NodeVersion[] = [
  { major: 10, range: '10.x', runtime: 'nodejs10.x' },
  { major: 8, range: '8.10.x', runtime: 'nodejs8.10' },
];

// This version should match Fargate's default in the PATH
// Today that is Node 8
export const defaultSelection = supportedOptions.find(
  o => o.major === 8
) as NodeVersion;
|
||||||
|
|
||||||
|
export async function getSupportedNodeVersion(
|
||||||
|
engineRange?: string,
|
||||||
|
silent?: boolean
|
||||||
|
): Promise<NodeVersion> {
|
||||||
|
let selection = defaultSelection;
|
||||||
|
|
||||||
|
if (!engineRange) {
|
||||||
|
if (!silent) {
|
||||||
|
console.log(
|
||||||
|
'missing `engines` in `package.json`, using default range: ' +
|
||||||
|
selection.range
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
const found = supportedOptions.some(o => {
|
||||||
|
// the array is already in order so return the first
|
||||||
|
// match which will be the newest version of node
|
||||||
|
selection = o;
|
||||||
|
return intersects(o.range, engineRange);
|
||||||
|
});
|
||||||
|
if (found) {
|
||||||
|
if (!silent) {
|
||||||
|
console.log(
|
||||||
|
'Found `engines` in `package.json`, selecting range: ' +
|
||||||
|
selection.range
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
if (!silent) {
|
||||||
|
throw new Error(
|
||||||
|
'found `engines` in `package.json` with an unsupported node range: ' +
|
||||||
|
engineRange +
|
||||||
|
'\nplease use `10.x` or `8.10.x` instead'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return selection;
|
||||||
|
}
|
||||||
@@ -7,6 +7,6 @@ export default function rename(files: Files, delegate: Delegate): Files {
|
|||||||
...newFiles,
|
...newFiles,
|
||||||
[delegate(name)]: files[name],
|
[delegate(name)]: files[name],
|
||||||
}),
|
}),
|
||||||
{},
|
{}
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,22 +1,31 @@
|
|||||||
import assert from 'assert';
|
import assert from 'assert';
|
||||||
import fs from 'fs-extra';
|
import fs from 'fs-extra';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import { spawn, SpawnOptions } from 'child_process';
|
import spawn from 'cross-spawn';
|
||||||
|
import { SpawnOptions } from 'child_process';
|
||||||
|
import { deprecate } from 'util';
|
||||||
|
import { Meta, PackageJson, NodeVersion, Config } from '../types';
|
||||||
|
import { getSupportedNodeVersion } from './node-version';
|
||||||
|
|
||||||
function spawnAsync(command: string, args: string[], cwd: string, opts: SpawnOptions = {}) {
|
function spawnAsync(
|
||||||
|
command: string,
|
||||||
|
args: string[],
|
||||||
|
cwd: string,
|
||||||
|
opts: SpawnOptions = {}
|
||||||
|
) {
|
||||||
return new Promise<void>((resolve, reject) => {
|
return new Promise<void>((resolve, reject) => {
|
||||||
const stderrLogs: Buffer[] = []
|
const stderrLogs: Buffer[] = [];
|
||||||
opts = { stdio: 'inherit', cwd, ...opts };
|
opts = { stdio: 'inherit', cwd, ...opts };
|
||||||
const child = spawn(command, args, opts);
|
const child = spawn(command, args, opts);
|
||||||
|
|
||||||
if (opts.stdio === 'pipe'){
|
if (opts.stdio === 'pipe' && child.stderr) {
|
||||||
child.stderr.on('data', data => stderrLogs.push(data));
|
child.stderr.on('data', data => stderrLogs.push(data));
|
||||||
}
|
}
|
||||||
|
|
||||||
child.on('error', reject);
|
child.on('error', reject);
|
||||||
child.on('close', (code, signal) => {
|
child.on('close', (code, signal) => {
|
||||||
if (code === 0) {
|
if (code === 0) {
|
||||||
return resolve()
|
return resolve();
|
||||||
}
|
}
|
||||||
|
|
||||||
const errorLogs = stderrLogs.map(line => line.toString()).join('');
|
const errorLogs = stderrLogs.map(line => line.toString()).join('');
|
||||||
@@ -37,19 +46,59 @@ async function chmodPlusX(fsPath: string) {
|
|||||||
await fs.chmod(fsPath, base8);
|
await fs.chmod(fsPath, base8);
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function runShellScript(fsPath: string) {
|
export async function runShellScript(
|
||||||
|
fsPath: string,
|
||||||
|
args: string[] = [],
|
||||||
|
spawnOpts?: SpawnOptions
|
||||||
|
) {
|
||||||
assert(path.isAbsolute(fsPath));
|
assert(path.isAbsolute(fsPath));
|
||||||
const destPath = path.dirname(fsPath);
|
const destPath = path.dirname(fsPath);
|
||||||
await chmodPlusX(fsPath);
|
await chmodPlusX(fsPath);
|
||||||
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
|
await spawnAsync(`./${path.basename(fsPath)}`, args, destPath, spawnOpts);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function scanParentDirs(destPath: string, scriptName?: string) {
|
export function getSpawnOptions(
|
||||||
|
meta: Meta,
|
||||||
|
nodeVersion: NodeVersion
|
||||||
|
): SpawnOptions {
|
||||||
|
const opts = {
|
||||||
|
env: { ...process.env },
|
||||||
|
};
|
||||||
|
|
||||||
|
if (!meta.isDev) {
|
||||||
|
opts.env.PATH = `/node${nodeVersion.major}/bin:${opts.env.PATH}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return opts;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getNodeVersion(
|
||||||
|
destPath: string,
|
||||||
|
minNodeVersion?: string,
|
||||||
|
config?: Config
|
||||||
|
): Promise<NodeVersion> {
|
||||||
|
const { packageJson } = await scanParentDirs(destPath, true);
|
||||||
|
let range: string | undefined;
|
||||||
|
let silent = false;
|
||||||
|
if (packageJson && packageJson.engines && packageJson.engines.node) {
|
||||||
|
range = packageJson.engines.node;
|
||||||
|
} else if (minNodeVersion) {
|
||||||
|
range = minNodeVersion;
|
||||||
|
silent = true;
|
||||||
|
} else if (config && config.zeroConfig) {
|
||||||
|
// Use latest node version zero config detected
|
||||||
|
range = '10.x';
|
||||||
|
silent = true;
|
||||||
|
}
|
||||||
|
return getSupportedNodeVersion(range, silent);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function scanParentDirs(destPath: string, readPackageJson = false) {
|
||||||
assert(path.isAbsolute(destPath));
|
assert(path.isAbsolute(destPath));
|
||||||
|
|
||||||
let hasScript = false;
|
|
||||||
let hasPackageLockJson = false;
|
let hasPackageLockJson = false;
|
||||||
|
let packageJson: PackageJson | undefined;
|
||||||
let currentDestPath = destPath;
|
let currentDestPath = destPath;
|
||||||
|
|
||||||
// eslint-disable-next-line no-constant-condition
|
// eslint-disable-next-line no-constant-condition
|
||||||
@@ -58,13 +107,12 @@ async function scanParentDirs(destPath: string, scriptName?: string) {
|
|||||||
// eslint-disable-next-line no-await-in-loop
|
// eslint-disable-next-line no-await-in-loop
|
||||||
if (await fs.pathExists(packageJsonPath)) {
|
if (await fs.pathExists(packageJsonPath)) {
|
||||||
// eslint-disable-next-line no-await-in-loop
|
// eslint-disable-next-line no-await-in-loop
|
||||||
const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
|
if (readPackageJson) {
|
||||||
hasScript = Boolean(
|
packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
|
||||||
packageJson.scripts && scriptName && packageJson.scripts[scriptName],
|
}
|
||||||
);
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
// eslint-disable-next-line no-await-in-loop
|
||||||
hasPackageLockJson = await fs.pathExists(
|
hasPackageLockJson = await fs.pathExists(
|
||||||
path.join(currentDestPath, 'package-lock.json'),
|
path.join(currentDestPath, 'package-lock.json')
|
||||||
);
|
);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
@@ -74,40 +122,36 @@ async function scanParentDirs(destPath: string, scriptName?: string) {
|
|||||||
currentDestPath = newDestPath;
|
currentDestPath = newDestPath;
|
||||||
}
|
}
|
||||||
|
|
||||||
return { hasScript, hasPackageLockJson };
|
return { hasPackageLockJson, packageJson };
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function installDependencies(destPath: string, args: string[] = []) {
|
export async function runNpmInstall(
|
||||||
|
destPath: string,
|
||||||
|
args: string[] = [],
|
||||||
|
spawnOpts?: SpawnOptions
|
||||||
|
) {
|
||||||
assert(path.isAbsolute(destPath));
|
assert(path.isAbsolute(destPath));
|
||||||
|
|
||||||
let commandArgs = args;
|
let commandArgs = args;
|
||||||
console.log(`installing to ${destPath}`);
|
console.log(`installing to ${destPath}`);
|
||||||
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
||||||
|
|
||||||
const opts = {
|
const opts = spawnOpts || { env: process.env };
|
||||||
env: {
|
|
||||||
...process.env,
|
|
||||||
// This is a little hack to force `node-gyp` to build for the
|
|
||||||
// Node.js version that `@now/node` and `@now/node-server` use
|
|
||||||
npm_config_target: '8.10.0',
|
|
||||||
},
|
|
||||||
stdio: 'pipe'
|
|
||||||
};
|
|
||||||
|
|
||||||
if (hasPackageLockJson) {
|
if (hasPackageLockJson) {
|
||||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||||
await spawnAsync(
|
await spawnAsync(
|
||||||
'npm',
|
'npm',
|
||||||
['install'].concat(commandArgs),
|
commandArgs.concat(['install', '--unsafe-perm']),
|
||||||
destPath,
|
destPath,
|
||||||
opts as SpawnOptions
|
opts
|
||||||
);
|
);
|
||||||
} else {
|
} else {
|
||||||
await spawnAsync(
|
await spawnAsync(
|
||||||
'yarn',
|
'yarn',
|
||||||
['--cwd', destPath].concat(commandArgs),
|
commandArgs.concat(['--ignore-engines', '--cwd', destPath]),
|
||||||
destPath,
|
destPath,
|
||||||
opts as SpawnOptions,
|
opts
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -118,9 +162,15 @@ export async function runPackageJsonScript(
|
|||||||
opts?: SpawnOptions
|
opts?: SpawnOptions
|
||||||
) {
|
) {
|
||||||
assert(path.isAbsolute(destPath));
|
assert(path.isAbsolute(destPath));
|
||||||
const { hasScript, hasPackageLockJson } = await scanParentDirs(
|
const { packageJson, hasPackageLockJson } = await scanParentDirs(
|
||||||
destPath,
|
destPath,
|
||||||
scriptName,
|
true
|
||||||
|
);
|
||||||
|
const hasScript = Boolean(
|
||||||
|
packageJson &&
|
||||||
|
packageJson.scripts &&
|
||||||
|
scriptName &&
|
||||||
|
packageJson.scripts[scriptName]
|
||||||
);
|
);
|
||||||
if (!hasScript) return false;
|
if (!hasScript) return false;
|
||||||
|
|
||||||
@@ -129,10 +179,22 @@ export async function runPackageJsonScript(
|
|||||||
await spawnAsync('npm', ['run', scriptName], destPath, opts);
|
await spawnAsync('npm', ['run', scriptName], destPath, opts);
|
||||||
} else {
|
} else {
|
||||||
console.log(`running "yarn run ${scriptName}"`);
|
console.log(`running "yarn run ${scriptName}"`);
|
||||||
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath, opts);
|
await spawnAsync(
|
||||||
|
'yarn',
|
||||||
|
['--cwd', destPath, 'run', scriptName],
|
||||||
|
destPath,
|
||||||
|
opts
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
export const runNpmInstall = installDependencies;
|
/**
|
||||||
|
* @deprecate installDependencies() is deprecated.
|
||||||
|
* Please use runNpmInstall() instead.
|
||||||
|
*/
|
||||||
|
export const installDependencies = deprecate(
|
||||||
|
runNpmInstall,
|
||||||
|
'installDependencies() is deprecated. Please use runNpmInstall() instead.'
|
||||||
|
);
|
||||||
|
|||||||
@@ -1,26 +1,28 @@
|
|||||||
import eos from 'end-of-stream';
|
import eos from 'end-of-stream';
|
||||||
|
|
||||||
export default function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
|
export default function streamToBuffer(
|
||||||
|
stream: NodeJS.ReadableStream
|
||||||
|
): Promise<Buffer> {
|
||||||
return new Promise<Buffer>((resolve, reject) => {
|
return new Promise<Buffer>((resolve, reject) => {
|
||||||
const buffers: Buffer[] = [];
|
const buffers: Buffer[] = [];
|
||||||
|
|
||||||
stream.on('data', buffers.push.bind(buffers))
|
stream.on('data', buffers.push.bind(buffers));
|
||||||
|
|
||||||
eos(stream, (err) => {
|
eos(stream, err => {
|
||||||
if (err) {
|
if (err) {
|
||||||
reject(err);
|
reject(err);
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
switch (buffers.length) {
|
switch (buffers.length) {
|
||||||
case 0:
|
case 0:
|
||||||
resolve(Buffer.allocUnsafe(0));
|
resolve(Buffer.allocUnsafe(0));
|
||||||
break;
|
break;
|
||||||
case 1:
|
case 1:
|
||||||
resolve(buffers[0]);
|
resolve(buffers[0]);
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
resolve(Buffer.concat(buffers));
|
resolve(Buffer.concat(buffers));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,34 +1,47 @@
|
|||||||
import FileBlob from './file-blob';
|
import FileBlob from './file-blob';
|
||||||
import FileFsRef from './file-fs-ref';
|
import FileFsRef from './file-fs-ref';
|
||||||
import FileRef from './file-ref';
|
import FileRef from './file-ref';
|
||||||
import { File, Files, AnalyzeOptions, BuildOptions, PrepareCacheOptions, ShouldServeOptions, Meta } from './types';
|
|
||||||
import { Lambda, createLambda } from './lambda';
|
import { Lambda, createLambda } from './lambda';
|
||||||
import download from './fs/download';
|
import download, { DownloadedFiles } from './fs/download';
|
||||||
import getWriteableDirectory from './fs/get-writable-directory'
|
import getWriteableDirectory from './fs/get-writable-directory';
|
||||||
import glob from './fs/glob';
|
import glob from './fs/glob';
|
||||||
import rename from './fs/rename';
|
import rename from './fs/rename';
|
||||||
import { installDependencies, runPackageJsonScript, runNpmInstall, runShellScript } from './fs/run-user-scripts';
|
import {
|
||||||
|
installDependencies,
|
||||||
|
runPackageJsonScript,
|
||||||
|
runNpmInstall,
|
||||||
|
runShellScript,
|
||||||
|
getNodeVersion,
|
||||||
|
getSpawnOptions,
|
||||||
|
} from './fs/run-user-scripts';
|
||||||
import streamToBuffer from './fs/stream-to-buffer';
|
import streamToBuffer from './fs/stream-to-buffer';
|
||||||
import shouldServe from './should-serve';
|
import shouldServe from './should-serve';
|
||||||
|
import { detectBuilders } from './detect-builders';
|
||||||
|
import { detectRoutes } from './detect-routes';
|
||||||
|
import debug from './debug';
|
||||||
|
|
||||||
export {
|
export {
|
||||||
FileBlob,
|
FileBlob,
|
||||||
FileFsRef,
|
FileFsRef,
|
||||||
FileRef,
|
FileRef,
|
||||||
Files,
|
Lambda,
|
||||||
File,
|
createLambda,
|
||||||
Meta,
|
download,
|
||||||
Lambda,
|
DownloadedFiles,
|
||||||
createLambda,
|
getWriteableDirectory,
|
||||||
download,
|
glob,
|
||||||
getWriteableDirectory,
|
rename,
|
||||||
glob,
|
installDependencies,
|
||||||
rename,
|
runPackageJsonScript,
|
||||||
installDependencies, runPackageJsonScript, runNpmInstall, runShellScript,
|
runNpmInstall,
|
||||||
streamToBuffer,
|
runShellScript,
|
||||||
AnalyzeOptions,
|
getNodeVersion,
|
||||||
BuildOptions,
|
getSpawnOptions,
|
||||||
PrepareCacheOptions,
|
streamToBuffer,
|
||||||
ShouldServeOptions,
|
shouldServe,
|
||||||
shouldServe,
|
detectBuilders,
|
||||||
|
detectRoutes,
|
||||||
|
debug,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export * from './types';
|
||||||
|
|||||||
@@ -32,9 +32,7 @@ export class Lambda {
|
|||||||
public runtime: string;
|
public runtime: string;
|
||||||
public environment: Environment;
|
public environment: Environment;
|
||||||
|
|
||||||
constructor({
|
constructor({ zipBuffer, handler, runtime, environment }: LambdaOptions) {
|
||||||
zipBuffer, handler, runtime, environment,
|
|
||||||
}: LambdaOptions) {
|
|
||||||
this.type = 'Lambda';
|
this.type = 'Lambda';
|
||||||
this.zipBuffer = zipBuffer;
|
this.zipBuffer = zipBuffer;
|
||||||
this.handler = handler;
|
this.handler = handler;
|
||||||
@@ -47,7 +45,10 @@ const sema = new Sema(10);
|
|||||||
const mtime = new Date(1540000000000);
|
const mtime = new Date(1540000000000);
|
||||||
|
|
||||||
export async function createLambda({
|
export async function createLambda({
|
||||||
files, handler, runtime, environment = {},
|
files,
|
||||||
|
handler,
|
||||||
|
runtime,
|
||||||
|
environment = {},
|
||||||
}: CreateLambdaOptions): Promise<Lambda> {
|
}: CreateLambdaOptions): Promise<Lambda> {
|
||||||
assert(typeof files === 'object', '"files" must be an object');
|
assert(typeof files === 'object', '"files" must be an object');
|
||||||
assert(typeof handler === 'string', '"handler" is not a string');
|
assert(typeof handler === 'string', '"handler" is not a string');
|
||||||
@@ -97,7 +98,9 @@ export async function createZip(files: Files): Promise<Buffer> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
zipFile.end();
|
zipFile.end();
|
||||||
streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
|
streamToBuffer(zipFile.outputStream)
|
||||||
|
.then(resolve)
|
||||||
|
.catch(reject);
|
||||||
});
|
});
|
||||||
|
|
||||||
return zipBuffer;
|
return zipBuffer;
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import FileFsRef from './file-fs-ref';
|
|||||||
export default function shouldServe({
|
export default function shouldServe({
|
||||||
entrypoint,
|
entrypoint,
|
||||||
files,
|
files,
|
||||||
requestPath
|
requestPath,
|
||||||
}: ShouldServeOptions): boolean {
|
}: ShouldServeOptions): boolean {
|
||||||
requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/'
|
requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/'
|
||||||
entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility
|
entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility
|
||||||
@@ -23,5 +23,5 @@ export default function shouldServe({
|
|||||||
}
|
}
|
||||||
|
|
||||||
function hasProp(obj: { [path: string]: FileFsRef }, key: string): boolean {
|
function hasProp(obj: { [path: string]: FileFsRef }, key: string): boolean {
|
||||||
return Object.hasOwnProperty.call(obj, key)
|
return Object.hasOwnProperty.call(obj, key);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,6 +5,9 @@ export interface File {
|
|||||||
type: string;
|
type: string;
|
||||||
mode: number;
|
mode: number;
|
||||||
toStream: () => NodeJS.ReadableStream;
|
toStream: () => NodeJS.ReadableStream;
|
||||||
|
/**
|
||||||
|
* The absolute path to the file in the filesystem
|
||||||
|
*/
|
||||||
fsPath?: string;
|
fsPath?: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -12,12 +15,40 @@ export interface Files {
|
|||||||
[filePath: string]: File;
|
[filePath: string]: File;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface Route {
|
||||||
|
src?: string;
|
||||||
|
dest?: string;
|
||||||
|
handle?: string;
|
||||||
|
type?: string;
|
||||||
|
headers?: {
|
||||||
|
[key: string]: string;
|
||||||
|
};
|
||||||
|
continue?: boolean;
|
||||||
|
status?: number;
|
||||||
|
}
|
||||||
|
|
||||||
export interface Config {
|
export interface Config {
|
||||||
[key: string]: string;
|
[key: string]:
|
||||||
|
| string
|
||||||
|
| string[]
|
||||||
|
| boolean
|
||||||
|
| number
|
||||||
|
| { [key: string]: string }
|
||||||
|
| undefined;
|
||||||
|
maxLambdaSize?: string;
|
||||||
|
includeFiles?: string | string[];
|
||||||
|
bundle?: boolean;
|
||||||
|
ldsflags?: string;
|
||||||
|
helpers?: boolean;
|
||||||
|
rust?: string;
|
||||||
|
debug?: boolean;
|
||||||
|
zeroConfig?: boolean;
|
||||||
|
import?: { [key: string]: string };
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface Meta {
|
export interface Meta {
|
||||||
isDev?: boolean;
|
isDev?: boolean;
|
||||||
|
skipDownload?: boolean;
|
||||||
requestPath?: string;
|
requestPath?: string;
|
||||||
filesChanged?: string[];
|
filesChanged?: string[];
|
||||||
filesRemoved?: string[];
|
filesRemoved?: string[];
|
||||||
@@ -152,3 +183,34 @@ export interface ShouldServeOptions {
|
|||||||
*/
|
*/
|
||||||
config: Config;
|
config: Config;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
export interface PackageJson {
|
||||||
|
name: string;
|
||||||
|
version: string;
|
||||||
|
engines?: {
|
||||||
|
[key: string]: string;
|
||||||
|
node: string;
|
||||||
|
npm: string;
|
||||||
|
};
|
||||||
|
scripts?: {
|
||||||
|
[key: string]: string;
|
||||||
|
};
|
||||||
|
dependencies?: {
|
||||||
|
[key: string]: string;
|
||||||
|
};
|
||||||
|
devDependencies?: {
|
||||||
|
[key: string]: string;
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface NodeVersion {
|
||||||
|
major: number;
|
||||||
|
range: string;
|
||||||
|
runtime: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Builder {
|
||||||
|
use: string;
|
||||||
|
src: string;
|
||||||
|
config?: Config;
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
{
|
{
|
||||||
"version": 2,
|
"version": 2,
|
||||||
"builds": [
|
"builds": [{ "src": "api/index.js", "use": "@now/node" }],
|
||||||
{ "src": "api/index.js", "use": "@now/node" }
|
|
||||||
],
|
|
||||||
"probes": [
|
"probes": [
|
||||||
{ "path": "/api/index.js", "mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER" }
|
{
|
||||||
|
"path": "/api/index.js",
|
||||||
|
"mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER"
|
||||||
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
9
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/index.js
vendored
Normal file
9
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/index.js
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
const scheduler = require('@google-cloud/scheduler');
|
||||||
|
|
||||||
|
module.exports = (_, res) => {
|
||||||
|
if (scheduler) {
|
||||||
|
res.end('found:RANDOMNESS_PLACEHOLDER');
|
||||||
|
} else {
|
||||||
|
res.end('nope:RANDOMNESS_PLACEHOLDER');
|
||||||
|
}
|
||||||
|
};
|
||||||
10
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/now.json
vendored
Normal file
10
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/now.json
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{
|
||||||
|
"src": "index.js",
|
||||||
|
"use": "@now/node"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"probes": [{ "path": "/", "mustContain": "found:RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
8
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/package.json
vendored
Normal file
8
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/package.json
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"name": "15-yarn-ignore-engines",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"dependencies": {
|
||||||
|
"@google-cloud/scheduler": "0.3.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -4,8 +4,14 @@ const fs = require('fs-extra');
|
|||||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||||
const execa = require('execa');
|
const execa = require('execa');
|
||||||
const assert = require('assert');
|
const assert = require('assert');
|
||||||
const { glob, download } = require('../');
|
const {
|
||||||
|
glob, download, detectBuilders, detectRoutes,
|
||||||
|
} = require('../');
|
||||||
const { createZip } = require('../dist/lambda');
|
const { createZip } = require('../dist/lambda');
|
||||||
|
const {
|
||||||
|
getSupportedNodeVersion,
|
||||||
|
defaultSelection,
|
||||||
|
} = require('../dist/fs/node-version');
|
||||||
|
|
||||||
const {
|
const {
|
||||||
packAndDeploy,
|
packAndDeploy,
|
||||||
@@ -64,6 +70,82 @@ it('should create zip files with symlinks properly', async () => {
|
|||||||
assert(aStat.isFile());
|
assert(aStat.isFile());
|
||||||
});
|
});
|
||||||
|
|
||||||
|
it('should only match supported node versions', () => {
|
||||||
|
expect(getSupportedNodeVersion('10.x')).resolves.toHaveProperty('major', 10);
|
||||||
|
expect(getSupportedNodeVersion('8.10.x')).resolves.toHaveProperty('major', 8);
|
||||||
|
expect(getSupportedNodeVersion('8.11.x')).rejects.toThrow();
|
||||||
|
expect(getSupportedNodeVersion('6.x')).rejects.toThrow();
|
||||||
|
expect(getSupportedNodeVersion('999.x')).rejects.toThrow();
|
||||||
|
expect(getSupportedNodeVersion('foo')).rejects.toThrow();
|
||||||
|
expect(getSupportedNodeVersion('')).resolves.toBe(defaultSelection);
|
||||||
|
expect(getSupportedNodeVersion(null)).resolves.toBe(defaultSelection);
|
||||||
|
expect(getSupportedNodeVersion(undefined)).resolves.toBe(defaultSelection);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should match all semver ranges', () => {
|
||||||
|
// See https://docs.npmjs.com/files/package.json#engines
|
||||||
|
expect(getSupportedNodeVersion('10.0.0')).resolves.toHaveProperty(
|
||||||
|
'major',
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
expect(getSupportedNodeVersion('10.x')).resolves.toHaveProperty('major', 10);
|
||||||
|
expect(getSupportedNodeVersion('>=10')).resolves.toHaveProperty('major', 10);
|
||||||
|
expect(getSupportedNodeVersion('>=10.3.0')).resolves.toHaveProperty(
|
||||||
|
'major',
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
expect(getSupportedNodeVersion('8.5.0 - 10.5.0')).resolves.toHaveProperty(
|
||||||
|
'major',
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
expect(getSupportedNodeVersion('>=9.0.0')).resolves.toHaveProperty(
|
||||||
|
'major',
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
expect(getSupportedNodeVersion('>=9.5.0 <=10.5.0')).resolves.toHaveProperty(
|
||||||
|
'major',
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
expect(getSupportedNodeVersion('~10.5.0')).resolves.toHaveProperty(
|
||||||
|
'major',
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
expect(getSupportedNodeVersion('^10.5.0')).resolves.toHaveProperty(
|
||||||
|
'major',
|
||||||
|
10,
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should support require by path for legacy builders', () => {
|
||||||
|
const index = require('@now/build-utils');
|
||||||
|
|
||||||
|
const download2 = require('@now/build-utils/fs/download.js');
|
||||||
|
const getWriteableDirectory2 = require('@now/build-utils/fs/get-writable-directory.js');
|
||||||
|
const glob2 = require('@now/build-utils/fs/glob.js');
|
||||||
|
const rename2 = require('@now/build-utils/fs/rename.js');
|
||||||
|
const {
|
||||||
|
runNpmInstall: runNpmInstall2,
|
||||||
|
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||||
|
const streamToBuffer2 = require('@now/build-utils/fs/stream-to-buffer.js');
|
||||||
|
|
||||||
|
const FileBlob2 = require('@now/build-utils/file-blob.js');
|
||||||
|
const FileFsRef2 = require('@now/build-utils/file-fs-ref.js');
|
||||||
|
const FileRef2 = require('@now/build-utils/file-ref.js');
|
||||||
|
const { Lambda: Lambda2 } = require('@now/build-utils/lambda.js');
|
||||||
|
|
||||||
|
expect(download2).toBe(index.download);
|
||||||
|
expect(getWriteableDirectory2).toBe(index.getWriteableDirectory);
|
||||||
|
expect(glob2).toBe(index.glob);
|
||||||
|
expect(rename2).toBe(index.rename);
|
||||||
|
expect(runNpmInstall2).toBe(index.runNpmInstall);
|
||||||
|
expect(streamToBuffer2).toBe(index.streamToBuffer);
|
||||||
|
|
||||||
|
expect(FileBlob2).toBe(index.FileBlob);
|
||||||
|
expect(FileFsRef2).toBe(index.FileFsRef);
|
||||||
|
expect(FileRef2).toBe(index.FileRef);
|
||||||
|
expect(Lambda2).toBe(index.Lambda);
|
||||||
|
});
|
||||||
|
|
||||||
// own fixtures
|
// own fixtures
|
||||||
|
|
||||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||||
@@ -83,7 +165,7 @@ for (const fixture of fs.readdirSync(fixturesPath)) {
|
|||||||
|
|
||||||
// few foreign tests
|
// few foreign tests
|
||||||
|
|
||||||
const buildersToTestWith = ['now-node-server', 'now-static-build'];
|
const buildersToTestWith = ['now-next', 'now-node', 'now-static-build'];
|
||||||
|
|
||||||
// eslint-disable-next-line no-restricted-syntax
|
// eslint-disable-next-line no-restricted-syntax
|
||||||
for (const builder of buildersToTestWith) {
|
for (const builder of buildersToTestWith) {
|
||||||
@@ -95,7 +177,7 @@ for (const builder of buildersToTestWith) {
|
|||||||
// eslint-disable-next-line no-restricted-syntax
|
// eslint-disable-next-line no-restricted-syntax
|
||||||
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
||||||
// don't run all foreign fixtures, just some
|
// don't run all foreign fixtures, just some
|
||||||
if (['01-cowsay', '03-env-vars'].includes(fixture)) {
|
if (['01-cowsay', '01-cache-headers', '03-env-vars'].includes(fixture)) {
|
||||||
// eslint-disable-next-line no-loop-func
|
// eslint-disable-next-line no-loop-func
|
||||||
it(`should build ${builder}/${fixture}`, async () => {
|
it(`should build ${builder}/${fixture}`, async () => {
|
||||||
await expect(
|
await expect(
|
||||||
@@ -108,3 +190,400 @@ for (const builder of buildersToTestWith) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
it('Test `detectBuilders`', async () => {
|
||||||
|
{
|
||||||
|
// package.json + no build
|
||||||
|
const pkg = { dependencies: { next: '9.0.0' } };
|
||||||
|
const files = ['package.json', 'pages/index.js', 'public/index.html'];
|
||||||
|
const { builders, errors } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders).toBe(null);
|
||||||
|
expect(errors.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// package.json + no build + next
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'next build' },
|
||||||
|
dependencies: { next: '9.0.0' },
|
||||||
|
};
|
||||||
|
const files = ['package.json', 'pages/index.js'];
|
||||||
|
const { builders, errors } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders[0].use).toBe('@now/next');
|
||||||
|
expect(errors).toBe(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// package.json + no build + next
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'next build' },
|
||||||
|
devDependencies: { next: '9.0.0' },
|
||||||
|
};
|
||||||
|
const files = ['package.json', 'pages/index.js'];
|
||||||
|
const { builders, errors } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders[0].use).toBe('@now/next');
|
||||||
|
expect(errors).toBe(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// package.json + no build
|
||||||
|
const pkg = {};
|
||||||
|
const files = ['package.json'];
|
||||||
|
const { builders, errors } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders).toBe(null);
|
||||||
|
expect(errors.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// static file
|
||||||
|
const files = ['index.html'];
|
||||||
|
const { builders, errors } = await detectBuilders(files);
|
||||||
|
expect(builders).toBe(null);
|
||||||
|
expect(errors).toBe(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// no package.json + public
|
||||||
|
const files = ['api/users.js', 'public/index.html'];
|
||||||
|
const { builders, errors } = await detectBuilders(files);
|
||||||
|
expect(builders[1].use).toBe('@now/static');
|
||||||
|
expect(errors).toBe(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// no package.json + no build + raw static + api
|
||||||
|
const files = ['api/users.js', 'index.html'];
|
||||||
|
const { builders, errors } = await detectBuilders(files);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/users.js');
|
||||||
|
expect(builders[1].use).toBe('@now/static');
|
||||||
|
expect(builders[1].src).toBe('index.html');
|
||||||
|
expect(builders.length).toBe(2);
|
||||||
|
expect(errors).toBe(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// package.json + no build + root + api
|
||||||
|
const files = ['index.html', 'api/[endpoint].js', 'static/image.png'];
|
||||||
|
const { builders, errors } = await detectBuilders(files);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/[endpoint].js');
|
||||||
|
expect(builders[1].use).toBe('@now/static');
|
||||||
|
expect(builders[1].src).toBe('index.html');
|
||||||
|
expect(builders[2].use).toBe('@now/static');
|
||||||
|
expect(builders[2].src).toBe('static/image.png');
|
||||||
|
expect(builders.length).toBe(3);
|
||||||
|
expect(errors).toBe(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// api + ignore files
|
||||||
|
const files = [
|
||||||
|
'api/_utils/handler.js',
|
||||||
|
'api/[endpoint]/.helper.js',
|
||||||
|
'api/[endpoint]/[id].js',
|
||||||
|
];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/[endpoint]/[id].js');
|
||||||
|
expect(builders.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// api + next + public
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'next build' },
|
||||||
|
devDependencies: { next: '9.0.0' },
|
||||||
|
};
|
||||||
|
const files = ['package.json', 'api/endpoint.js', 'public/index.html'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/endpoint.js');
|
||||||
|
expect(builders[1].use).toBe('@now/next');
|
||||||
|
expect(builders[1].src).toBe('package.json');
|
||||||
|
expect(builders.length).toBe(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// api + next + raw static
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'next build' },
|
||||||
|
devDependencies: { next: '9.0.0' },
|
||||||
|
};
|
||||||
|
const files = ['package.json', 'api/endpoint.js', 'index.html'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/endpoint.js');
|
||||||
|
expect(builders[1].use).toBe('@now/next');
|
||||||
|
expect(builders[1].src).toBe('package.json');
|
||||||
|
expect(builders.length).toBe(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// api + raw static
|
||||||
|
const files = ['api/endpoint.js', 'index.html', 'favicon.ico'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/endpoint.js');
|
||||||
|
expect(builders[1].use).toBe('@now/static');
|
||||||
|
expect(builders[1].src).toBe('favicon.ico');
|
||||||
|
expect(builders[2].use).toBe('@now/static');
|
||||||
|
expect(builders[2].src).toBe('index.html');
|
||||||
|
expect(builders.length).toBe(3);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// api + public
|
||||||
|
const files = [
|
||||||
|
'api/endpoint.js',
|
||||||
|
'public/index.html',
|
||||||
|
'public/favicon.ico',
|
||||||
|
'README.md',
|
||||||
|
];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/endpoint.js');
|
||||||
|
expect(builders[1].use).toBe('@now/static');
|
||||||
|
expect(builders[1].src).toBe('public/**/*');
|
||||||
|
expect(builders.length).toBe(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// just public
|
||||||
|
const files = ['public/index.html', 'public/favicon.ico', 'README.md'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
expect(builders[0].src).toBe('public/**/*');
|
||||||
|
expect(builders.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// next + public
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'next build' },
|
||||||
|
devDependencies: { next: '9.0.0' },
|
||||||
|
};
|
||||||
|
const files = ['package.json', 'public/index.html', 'README.md'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders[0].use).toBe('@now/next');
|
||||||
|
expect(builders[0].src).toBe('package.json');
|
||||||
|
expect(builders.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// nuxt
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'nuxt build' },
|
||||||
|
dependencies: { nuxt: '2.8.1' },
|
||||||
|
};
|
||||||
|
const files = ['package.json', 'pages/index.js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders[0].use).toBe('@now/static-build');
|
||||||
|
expect(builders[0].src).toBe('package.json');
|
||||||
|
expect(builders.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// package.json with no build + api
|
||||||
|
const pkg = { dependencies: { next: '9.0.0' } };
|
||||||
|
const files = ['package.json', 'api/[endpoint].js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[0].src).toBe('api/[endpoint].js');
|
||||||
|
expect(builders.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// package.json with no build + public directory
|
||||||
|
const pkg = { dependencies: { next: '9.0.0' } };
|
||||||
|
const files = ['package.json', 'public/index.html'];
|
||||||
|
|
||||||
|
const { builders, errors } = await detectBuilders(files, pkg);
|
||||||
|
expect(builders).toBe(null);
|
||||||
|
expect(errors.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// no package.json + api
|
||||||
|
const files = ['api/[endpoint].js', 'api/[endpoint]/[id].js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
expect(builders.length).toBe(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// no package.json + no api
|
||||||
|
const files = ['index.html'];
|
||||||
|
|
||||||
|
const { builders, errors } = await detectBuilders(files);
|
||||||
|
expect(builders).toBe(null);
|
||||||
|
expect(errors).toBe(null);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
// package.json + api + canary
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'next build' },
|
||||||
|
dependencies: { next: '9.0.0' },
|
||||||
|
};
|
||||||
|
const files = [
|
||||||
|
'pages/index.js',
|
||||||
|
'api/[endpoint].js',
|
||||||
|
'api/[endpoint]/[id].js',
|
||||||
|
];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files, pkg, { tag: 'canary' });
|
||||||
|
expect(builders[0].use).toBe('@now/node@canary');
|
||||||
|
expect(builders[1].use).toBe('@now/node@canary');
|
||||||
|
expect(builders[2].use).toBe('@now/next@canary');
|
||||||
|
expect(builders.length).toBe(3);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
it('Test `detectRoutes`', async () => {
|
||||||
|
{
|
||||||
|
const files = ['api/user.go', 'api/team.js', 'api/package.json'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
expect(defaultRoutes.length).toBe(3);
|
||||||
|
expect(defaultRoutes[0].dest).toBe('/api/team.js');
|
||||||
|
expect(defaultRoutes[1].dest).toBe('/api/user.go');
|
||||||
|
expect(defaultRoutes[2].dest).not.toBeDefined();
|
||||||
|
expect(defaultRoutes[2].status).toBe(404);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['api/user.go', 'api/user.js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { error } = await detectRoutes(files, builders);
|
||||||
|
expect(error.code).toBe('conflicting_file_path');
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['api/[user].go', 'api/[team]/[id].js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { error } = await detectRoutes(files, builders);
|
||||||
|
expect(error.code).toBe('conflicting_file_path');
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['api/[team]/[team].js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { error } = await detectRoutes(files, builders);
|
||||||
|
expect(error.code).toBe('conflicting_path_segment');
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['api/date/index.js', 'api/date/index.go'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes, error } = await detectRoutes(files, builders);
|
||||||
|
expect(defaultRoutes).toBe(null);
|
||||||
|
expect(error.code).toBe('conflicting_file_path');
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['api/[endpoint].js', 'api/[endpoint]/[id].js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
expect(defaultRoutes.length).toBe(3);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = [
|
||||||
|
'public/index.html',
|
||||||
|
'api/[endpoint].js',
|
||||||
|
'api/[endpoint]/[id].js',
|
||||||
|
];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
expect(defaultRoutes[2].status).toBe(404);
|
||||||
|
expect(defaultRoutes[2].src).toBe('/api(\\/.*)?$');
|
||||||
|
expect(defaultRoutes[3].src).toBe('/(.*)');
|
||||||
|
expect(defaultRoutes[3].dest).toBe('/public/$1');
|
||||||
|
expect(defaultRoutes.length).toBe(4);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const pkg = {
|
||||||
|
scripts: { build: 'next build' },
|
||||||
|
devDependencies: { next: '9.0.0' },
|
||||||
|
};
|
||||||
|
const files = ['public/index.html', 'api/[endpoint].js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files, pkg);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
expect(defaultRoutes[1].status).toBe(404);
|
||||||
|
expect(defaultRoutes[1].src).toBe('/api(\\/.*)?$');
|
||||||
|
expect(defaultRoutes.length).toBe(2);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['public/index.html'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
|
||||||
|
expect(defaultRoutes.length).toBe(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['api/date/index.js', 'api/date.js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
|
||||||
|
expect(defaultRoutes.length).toBe(3);
|
||||||
|
expect(defaultRoutes[0].src).toBe('^/api/date/(index|index\\.js)?$');
|
||||||
|
expect(defaultRoutes[0].dest).toBe('/api/date/index.js');
|
||||||
|
expect(defaultRoutes[1].src).toBe('^/api/(date|date\\.js)$');
|
||||||
|
expect(defaultRoutes[1].dest).toBe('/api/date.js');
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = ['api/date.js', 'api/[date]/index.js'];
|
||||||
|
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
|
||||||
|
expect(defaultRoutes.length).toBe(3);
|
||||||
|
expect(defaultRoutes[0].src).toBe('^/api/([^\\/]+)/(index|index\\.js)?$');
|
||||||
|
expect(defaultRoutes[0].dest).toBe('/api/[date]/index.js?date=$1');
|
||||||
|
expect(defaultRoutes[1].src).toBe('^/api/(date|date\\.js)$');
|
||||||
|
expect(defaultRoutes[1].dest).toBe('/api/date.js');
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
const files = [
|
||||||
|
'api/index.ts',
|
||||||
|
'api/index.d.ts',
|
||||||
|
'api/users/index.ts',
|
||||||
|
'api/users/index.d.ts',
|
||||||
|
'api/food.ts',
|
||||||
|
'api/ts/gold.ts',
|
||||||
|
];
|
||||||
|
const { builders } = await detectBuilders(files);
|
||||||
|
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||||
|
|
||||||
|
expect(builders.length).toBe(4);
|
||||||
|
expect(builders[0].use).toBe('@now/node');
|
||||||
|
expect(builders[1].use).toBe('@now/node');
|
||||||
|
expect(builders[2].use).toBe('@now/node');
|
||||||
|
expect(builders[3].use).toBe('@now/node');
|
||||||
|
expect(defaultRoutes.length).toBe(5);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|||||||
@@ -15,10 +15,6 @@
|
|||||||
"strict": true,
|
"strict": true,
|
||||||
"target": "esnext"
|
"target": "esnext"
|
||||||
},
|
},
|
||||||
"include": [
|
"include": ["src/**/*"],
|
||||||
"src/**/*"
|
"exclude": ["node_modules"]
|
||||||
],
|
|
||||||
"exclude": [
|
|
||||||
"node_modules"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,15 +5,17 @@ const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/
|
|||||||
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
|
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
|
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
|
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
|
const { shouldServe } = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
|
|
||||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||||
|
|
||||||
exports.build = async ({ files, entrypoint }) => {
|
exports.build = async ({
|
||||||
|
workPath, files, entrypoint, meta,
|
||||||
|
}) => {
|
||||||
console.log('downloading files...');
|
console.log('downloading files...');
|
||||||
const srcDir = await getWritableDirectory();
|
|
||||||
const outDir = await getWritableDirectory();
|
const outDir = await getWritableDirectory();
|
||||||
|
|
||||||
await download(files, srcDir);
|
await download(files, workPath, meta);
|
||||||
|
|
||||||
const handlerPath = path.join(__dirname, 'handler');
|
const handlerPath = path.join(__dirname, 'handler');
|
||||||
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
||||||
@@ -23,7 +25,7 @@ exports.build = async ({ files, entrypoint }) => {
|
|||||||
|
|
||||||
// For now only the entrypoint file is copied into the lambda
|
// For now only the entrypoint file is copied into the lambda
|
||||||
await copyFile(
|
await copyFile(
|
||||||
path.join(srcDir, entrypoint),
|
path.join(workPath, entrypoint),
|
||||||
path.join(outDir, entrypoint),
|
path.join(outDir, entrypoint),
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -40,3 +42,5 @@ exports.build = async ({ files, entrypoint }) => {
|
|||||||
[entrypoint]: lambda,
|
[entrypoint]: lambda,
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
exports.shouldServe = shouldServe;
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/cgi",
|
"name": "@now/cgi",
|
||||||
"version": "0.1.0",
|
"version": "0.1.4",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
|
|||||||
5
packages/now-go/.gitignore
vendored
5
packages/now-go/.gitignore
vendored
@@ -1,5 +1,6 @@
|
|||||||
node_modules
|
node_modules
|
||||||
|
dist
|
||||||
*.log
|
*.log
|
||||||
/?.js
|
|
||||||
/go
|
/go
|
||||||
/get-exported-function-name
|
/analyze
|
||||||
|
*.js
|
||||||
|
|||||||
2
packages/now-go/build.sh
Executable file
2
packages/now-go/build.sh
Executable file
@@ -0,0 +1,2 @@
|
|||||||
|
ncc build index.ts -o dist
|
||||||
|
ncc build install.ts -o dist/install
|
||||||
@@ -1,129 +0,0 @@
|
|||||||
const tar = require('tar');
|
|
||||||
const execa = require('execa');
|
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const { mkdirp } = require('fs-extra');
|
|
||||||
const { dirname, join } = require('path');
|
|
||||||
const debug = require('debug')('@now/go:go-helpers');
|
|
||||||
|
|
||||||
const archMap = new Map([['x64', 'amd64'], ['x86', '386']]);
|
|
||||||
const platformMap = new Map([['win32', 'windows']]);
|
|
||||||
|
|
||||||
// Location where the `go` binary will be installed after `postinstall`
|
|
||||||
const GO_DIR = join(__dirname, 'go');
|
|
||||||
const GO_BIN = join(GO_DIR, 'bin/go');
|
|
||||||
|
|
||||||
const getPlatform = p => platformMap.get(p) || p;
|
|
||||||
const getArch = a => archMap.get(a) || a;
|
|
||||||
const getGoUrl = (version, platform, arch) => {
|
|
||||||
const goArch = getArch(arch);
|
|
||||||
const goPlatform = getPlatform(platform);
|
|
||||||
const ext = platform === 'win32' ? 'zip' : 'tar.gz';
|
|
||||||
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
|
||||||
};
|
|
||||||
|
|
||||||
async function getExportedFunctionName(filePath) {
|
|
||||||
debug('Detecting handler name for %o', filePath);
|
|
||||||
const bin = join(__dirname, 'get-exported-function-name');
|
|
||||||
const args = [filePath];
|
|
||||||
const name = await execa.stdout(bin, args);
|
|
||||||
debug('Detected exported name %o', name);
|
|
||||||
return name;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Creates a `$GOPATH` directory tree, as per `go help gopath` instructions.
|
|
||||||
// Without this, `go` won't recognize the `$GOPATH`.
|
|
||||||
function createGoPathTree(goPath, platform, arch) {
|
|
||||||
const tuple = `${getPlatform(platform)}_${getArch(arch)}`;
|
|
||||||
debug('Creating GOPATH directory structure for %o (%s)', goPath, tuple);
|
|
||||||
return Promise.all([
|
|
||||||
mkdirp(join(goPath, 'bin')),
|
|
||||||
mkdirp(join(goPath, 'pkg', tuple)),
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function get({ src } = {}) {
|
|
||||||
const args = ['get'];
|
|
||||||
if (src) {
|
|
||||||
debug('Fetching `go` dependencies for file %o', src);
|
|
||||||
args.push(src);
|
|
||||||
} else {
|
|
||||||
debug('Fetching `go` dependencies for cwd %o', this.cwd);
|
|
||||||
}
|
|
||||||
await this(...args);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function build({ src, dest }) {
|
|
||||||
debug('Building `go` binary %o -> %o', src, dest);
|
|
||||||
let sources;
|
|
||||||
if (Array.isArray(src)) {
|
|
||||||
sources = src;
|
|
||||||
} else {
|
|
||||||
sources = [src];
|
|
||||||
}
|
|
||||||
await this('build', '-o', dest, ...sources);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function createGo(
|
|
||||||
goPath,
|
|
||||||
platform = process.platform,
|
|
||||||
arch = process.arch,
|
|
||||||
opts = {},
|
|
||||||
goMod = false,
|
|
||||||
) {
|
|
||||||
const env = {
|
|
||||||
...process.env,
|
|
||||||
PATH: `${dirname(GO_BIN)}:${process.env.PATH}`,
|
|
||||||
GOPATH: goPath,
|
|
||||||
...opts.env,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (goMod) {
|
|
||||||
env.GO111MODULE = 'on';
|
|
||||||
}
|
|
||||||
|
|
||||||
function go(...args) {
|
|
||||||
debug('Exec %o', `go ${args.join(' ')}`);
|
|
||||||
return execa('go', args, { stdio: 'inherit', ...opts, env });
|
|
||||||
}
|
|
||||||
go.cwd = opts.cwd || process.cwd();
|
|
||||||
go.get = get;
|
|
||||||
go.build = build;
|
|
||||||
go.goPath = goPath;
|
|
||||||
await createGoPathTree(goPath, platform, arch);
|
|
||||||
return go;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function downloadGo(
|
|
||||||
dir = GO_DIR,
|
|
||||||
version = '1.12',
|
|
||||||
platform = process.platform,
|
|
||||||
arch = process.arch,
|
|
||||||
) {
|
|
||||||
debug('Installing `go` v%s to %o for %s %s', version, dir, platform, arch);
|
|
||||||
|
|
||||||
const url = getGoUrl(version, platform, arch);
|
|
||||||
debug('Downloading `go` URL: %o', url);
|
|
||||||
const res = await fetch(url);
|
|
||||||
|
|
||||||
if (!res.ok) {
|
|
||||||
throw new Error(`Failed to download: ${url} (${res.status})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO: use a zip extractor when `ext === "zip"`
|
|
||||||
await mkdirp(dir);
|
|
||||||
await new Promise((resolve, reject) => {
|
|
||||||
res.body
|
|
||||||
.on('error', reject)
|
|
||||||
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
|
||||||
.on('error', reject)
|
|
||||||
.on('finish', resolve);
|
|
||||||
});
|
|
||||||
|
|
||||||
return createGo(dir, platform, arch);
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
createGo,
|
|
||||||
downloadGo,
|
|
||||||
getExportedFunctionName,
|
|
||||||
};
|
|
||||||
170
packages/now-go/go-helpers.ts
Normal file
170
packages/now-go/go-helpers.ts
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
import tar from 'tar';
|
||||||
|
import execa from 'execa';
|
||||||
|
import fetch from 'node-fetch';
|
||||||
|
import { mkdirp, pathExists } from 'fs-extra';
|
||||||
|
import { dirname, join } from 'path';
|
||||||
|
import { homedir } from 'os';
|
||||||
|
import Debug from 'debug';
|
||||||
|
|
||||||
|
const debug = Debug('@now/go:go-helpers');
|
||||||
|
const archMap = new Map([['x64', 'amd64'], ['x86', '386']]);
|
||||||
|
const platformMap = new Map([['win32', 'windows']]);
|
||||||
|
|
||||||
|
// Location where the `go` binary will be installed after `postinstall`
|
||||||
|
const GO_DIR = join(__dirname, 'go');
|
||||||
|
const GO_BIN = join(GO_DIR, 'bin/go');
|
||||||
|
|
||||||
|
const getPlatform = (p: string) => platformMap.get(p) || p;
|
||||||
|
const getArch = (a: string) => archMap.get(a) || a;
|
||||||
|
const getGoUrl = (version: string, platform: string, arch: string) => {
|
||||||
|
const goArch = getArch(arch);
|
||||||
|
const goPlatform = getPlatform(platform);
|
||||||
|
const ext = platform === 'win32' ? 'zip' : 'tar.gz';
|
||||||
|
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function getAnalyzedEntrypoint(filePath: string, modulePath = '') {
|
||||||
|
debug('Analyzing entrypoint %o', filePath);
|
||||||
|
const bin = join(__dirname, 'analyze');
|
||||||
|
|
||||||
|
const isAnalyzeExist = await pathExists(bin);
|
||||||
|
if (!isAnalyzeExist) {
|
||||||
|
const src = join(__dirname, 'util', 'analyze.go');
|
||||||
|
const dest = join(__dirname, 'analyze');
|
||||||
|
const go = await downloadGo();
|
||||||
|
await go.build(src, dest);
|
||||||
|
}
|
||||||
|
|
||||||
|
const args = [`-modpath=${modulePath}`, filePath];
|
||||||
|
|
||||||
|
const analyzed = await execa.stdout(bin, args);
|
||||||
|
debug('Analyzed entrypoint %o', analyzed);
|
||||||
|
return analyzed;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a `$GOPATH` directory tree, as per `go help gopath` instructions.
|
||||||
|
// Without this, `go` won't recognize the `$GOPATH`.
|
||||||
|
function createGoPathTree(goPath: string, platform: string, arch: string) {
|
||||||
|
const tuple = `${getPlatform(platform)}_${getArch(arch)}`;
|
||||||
|
debug('Creating GOPATH directory structure for %o (%s)', goPath, tuple);
|
||||||
|
return Promise.all([
|
||||||
|
mkdirp(join(goPath, 'bin')),
|
||||||
|
mkdirp(join(goPath, 'pkg', tuple)),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
class GoWrapper {
|
||||||
|
private env: { [key: string]: string };
|
||||||
|
private opts: execa.Options;
|
||||||
|
|
||||||
|
constructor(env: { [key: string]: string }, opts: execa.Options = {}) {
|
||||||
|
if (!opts.cwd) {
|
||||||
|
opts.cwd = process.cwd();
|
||||||
|
}
|
||||||
|
this.env = env;
|
||||||
|
this.opts = opts;
|
||||||
|
}
|
||||||
|
|
||||||
|
private execute(...args: string[]) {
|
||||||
|
const { opts, env } = this;
|
||||||
|
debug('Exec %o', `go ${args.join(' ')}`);
|
||||||
|
return execa('go', args, { stdio: 'inherit', ...opts, env });
|
||||||
|
}
|
||||||
|
|
||||||
|
mod() {
|
||||||
|
return this.execute('mod', 'tidy');
|
||||||
|
}
|
||||||
|
|
||||||
|
get(src?: string) {
|
||||||
|
const args = ['get'];
|
||||||
|
if (src) {
|
||||||
|
debug('Fetching `go` dependencies for file %o', src);
|
||||||
|
args.push(src);
|
||||||
|
} else {
|
||||||
|
debug('Fetching `go` dependencies for cwd %o', this.opts.cwd);
|
||||||
|
}
|
||||||
|
return this.execute(...args);
|
||||||
|
}
|
||||||
|
|
||||||
|
build(src: string | string[], dest: string, ldsflags = '-s -w') {
|
||||||
|
debug('Building optimized `go` binary %o -> %o', src, dest);
|
||||||
|
const sources = Array.isArray(src) ? src : [src];
|
||||||
|
return this.execute('build', '-ldflags', ldsflags, '-o', dest, ...sources);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createGo(
|
||||||
|
goPath: string,
|
||||||
|
platform = process.platform,
|
||||||
|
arch = process.arch,
|
||||||
|
opts: execa.Options = {},
|
||||||
|
goMod = false
|
||||||
|
) {
|
||||||
|
const path = `${dirname(GO_BIN)}:${process.env.PATH}`;
|
||||||
|
const env: { [key: string]: string } = {
|
||||||
|
...process.env,
|
||||||
|
PATH: path,
|
||||||
|
GOPATH: goPath,
|
||||||
|
...opts.env,
|
||||||
|
};
|
||||||
|
if (goMod) {
|
||||||
|
env.GO111MODULE = 'on';
|
||||||
|
}
|
||||||
|
await createGoPathTree(goPath, platform, arch);
|
||||||
|
return new GoWrapper(env, opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function downloadGo(
|
||||||
|
dir = GO_DIR,
|
||||||
|
version = '1.12',
|
||||||
|
platform = process.platform,
|
||||||
|
arch = process.arch
|
||||||
|
) {
|
||||||
|
// Check default `Go` in user machine
|
||||||
|
const isUserGo = await pathExists(join(homedir(), 'go'));
|
||||||
|
|
||||||
|
// If we found GOPATH in ENV, or default `Go` path exists
|
||||||
|
// asssume that user have `Go` installed
|
||||||
|
if (isUserGo || process.env.GOPATH !== undefined) {
|
||||||
|
const { stdout } = await execa('go', ['version']);
|
||||||
|
|
||||||
|
if (parseInt(stdout.split('.')[1]) >= 11) {
|
||||||
|
return createGo(dir, platform, arch);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(
|
||||||
|
`Your current ${stdout} doesn't support Go Modules. Please update.`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// Check `Go` bin in builder CWD
|
||||||
|
const isGoExist = await pathExists(join(dir, 'bin'));
|
||||||
|
if (!isGoExist) {
|
||||||
|
debug(
|
||||||
|
'Installing `go` v%s to %o for %s %s',
|
||||||
|
version,
|
||||||
|
dir,
|
||||||
|
platform,
|
||||||
|
arch
|
||||||
|
);
|
||||||
|
const url = getGoUrl(version, platform, arch);
|
||||||
|
debug('Downloading `go` URL: %o', url);
|
||||||
|
console.log('Downloading Go ...');
|
||||||
|
const res = await fetch(url);
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
throw new Error(`Failed to download: ${url} (${res.status})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: use a zip extractor when `ext === "zip"`
|
||||||
|
await mkdirp(dir);
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
res.body
|
||||||
|
.on('error', reject)
|
||||||
|
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
||||||
|
.on('error', reject)
|
||||||
|
.on('finish', resolve);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return createGo(dir, platform, arch);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,210 +0,0 @@
|
|||||||
const { join, sep, dirname } = require('path');
|
|
||||||
const {
|
|
||||||
readFile, writeFile, pathExists, move,
|
|
||||||
} = require('fs-extra');
|
|
||||||
|
|
||||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // eslint-disable-line import/no-extraneous-dependencies
|
|
||||||
const { createGo, getExportedFunctionName } = require('./go-helpers');
|
|
||||||
|
|
||||||
const config = {
|
|
||||||
maxLambdaSize: '10mb',
|
|
||||||
};
|
|
||||||
|
|
||||||
async function build({ files, entrypoint }) {
|
|
||||||
console.log('Downloading user files...');
|
|
||||||
|
|
||||||
const [goPath, outDir] = await Promise.all([
|
|
||||||
getWritableDirectory(),
|
|
||||||
getWritableDirectory(),
|
|
||||||
]);
|
|
||||||
|
|
||||||
const srcPath = join(goPath, 'src', 'lambda');
|
|
||||||
const downloadedFiles = await download(files, srcPath);
|
|
||||||
|
|
||||||
console.log(`Parsing AST for "${entrypoint}"`);
|
|
||||||
let parseFunctionName;
|
|
||||||
try {
|
|
||||||
parseFunctionName = await getExportedFunctionName(
|
|
||||||
downloadedFiles[entrypoint].fsPath,
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
console.log(`Failed to parse AST for "${entrypoint}"`);
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!parseFunctionName) {
|
|
||||||
const err = new Error(
|
|
||||||
`Could not find an exported function in "${entrypoint}"`,
|
|
||||||
);
|
|
||||||
console.log(err.message);
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
const handlerFunctionName = parseFunctionName.split(',')[0];
|
|
||||||
|
|
||||||
console.log(
|
|
||||||
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`,
|
|
||||||
);
|
|
||||||
|
|
||||||
// we need `main.go` in the same dir as the entrypoint,
|
|
||||||
// otherwise `go build` will refuse to build
|
|
||||||
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
|
||||||
|
|
||||||
// check if package name other than main
|
|
||||||
const packageName = parseFunctionName.split(',')[1];
|
|
||||||
const isGoModExist = await pathExists(join(entrypointDirname, 'go.mod'));
|
|
||||||
if (packageName !== 'main') {
|
|
||||||
const go = await createGo(
|
|
||||||
goPath,
|
|
||||||
process.platform,
|
|
||||||
process.arch,
|
|
||||||
{
|
|
||||||
cwd: entrypointDirname,
|
|
||||||
},
|
|
||||||
true,
|
|
||||||
);
|
|
||||||
if (!isGoModExist) {
|
|
||||||
try {
|
|
||||||
const defaultGoModContent = `module ${packageName}`;
|
|
||||||
|
|
||||||
await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
|
|
||||||
} catch (err) {
|
|
||||||
console.log(`failed to create default go.mod for ${packageName}`);
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const mainModGoFileName = 'main__mod__.go';
|
|
||||||
const modMainGoContents = await readFile(
|
|
||||||
join(__dirname, mainModGoFileName),
|
|
||||||
'utf8',
|
|
||||||
);
|
|
||||||
|
|
||||||
let goPackageName = `${packageName}/${packageName}`;
|
|
||||||
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
|
||||||
|
|
||||||
if (isGoModExist) {
|
|
||||||
const goModContents = await readFile(
|
|
||||||
join(entrypointDirname, 'go.mod'),
|
|
||||||
'utf8',
|
|
||||||
);
|
|
||||||
goPackageName = `${
|
|
||||||
goModContents.split('\n')[0].split(' ')[1]
|
|
||||||
}/${packageName}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
const mainModGoContents = modMainGoContents
|
|
||||||
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
|
||||||
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
|
||||||
|
|
||||||
// write main__mod__.go
|
|
||||||
await writeFile(
|
|
||||||
join(entrypointDirname, mainModGoFileName),
|
|
||||||
mainModGoContents,
|
|
||||||
);
|
|
||||||
|
|
||||||
// move user go file to folder
|
|
||||||
try {
|
|
||||||
// default path
|
|
||||||
let finalDestination = join(entrypointDirname, packageName, entrypoint);
|
|
||||||
const entrypointArr = entrypoint.split(sep);
|
|
||||||
|
|
||||||
// if `entrypoint` include folder, only use filename
|
|
||||||
if (entrypointArr.length > 1) {
|
|
||||||
finalDestination = join(
|
|
||||||
entrypointDirname,
|
|
||||||
packageName,
|
|
||||||
entrypointArr.pop(),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await move(downloadedFiles[entrypoint].fsPath, finalDestination);
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to move entry to package folder');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('tidy go.mod file');
|
|
||||||
try {
|
|
||||||
// ensure go.mod up-to-date
|
|
||||||
await go('mod', 'tidy');
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to `go mod tidy`');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('Running `go build`...');
|
|
||||||
const destPath = join(outDir, 'handler');
|
|
||||||
try {
|
|
||||||
const src = [join(entrypointDirname, mainModGoFileName)];
|
|
||||||
await go.build({ src, dest: destPath });
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to `go build`');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const go = await createGo(
|
|
||||||
goPath,
|
|
||||||
process.platform,
|
|
||||||
process.arch,
|
|
||||||
{
|
|
||||||
cwd: entrypointDirname,
|
|
||||||
},
|
|
||||||
false,
|
|
||||||
);
|
|
||||||
const origianlMainGoContents = await readFile(
|
|
||||||
join(__dirname, 'main.go'),
|
|
||||||
'utf8',
|
|
||||||
);
|
|
||||||
const mainGoContents = origianlMainGoContents.replace(
|
|
||||||
'__NOW_HANDLER_FUNC_NAME',
|
|
||||||
handlerFunctionName,
|
|
||||||
);
|
|
||||||
|
|
||||||
// in order to allow the user to have `main.go`,
|
|
||||||
// we need our `main.go` to be called something else
|
|
||||||
const mainGoFileName = 'main__now__go__.go';
|
|
||||||
|
|
||||||
// Go doesn't like to build files in different directories,
|
|
||||||
// so now we place `main.go` together with the user code
|
|
||||||
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
|
|
||||||
|
|
||||||
// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
|
|
||||||
// and download any packages that aren't part of the stdlib
|
|
||||||
try {
|
|
||||||
await go.get();
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to `go get`');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('Running `go build`...');
|
|
||||||
const destPath = join(outDir, 'handler');
|
|
||||||
try {
|
|
||||||
const src = [
|
|
||||||
join(entrypointDirname, mainGoFileName),
|
|
||||||
downloadedFiles[entrypoint].fsPath,
|
|
||||||
];
|
|
||||||
await go.build({ src, dest: destPath });
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to `go build`');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const lambda = await createLambda({
|
|
||||||
files: await glob('**', outDir),
|
|
||||||
handler: 'handler',
|
|
||||||
runtime: 'go1.x',
|
|
||||||
environment: {},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
[entrypoint]: lambda,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = { config, build };
|
|
||||||
427
packages/now-go/index.ts
Normal file
427
packages/now-go/index.ts
Normal file
@@ -0,0 +1,427 @@
|
|||||||
|
import { join, sep, dirname, basename } from 'path';
|
||||||
|
import { readFile, writeFile, pathExists, move } from 'fs-extra';
|
||||||
|
import { homedir } from 'os';
|
||||||
|
import execa from 'execa';
|
||||||
|
|
||||||
|
import {
|
||||||
|
glob,
|
||||||
|
download,
|
||||||
|
createLambda,
|
||||||
|
getWriteableDirectory,
|
||||||
|
BuildOptions,
|
||||||
|
shouldServe,
|
||||||
|
Files,
|
||||||
|
} from '@now/build-utils';
|
||||||
|
|
||||||
|
import { createGo, getAnalyzedEntrypoint } from './go-helpers';
|
||||||
|
|
||||||
|
interface Analyzed {
|
||||||
|
found?: boolean;
|
||||||
|
packageName: string;
|
||||||
|
functionName: string;
|
||||||
|
watch: string[];
|
||||||
|
}
|
||||||
|
interface BuildParamsMeta {
|
||||||
|
isDev: boolean | undefined;
|
||||||
|
}
|
||||||
|
interface BuildParamsType extends BuildOptions {
|
||||||
|
files: Files;
|
||||||
|
entrypoint: string;
|
||||||
|
workPath: string;
|
||||||
|
meta: BuildParamsMeta;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize private git repo for Go Modules
|
||||||
|
async function initPrivateGit(credentials: string) {
|
||||||
|
await execa('git', [
|
||||||
|
'config',
|
||||||
|
'--global',
|
||||||
|
'credential.helper',
|
||||||
|
`store --file ${join(homedir(), '.git-credentials')}`,
|
||||||
|
]);
|
||||||
|
|
||||||
|
await writeFile(join(homedir(), '.git-credentials'), credentials);
|
||||||
|
}
|
||||||
|
|
||||||
|
export const version = 2;
|
||||||
|
|
||||||
|
export async function build({
|
||||||
|
files,
|
||||||
|
entrypoint,
|
||||||
|
config,
|
||||||
|
workPath,
|
||||||
|
meta = {} as BuildParamsMeta,
|
||||||
|
}: BuildParamsType) {
|
||||||
|
if (process.env.GIT_CREDENTIALS && !meta.isDev) {
|
||||||
|
console.log('Initialize Git credentials...');
|
||||||
|
await initPrivateGit(process.env.GIT_CREDENTIALS);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.env.GO111MODULE) {
|
||||||
|
console.log(`\nManually assigning 'GO111MODULE' is not recommended.
|
||||||
|
|
||||||
|
By default:
|
||||||
|
- 'GO111MODULE=on' If entrypoint package name is not 'main'
|
||||||
|
- 'GO111MODULE=off' If entrypoint package name is 'main'
|
||||||
|
|
||||||
|
We highly recommend you leverage Go Modules in your project.
|
||||||
|
Learn more: https://github.com/golang/go/wiki/Modules
|
||||||
|
`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Downloading user files...');
|
||||||
|
const entrypointArr = entrypoint.split(sep);
|
||||||
|
|
||||||
|
let [goPath, outDir] = await Promise.all([
|
||||||
|
getWriteableDirectory(),
|
||||||
|
getWriteableDirectory(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const srcPath = join(goPath, 'src', 'lambda');
|
||||||
|
let downloadedFiles;
|
||||||
|
if (meta.isDev) {
|
||||||
|
downloadedFiles = await download(files, workPath, meta);
|
||||||
|
} else {
|
||||||
|
downloadedFiles = await download(files, srcPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Parsing AST for "${entrypoint}"`);
|
||||||
|
let analyzed: string;
|
||||||
|
try {
|
||||||
|
let goModAbsPathDir = '';
|
||||||
|
for (const file of Object.keys(downloadedFiles)) {
|
||||||
|
if (file === 'go.mod') {
|
||||||
|
goModAbsPathDir = dirname(downloadedFiles[file].fsPath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
analyzed = await getAnalyzedEntrypoint(
|
||||||
|
downloadedFiles[entrypoint].fsPath,
|
||||||
|
goModAbsPathDir
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!analyzed) {
|
||||||
|
const err = new Error(
|
||||||
|
`Could not find an exported function in "${entrypoint}"
|
||||||
|
Learn more: https://zeit.co/docs/v2/deployments/official-builders/go-now-go/#entrypoint
|
||||||
|
`
|
||||||
|
);
|
||||||
|
console.log(err.message);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsedAnalyzed = JSON.parse(analyzed) as Analyzed;
|
||||||
|
|
||||||
|
if (meta.isDev) {
|
||||||
|
let base = null;
|
||||||
|
|
||||||
|
if (config && config.zeroConfig) {
|
||||||
|
base = workPath;
|
||||||
|
} else {
|
||||||
|
base = dirname(downloadedFiles['now.json'].fsPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
const destNow = join(
|
||||||
|
base,
|
||||||
|
'.now',
|
||||||
|
'cache',
|
||||||
|
basename(entrypoint, '.go'),
|
||||||
|
'src',
|
||||||
|
'lambda'
|
||||||
|
);
|
||||||
|
// this will ensure Go rebuilt fast
|
||||||
|
goPath = join(base, '.now', 'cache', basename(entrypoint, '.go'));
|
||||||
|
await download(downloadedFiles, destNow);
|
||||||
|
|
||||||
|
downloadedFiles = await glob('**', destNow);
|
||||||
|
}
|
||||||
|
|
||||||
|
// find `go.mod` in downloadedFiles
|
||||||
|
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||||
|
let isGoModExist = false;
|
||||||
|
let goModPath = '';
|
||||||
|
let isGoModInRootDir = false;
|
||||||
|
for (const file of Object.keys(downloadedFiles)) {
|
||||||
|
const fileDirname = dirname(downloadedFiles[file].fsPath);
|
||||||
|
if (file === 'go.mod') {
|
||||||
|
isGoModExist = true;
|
||||||
|
isGoModInRootDir = true;
|
||||||
|
goModPath = fileDirname;
|
||||||
|
} else if (file.endsWith('go.mod') && !file.endsWith('vendor')) {
|
||||||
|
if (entrypointDirname === fileDirname) {
|
||||||
|
isGoModExist = true;
|
||||||
|
goModPath = fileDirname;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const input = entrypointDirname;
|
||||||
|
var includedFiles: Files = {};
|
||||||
|
|
||||||
|
if (config && config.includeFiles) {
|
||||||
|
for (const pattern of config.includeFiles) {
|
||||||
|
const files = await glob(pattern, input);
|
||||||
|
for (const assetName of Object.keys(files)) {
|
||||||
|
includedFiles[assetName] = files[assetName];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const handlerFunctionName = parsedAnalyzed.functionName;
|
||||||
|
console.log(
|
||||||
|
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!isGoModExist && 'vendor' in downloadedFiles) {
|
||||||
|
throw new Error('`go.mod` is required to use a `vendor` directory.');
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if package name other than main
|
||||||
|
// using `go.mod` way building the handler
|
||||||
|
const packageName = parsedAnalyzed.packageName;
|
||||||
|
|
||||||
|
if (isGoModExist && packageName === 'main') {
|
||||||
|
throw new Error('Please change `package main` to `package handler`');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (packageName !== 'main') {
|
||||||
|
const go = await createGo(
|
||||||
|
goPath,
|
||||||
|
process.platform,
|
||||||
|
process.arch,
|
||||||
|
{
|
||||||
|
cwd: entrypointDirname,
|
||||||
|
},
|
||||||
|
true
|
||||||
|
);
|
||||||
|
if (!isGoModExist) {
|
||||||
|
try {
|
||||||
|
const defaultGoModContent = `module ${packageName}`;
|
||||||
|
|
||||||
|
await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
|
||||||
|
} catch (err) {
|
||||||
|
console.log(`failed to create default go.mod for ${packageName}`);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const mainModGoFileName = 'main__mod__.go';
|
||||||
|
const modMainGoContents = await readFile(
|
||||||
|
join(__dirname, mainModGoFileName),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
|
|
||||||
|
let goPackageName = `${packageName}/${packageName}`;
|
||||||
|
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
||||||
|
|
||||||
|
if (isGoModExist) {
|
||||||
|
const goModContents = await readFile(join(goModPath, 'go.mod'), 'utf8');
|
||||||
|
const usrModName = goModContents.split('\n')[0].split(' ')[1];
|
||||||
|
|
||||||
|
if (entrypointArr.length > 1 && isGoModInRootDir) {
|
||||||
|
let cleanPackagePath = [...entrypointArr];
|
||||||
|
cleanPackagePath.pop();
|
||||||
|
goPackageName = `${usrModName}/${cleanPackagePath.join('/')}`;
|
||||||
|
} else {
|
||||||
|
goPackageName = `${usrModName}/${packageName}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const mainModGoContents = modMainGoContents
|
||||||
|
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
||||||
|
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
||||||
|
|
||||||
|
if (meta.isDev && isGoModExist && isGoModInRootDir) {
|
||||||
|
await writeFile(
|
||||||
|
join(dirname(downloadedFiles['go.mod'].fsPath), mainModGoFileName),
|
||||||
|
mainModGoContents
|
||||||
|
);
|
||||||
|
} else if (isGoModExist && isGoModInRootDir) {
|
||||||
|
await writeFile(join(srcPath, mainModGoFileName), mainModGoContents);
|
||||||
|
} else if (isGoModExist && !isGoModInRootDir) {
|
||||||
|
// using `go.mod` path to write main__mod__.go
|
||||||
|
await writeFile(join(goModPath, mainModGoFileName), mainModGoContents);
|
||||||
|
} else {
|
||||||
|
// using `entrypointDirname` to write main__mod__.go
|
||||||
|
await writeFile(
|
||||||
|
join(entrypointDirname, mainModGoFileName),
|
||||||
|
mainModGoContents
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// move user go file to folder
|
||||||
|
try {
|
||||||
|
// default path
|
||||||
|
let finalDestination = join(entrypointDirname, packageName, entrypoint);
|
||||||
|
let forceMove = false;
|
||||||
|
|
||||||
|
if (meta.isDev) {
|
||||||
|
forceMove = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// if `entrypoint` include folder, only use filename
|
||||||
|
if (entrypointArr.length > 1) {
|
||||||
|
finalDestination = join(
|
||||||
|
entrypointDirname,
|
||||||
|
packageName,
|
||||||
|
entrypointArr[entrypointArr.length - 1]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
dirname(downloadedFiles[entrypoint].fsPath) === goModPath ||
|
||||||
|
!isGoModExist
|
||||||
|
) {
|
||||||
|
await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
|
||||||
|
overwrite: forceMove,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to move entry to package folder');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
let baseGoModPath = '';
|
||||||
|
if (meta.isDev && isGoModExist && isGoModInRootDir) {
|
||||||
|
baseGoModPath = dirname(downloadedFiles['go.mod'].fsPath);
|
||||||
|
} else if (isGoModExist && isGoModInRootDir) {
|
||||||
|
baseGoModPath = srcPath;
|
||||||
|
} else if (isGoModExist && !isGoModInRootDir) {
|
||||||
|
baseGoModPath = goModPath;
|
||||||
|
} else {
|
||||||
|
baseGoModPath = entrypointDirname;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta.isDev) {
|
||||||
|
const isGoModBk = await pathExists(join(baseGoModPath, 'go.mod.bk'));
|
||||||
|
if (isGoModBk) {
|
||||||
|
await move(
|
||||||
|
join(baseGoModPath, 'go.mod.bk'),
|
||||||
|
join(baseGoModPath, 'go.mod'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
await move(
|
||||||
|
join(baseGoModPath, 'go.sum.bk'),
|
||||||
|
join(baseGoModPath, 'go.sum'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Tidy `go.mod` file...');
|
||||||
|
try {
|
||||||
|
// ensure go.mod up-to-date
|
||||||
|
await go.mod();
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go mod tidy`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Running `go build`...');
|
||||||
|
const destPath = join(outDir, 'handler');
|
||||||
|
|
||||||
|
try {
|
||||||
|
let src = [join(baseGoModPath, mainModGoFileName)];
|
||||||
|
|
||||||
|
await go.build(src, destPath, config.ldsflags);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go build`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
if (meta.isDev) {
|
||||||
|
// caching for `now dev`
|
||||||
|
await move(
|
||||||
|
join(baseGoModPath, 'go.mod'),
|
||||||
|
join(baseGoModPath, 'go.mod.bk'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
await move(
|
||||||
|
join(baseGoModPath, 'go.sum'),
|
||||||
|
join(baseGoModPath, 'go.sum.bk'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// legacy mode
|
||||||
|
// we need `main.go` in the same dir as the entrypoint,
|
||||||
|
// otherwise `go build` will refuse to build
|
||||||
|
const go = await createGo(
|
||||||
|
goPath,
|
||||||
|
process.platform,
|
||||||
|
process.arch,
|
||||||
|
{
|
||||||
|
cwd: entrypointDirname,
|
||||||
|
},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
const origianlMainGoContents = await readFile(
|
||||||
|
join(__dirname, 'main.go'),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
|
const mainGoContents = origianlMainGoContents.replace(
|
||||||
|
'__NOW_HANDLER_FUNC_NAME',
|
||||||
|
handlerFunctionName
|
||||||
|
);
|
||||||
|
|
||||||
|
// in order to allow the user to have `main.go`,
|
||||||
|
// we need our `main.go` to be called something else
|
||||||
|
const mainGoFileName = 'main__now__go__.go';
|
||||||
|
|
||||||
|
// Go doesn't like to build files in different directories,
|
||||||
|
// so now we place `main.go` together with the user code
|
||||||
|
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||||
|
|
||||||
|
// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
|
||||||
|
// and download any packages that aren't part of the stdlib
|
||||||
|
console.log('Running `go get`...');
|
||||||
|
try {
|
||||||
|
await go.get();
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go get`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Running `go build`...');
|
||||||
|
const destPath = join(outDir, 'handler');
|
||||||
|
try {
|
||||||
|
const src = [
|
||||||
|
join(entrypointDirname, mainGoFileName),
|
||||||
|
downloadedFiles[entrypoint].fsPath,
|
||||||
|
];
|
||||||
|
await go.build(src, destPath);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go build`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const lambda = await createLambda({
|
||||||
|
files: { ...(await glob('**', outDir)), ...includedFiles },
|
||||||
|
handler: 'handler',
|
||||||
|
runtime: 'go1.x',
|
||||||
|
environment: {},
|
||||||
|
});
|
||||||
|
const output = {
|
||||||
|
[entrypoint]: lambda,
|
||||||
|
};
|
||||||
|
|
||||||
|
let watch = parsedAnalyzed.watch;
|
||||||
|
let watchSub: string[] = [];
|
||||||
|
// if `entrypoint` located in subdirectory
|
||||||
|
// we will need to concat it with return watch array
|
||||||
|
if (entrypointArr.length > 1) {
|
||||||
|
entrypointArr.pop();
|
||||||
|
watchSub = parsedAnalyzed.watch.map(file => join(...entrypointArr, file));
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
output,
|
||||||
|
watch: watch.concat(watchSub),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export { shouldServe };
|
||||||
6
packages/now-go/install.ts
Normal file
6
packages/now-go/install.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import { downloadGo } from './go-helpers';
|
||||||
|
|
||||||
|
downloadGo().catch(err => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
@@ -1,27 +1,34 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/go",
|
"name": "@now/go",
|
||||||
"version": "0.4.0",
|
"version": "0.5.8",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
|
"main": "./dist/index",
|
||||||
|
"homepage": "https://zeit.co/docs/v2/deployments/official-builders/go-now-go",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "https://github.com/zeit/now-builders.git",
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
"directory": "packages/now-go"
|
"directory": "packages/now-go"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"postinstall": "node ./util/install"
|
"build": "./build.sh",
|
||||||
|
"test": "./build.sh && jest",
|
||||||
|
"prepublish": "./build.sh",
|
||||||
|
"now-postinstall": "node dist/install/index.js"
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"*.js",
|
"dist"
|
||||||
"main.go",
|
|
||||||
"main__mod__.go",
|
|
||||||
"util"
|
|
||||||
],
|
],
|
||||||
"dependencies": {
|
"devDependencies": {
|
||||||
|
"@types/debug": "^4.1.3",
|
||||||
|
"@types/execa": "^0.9.0",
|
||||||
|
"@types/fs-extra": "^5.0.5",
|
||||||
|
"@types/node-fetch": "^2.3.0",
|
||||||
|
"@types/tar": "^4.0.0",
|
||||||
"debug": "^4.1.1",
|
"debug": "^4.1.1",
|
||||||
"execa": "^1.0.0",
|
"execa": "^1.0.0",
|
||||||
"fs-extra": "^7.0.0",
|
"fs-extra": "^7.0.0",
|
||||||
"mkdirp-promise": "5.0.1",
|
|
||||||
"node-fetch": "^2.2.1",
|
"node-fetch": "^2.2.1",
|
||||||
"tar": "4.4.6"
|
"tar": "4.4.6",
|
||||||
|
"typescript": "3.5.2"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,4 +8,4 @@
|
|||||||
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
||||||
{ "path": "/subdirectory", "mustContain": "subcow:RANDOMNESS_PLACEHOLDER" }
|
{ "path": "/subdirectory", "mustContain": "subcow:RANDOMNESS_PLACEHOLDER" }
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
1
packages/now-go/test/fixtures/03-env-vars/build-env/go.mod
vendored
Normal file
1
packages/now-go/test/fixtures/03-env-vars/build-env/go.mod
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
module build-env
|
||||||
17
packages/now-go/test/fixtures/03-env-vars/build-env/index.go
vendored
Normal file
17
packages/now-go/test/fixtures/03-env-vars/build-env/index.go
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package buildenv
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rdm := os.Getenv("RANDOMNESS_BUILD_ENV")
|
||||||
|
if rdm == "" {
|
||||||
|
fmt.Println("No build env received")
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, rdm+":build-env")
|
||||||
|
}
|
||||||
1
packages/now-go/test/fixtures/03-env-vars/env/go.mod
vendored
Normal file
1
packages/now-go/test/fixtures/03-env-vars/env/go.mod
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
module env
|
||||||
17
packages/now-go/test/fixtures/03-env-vars/env/index.go
vendored
Normal file
17
packages/now-go/test/fixtures/03-env-vars/env/index.go
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package env
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rdm := os.Getenv("RANDOMNESS_ENV")
|
||||||
|
if rdm == "" {
|
||||||
|
fmt.Println("No env received")
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, rdm)
|
||||||
|
}
|
||||||
18
packages/now-go/test/fixtures/03-env-vars/now.json
vendored
Normal file
18
packages/now-go/test/fixtures/03-env-vars/now.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{
|
||||||
|
"src": "env/index.go",
|
||||||
|
"use": "@now/go"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"env": {
|
||||||
|
"RANDOMNESS_ENV": "RANDOMNESS_PLACEHOLDER"
|
||||||
|
},
|
||||||
|
"probes": [
|
||||||
|
{
|
||||||
|
"path": "/env",
|
||||||
|
"mustContain": "RANDOMNESS_PLACEHOLDER"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
11
packages/now-go/test/fixtures/06-content-type/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/06-content-type/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
4
packages/now-go/test/fixtures/06-content-type/now.json
vendored
Normal file
4
packages/now-go/test/fixtures/06-content-type/now.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "index.go", "use": "@now/go" }]
|
||||||
|
}
|
||||||
7
packages/now-go/test/fixtures/07-content-length/now.json
vendored
Normal file
7
packages/now-go/test/fixtures/07-content-length/now.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "*.go", "use": "@now/go" }],
|
||||||
|
"env": {
|
||||||
|
"RANDOMNESS_ENV_VAR": "RANDOMNESS_PLACEHOLDER"
|
||||||
|
}
|
||||||
|
}
|
||||||
16
packages/now-go/test/fixtures/07-content-length/test1.go
vendored
Normal file
16
packages/now-go/test/fixtures/07-content-length/test1.go
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HandlerTest1 function
|
||||||
|
func HandlerTest1(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rdm := os.Getenv("RANDOMNESS_ENV_VAR")
|
||||||
|
|
||||||
|
w.WriteHeader(401)
|
||||||
|
w.Header().Set("content-length", strconv.Itoa(len(rdm+":content-length")))
|
||||||
|
w.Write([]byte(rdm + ":content-length"))
|
||||||
|
}
|
||||||
12
packages/now-go/test/fixtures/07-content-length/test2.go
vendored
Normal file
12
packages/now-go/test/fixtures/07-content-length/test2.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HandlerTest2 function
|
||||||
|
func HandlerTest2(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Content-Length", "2")
|
||||||
|
w.WriteHeader(401)
|
||||||
|
w.Write([]byte(""))
|
||||||
|
}
|
||||||
13
packages/now-go/test/fixtures/07-content-length/test3.go
vendored
Normal file
13
packages/now-go/test/fixtures/07-content-length/test3.go
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HandlerTest3 function
|
||||||
|
func HandlerTest3(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rev := os.Getenv("RANDOMNESS_ENV_VAR")
|
||||||
|
w.WriteHeader(401)
|
||||||
|
w.Write([]byte(rev + ":content-length"))
|
||||||
|
}
|
||||||
16
packages/now-go/test/fixtures/08-include-files/index.go
vendored
Normal file
16
packages/now-go/test/fixtures/08-include-files/index.go
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
package cowsay
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
bts, err := ioutil.ReadFile("templates/foo.txt")
|
||||||
|
if err != nil {
|
||||||
|
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||||
|
}
|
||||||
|
fmt.Fprintf(w, string(bts))
|
||||||
|
}
|
||||||
18
packages/now-go/test/fixtures/08-include-files/now.json
vendored
Normal file
18
packages/now-go/test/fixtures/08-include-files/now.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{
|
||||||
|
"src": "index.go",
|
||||||
|
"use": "@now/go",
|
||||||
|
"config": {
|
||||||
|
"includeFiles": ["templates/**"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"probes": [
|
||||||
|
{
|
||||||
|
"path": "/",
|
||||||
|
"mustContain": "foobar from file"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
1
packages/now-go/test/fixtures/08-include-files/templates/foo.txt
vendored
Normal file
1
packages/now-go/test/fixtures/08-include-files/templates/foo.txt
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
foobar from file
|
||||||
22
packages/now-go/test/fixtures/09-exported-function/index.go
vendored
Normal file
22
packages/now-go/test/fixtures/09-exported-function/index.go
vendored
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Person struct
|
||||||
|
type Person struct {
|
||||||
|
name string
|
||||||
|
age int
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewPerson struct method
|
||||||
|
func NewPerson(name string, age int) *Person {
|
||||||
|
return &Person{name: name, age: age}
|
||||||
|
}
|
||||||
|
|
||||||
|
// H func
|
||||||
|
func H(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
5
packages/now-go/test/fixtures/09-exported-function/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/09-exported-function/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "index.go", "use": "@now/go" }],
|
||||||
|
"probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/10-go-mod/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/10-go-mod/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module go-mod
|
||||||
|
|
||||||
|
go 1.12
|
||||||
11
packages/now-go/test/fixtures/10-go-mod/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/10-go-mod/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
5
packages/now-go/test/fixtures/10-go-mod/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/10-go-mod/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "index.go", "use": "@now/go" }],
|
||||||
|
"probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
12
packages/now-go/test/fixtures/11-go-mod-shared/api/index.go
vendored
Normal file
12
packages/now-go/test/fixtures/11-go-mod-shared/api/index.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"with-shared/shared"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, shared.Say("RANDOMNESS_PLACEHOLDER"))
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/11-go-mod-shared/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/11-go-mod-shared/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module with-shared
|
||||||
|
|
||||||
|
go 1.12
|
||||||
5
packages/now-go/test/fixtures/11-go-mod-shared/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/11-go-mod-shared/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "api/*.go", "use": "@now/go" }],
|
||||||
|
"probes": [{ "path": "/api", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
6
packages/now-go/test/fixtures/11-go-mod-shared/shared/shared.go
vendored
Normal file
6
packages/now-go/test/fixtures/11-go-mod-shared/shared/shared.go
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
package shared
|
||||||
|
|
||||||
|
// Say func
|
||||||
|
func Say(text string) string {
|
||||||
|
return text
|
||||||
|
}
|
||||||
8
packages/now-go/test/fixtures/12-go-mod-subs/now.json
vendored
Normal file
8
packages/now-go/test/fixtures/12-go-mod-subs/now.json
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "**/*.go", "use": "@now/go" }],
|
||||||
|
"probes": [
|
||||||
|
{ "path": "/sub-1", "mustContain": "RANDOMNESS_PLACEHOLDER" },
|
||||||
|
{ "path": "/sub-2", "mustContain": "RANDOMNESS_PLACEHOLDER" }
|
||||||
|
]
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module sub-1
|
||||||
|
|
||||||
|
go 1.12
|
||||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package sub1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module sub-2
|
||||||
|
|
||||||
|
go 1.12
|
||||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package sub2
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
12
packages/now-go/test/fixtures/13-go-mod-nested/api/nested/index.go
vendored
Normal file
12
packages/now-go/test/fixtures/13-go-mod-nested/api/nested/index.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package nested
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"with-nested/shared"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, shared.Say("lol:RANDOMNESS_PLACEHOLDER"))
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/13-go-mod-nested/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/13-go-mod-nested/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module with-nested
|
||||||
|
|
||||||
|
go 1.12
|
||||||
5
packages/now-go/test/fixtures/13-go-mod-nested/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/13-go-mod-nested/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "api/nested/*.go", "use": "@now/go" }],
|
||||||
|
"probes": [{ "path": "/api/nested", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
6
packages/now-go/test/fixtures/13-go-mod-nested/shared/shared.go
vendored
Normal file
6
packages/now-go/test/fixtures/13-go-mod-nested/shared/shared.go
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
package shared
|
||||||
|
|
||||||
|
// Say func
|
||||||
|
func Say(text string) string {
|
||||||
|
return text
|
||||||
|
}
|
||||||
11
packages/now-go/test/fixtures/14-go-mod-sub/api/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/14-go-mod-sub/api/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package handler
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "hello:RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
5
packages/now-go/test/fixtures/14-go-mod-sub/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/14-go-mod-sub/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "api/**/*.go", "use": "@now/go" }],
|
||||||
|
"probes": [{ "path": "/api", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user