mirror of
https://github.com/LukeHagar/vercel.git
synced 2025-12-11 12:57:46 +00:00
Compare commits
613 Commits
@now/php@0
...
@now/php-b
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
e186f89cfd | ||
|
|
50cade8bba | ||
|
|
13866e61f6 | ||
|
|
b72f902271 | ||
|
|
159cfe99dd | ||
|
|
1d9a96d104 | ||
|
|
245f846d3e | ||
|
|
c5ef7f3f35 | ||
|
|
ccba15a5aa | ||
|
|
f49aefa8e4 | ||
|
|
d6b36df4ce | ||
|
|
3e4dd10a79 | ||
|
|
73956706bd | ||
|
|
bd8da5360d | ||
|
|
6d5a2a4438 | ||
|
|
c88dc78e33 | ||
|
|
63ac11e9f7 | ||
|
|
1840632729 | ||
|
|
00d8eb0f65 | ||
|
|
3db58ac373 | ||
|
|
92a1720eea | ||
|
|
9abbfbe3f3 | ||
|
|
11ef8aa816 | ||
|
|
3a122ea950 | ||
|
|
737e50630a | ||
|
|
fb27b7b9be | ||
|
|
d1a4aecd2f | ||
|
|
5ef7014ed8 | ||
|
|
0ff2c9950e | ||
|
|
ddcdcdf3e2 | ||
|
|
bfc99f19d2 | ||
|
|
de2c08cfe8 | ||
|
|
9679f07124 | ||
|
|
6ce24d6a4e | ||
|
|
e3e029f5f6 | ||
|
|
89172a6e89 | ||
|
|
e8f1dbaa46 | ||
|
|
16b5b6fdf3 | ||
|
|
3bab29ff76 | ||
|
|
d675d2e668 | ||
|
|
2dda88e676 | ||
|
|
5a0090eb1f | ||
|
|
d438b4ec4e | ||
|
|
f8810fd7e6 | ||
|
|
a642cfea96 | ||
|
|
2daa20a9f2 | ||
|
|
4d5c0c40f0 | ||
|
|
29051681df | ||
|
|
96d5e81538 | ||
|
|
9ba9dd6949 | ||
|
|
b362d57270 | ||
|
|
4ff95e1718 | ||
|
|
ef02bedd4d | ||
|
|
ed68a09c3e | ||
|
|
ac7ae5fc5d | ||
|
|
9727b1f020 | ||
|
|
2dc454f15f | ||
|
|
4463af5c7a | ||
|
|
c00fb37cf6 | ||
|
|
4deb426f9c | ||
|
|
008b04413a | ||
|
|
f177ba46e9 | ||
|
|
c030fce589 | ||
|
|
50a5150bb5 | ||
|
|
0578ccf47e | ||
|
|
e32cd36ded | ||
|
|
6ac0ab121c | ||
|
|
05db2e6a73 | ||
|
|
0b89d30d6c | ||
|
|
8a021c9417 | ||
|
|
f218771382 | ||
|
|
17309291ed | ||
|
|
86300577ae | ||
|
|
f9594e0d61 | ||
|
|
20fd4b2e12 | ||
|
|
718e4d0e0c | ||
|
|
dc3584cd08 | ||
|
|
b41788b241 | ||
|
|
af9a2f9792 | ||
|
|
f8b8e760de | ||
|
|
93d6ec8024 | ||
|
|
7ed6b84056 | ||
|
|
31da488365 | ||
|
|
8eaf05f782 | ||
|
|
9311e90f27 | ||
|
|
c0de970de2 | ||
|
|
465ac2093d | ||
|
|
19ab0e8698 | ||
|
|
02fa98e5e3 | ||
|
|
4aef9d48b0 | ||
|
|
bd2d05344e | ||
|
|
edc7696623 | ||
|
|
e2f91094bc | ||
|
|
38dba57378 | ||
|
|
be6a6ba1d7 | ||
|
|
31fb5d9ec8 | ||
|
|
6c8f946a48 | ||
|
|
d59e1b9789 | ||
|
|
2852d3fbc3 | ||
|
|
d0292eb751 | ||
|
|
17bbf69346 | ||
|
|
4fb4229c90 | ||
|
|
03b7586b50 | ||
|
|
a1427866ca | ||
|
|
5f787b8146 | ||
|
|
b03405a665 | ||
|
|
4393dad15a | ||
|
|
b4d604b2e9 | ||
|
|
5fb6e5c0ba | ||
|
|
9d7dd3a713 | ||
|
|
4f867b320d | ||
|
|
c153690104 | ||
|
|
8c1b96edf7 | ||
|
|
15c83a69f7 | ||
|
|
0986de85ee | ||
|
|
94c5d83ccc | ||
|
|
ff49b9d32d | ||
|
|
ec5290dab1 | ||
|
|
4f758ec84e | ||
|
|
7951be156a | ||
|
|
1bafc1d7b7 | ||
|
|
1493101325 | ||
|
|
824b044a96 | ||
|
|
0978be4c3d | ||
|
|
dc832aa6c3 | ||
|
|
8df77fe4fa | ||
|
|
ff413b45fa | ||
|
|
e7befb5dc1 | ||
|
|
b898f82771 | ||
|
|
e6b22cb0df | ||
|
|
cbfe4a133d | ||
|
|
823b78c626 | ||
|
|
03e1255043 | ||
|
|
3373cbca4e | ||
|
|
4fba4b5f67 | ||
|
|
9fcf6da3c1 | ||
|
|
d8a5da6a3e | ||
|
|
48f7b72bb2 | ||
|
|
8e2d5de446 | ||
|
|
2b3efb06be | ||
|
|
13e57bf68f | ||
|
|
dc3f112d4f | ||
|
|
da1c423033 | ||
|
|
5e1d58f0e8 | ||
|
|
8940f7fa33 | ||
|
|
0aeecd81d3 | ||
|
|
bd4cb3d2a0 | ||
|
|
b3d14f536d | ||
|
|
445d4d84cb | ||
|
|
30d4ec8cbd | ||
|
|
51355c263c | ||
|
|
ec6d695f24 | ||
|
|
da910dc097 | ||
|
|
00cb55f953 | ||
|
|
54ee557187 | ||
|
|
e9a49f658b | ||
|
|
f3484bc7c9 | ||
|
|
f76d200fd2 | ||
|
|
24ba90bfeb | ||
|
|
9f0fdada18 | ||
|
|
a987c4b298 | ||
|
|
a746adda93 | ||
|
|
6a00f02137 | ||
|
|
65558f8197 | ||
|
|
a8bf77091f | ||
|
|
ee179b9b52 | ||
|
|
b480b07cce | ||
|
|
fc8452abfd | ||
|
|
48b6d0ebfc | ||
|
|
a3d6cea3c6 | ||
|
|
8a61b1b513 | ||
|
|
50e648d28a | ||
|
|
52994bfe26 | ||
|
|
1339f17585 | ||
|
|
9dd12cf1a7 | ||
|
|
6dab09f38e | ||
|
|
c79d7be591 | ||
|
|
9af3425d6d | ||
|
|
0700c16504 | ||
|
|
4e55d9f709 | ||
|
|
945eb24bdc | ||
|
|
c884102401 | ||
|
|
36e79efd7f | ||
|
|
21ee0f3707 | ||
|
|
ea5d3b8e80 | ||
|
|
301e0f216b | ||
|
|
7a6fbd8c3d | ||
|
|
77e7a0f502 | ||
|
|
6bc42bbce9 | ||
|
|
de88969c46 | ||
|
|
e86cd38787 | ||
|
|
dc1badc931 | ||
|
|
ed3c176f5c | ||
|
|
749ee5264c | ||
|
|
9808ea1d8f | ||
|
|
a77e7109c7 | ||
|
|
3b87c7ca83 | ||
|
|
1887df779a | ||
|
|
daccd0d8fc | ||
|
|
fc9bbd2578 | ||
|
|
f23f6ca643 | ||
|
|
c8d90fbcd1 | ||
|
|
f4247da49a | ||
|
|
9d781403ef | ||
|
|
ca188cf8e2 | ||
|
|
207d895c0c | ||
|
|
685821976d | ||
|
|
fef5638cb9 | ||
|
|
073ed247ad | ||
|
|
f071788ce6 | ||
|
|
16f24bc3c8 | ||
|
|
97fe3d489d | ||
|
|
522d3a530c | ||
|
|
bafb49c464 | ||
|
|
7d5bd91e23 | ||
|
|
213614881c | ||
|
|
a225a4f855 | ||
|
|
ed2fd1dd29 | ||
|
|
bd33528fc7 | ||
|
|
16969803f8 | ||
|
|
03cc4c0b01 | ||
|
|
0b9699da75 | ||
|
|
6737011a63 | ||
|
|
6d2b0e014c | ||
|
|
409359bfec | ||
|
|
2151812596 | ||
|
|
22860be6d0 | ||
|
|
78c3cbd7b4 | ||
|
|
a458a55e99 | ||
|
|
911d85be39 | ||
|
|
98b5a4b0e9 | ||
|
|
5f80e451b8 | ||
|
|
0288f2d1a3 | ||
|
|
e39a5eca04 | ||
|
|
d4493f7d39 | ||
|
|
145e5a10c2 | ||
|
|
bd2d289252 | ||
|
|
a673e5f752 | ||
|
|
b2dc31a6b4 | ||
|
|
62a308bed7 | ||
|
|
ac08bfd26f | ||
|
|
d7f1371799 | ||
|
|
c97ad02aca | ||
|
|
c0460b734d | ||
|
|
3b0ed55b57 | ||
|
|
402153f076 | ||
|
|
6ec823e292 | ||
|
|
a9af9ebb5a | ||
|
|
ce88a64693 | ||
|
|
490cd8363e | ||
|
|
71d1651797 | ||
|
|
0da7197c3e | ||
|
|
950a4e98e9 | ||
|
|
8258ede23f | ||
|
|
77f84fe2aa | ||
|
|
5c4b946864 | ||
|
|
dfc51ad97f | ||
|
|
d32afc8332 | ||
|
|
9d1263ccc2 | ||
|
|
7bf2cfb3dc | ||
|
|
9b37460c4f | ||
|
|
b7f8b37ca6 | ||
|
|
13aa1b2d1c | ||
|
|
92437c075e | ||
|
|
331c263587 | ||
|
|
7d4f6f636b | ||
|
|
5e90ef8e34 | ||
|
|
4885d680a7 | ||
|
|
97cbe0b894 | ||
|
|
301eea90ee | ||
|
|
ea4f9dd930 | ||
|
|
38928ab942 | ||
|
|
bfb67d10ec | ||
|
|
616bad8a3d | ||
|
|
e026ddf805 | ||
|
|
bec9ea101f | ||
|
|
54f3f755fb | ||
|
|
5b03109ba7 | ||
|
|
7ff9e810ff | ||
|
|
3036aff45e | ||
|
|
c366aa69a4 | ||
|
|
c8d225522d | ||
|
|
8ee5063669 | ||
|
|
9372e70747 | ||
|
|
4a4bd550a1 | ||
|
|
f53343d547 | ||
|
|
e4ed811b53 | ||
|
|
e9935dee31 | ||
|
|
2e1e6bb131 | ||
|
|
4a01ac4bd0 | ||
|
|
bd1a7c428f | ||
|
|
9a4a3dac47 | ||
|
|
4f2c35a0ee | ||
|
|
672df5d026 | ||
|
|
8cb648abc4 | ||
|
|
74f658c634 | ||
|
|
efbb54a232 | ||
|
|
3e2bd03e01 | ||
|
|
8dc92b70b9 | ||
|
|
4267be4e5a | ||
|
|
43ba6459eb | ||
|
|
8c5638915d | ||
|
|
3fab247c15 | ||
|
|
6ab0e2e9ab | ||
|
|
34369148d7 | ||
|
|
662ad1ed3a | ||
|
|
890cd74ee5 | ||
|
|
7ef616b31e | ||
|
|
bebcfa4bb5 | ||
|
|
25100c53aa | ||
|
|
fe20da87e7 | ||
|
|
18cb147c86 | ||
|
|
9c9e18586f | ||
|
|
0cd7192740 | ||
|
|
a2d9c4fb4b | ||
|
|
02fafd2ebc | ||
|
|
42577c915c | ||
|
|
73db9e11dd | ||
|
|
3125125c16 | ||
|
|
5335291408 | ||
|
|
36620559f9 | ||
|
|
360ea3a609 | ||
|
|
1cd362126c | ||
|
|
ae19fe95f6 | ||
|
|
3e34d402a2 | ||
|
|
cc7b97fbbb | ||
|
|
c1049985af | ||
|
|
214388ccf3 | ||
|
|
b1d6b7bfc0 | ||
|
|
ece3564dfd | ||
|
|
a88af1f077 | ||
|
|
d92f7b26c0 | ||
|
|
52198af750 | ||
|
|
d58bff2453 | ||
|
|
8c0a144ae4 | ||
|
|
106e4d5f36 | ||
|
|
66c28bd695 | ||
|
|
55e75296ff | ||
|
|
36cbb36737 | ||
|
|
978ca328ef | ||
|
|
7b383e0f7c | ||
|
|
faa5ab36aa | ||
|
|
c0a21969dd | ||
|
|
73d0a1723f | ||
|
|
7c515544ae | ||
|
|
b53c9a6299 | ||
|
|
35ff11e6e4 | ||
|
|
64ee4905cd | ||
|
|
e50dd7e50a | ||
|
|
6101ba9d95 | ||
|
|
8dc0c92c58 | ||
|
|
44c9f3765a | ||
|
|
92c05ca338 | ||
|
|
069b557906 | ||
|
|
692a0df909 | ||
|
|
aeafeb5441 | ||
|
|
a09d5fb355 | ||
|
|
d8017aa9aa | ||
|
|
702f56b9b5 | ||
|
|
183b117152 | ||
|
|
75b3fb4981 | ||
|
|
49e63de5fe | ||
|
|
4742cd32f2 | ||
|
|
377b73105d | ||
|
|
a5577efb3d | ||
|
|
2ec46dc5c9 | ||
|
|
42708ed93c | ||
|
|
2fabe95f6e | ||
|
|
ac1a3dab22 | ||
|
|
ad4011512d | ||
|
|
9ff1a25c8f | ||
|
|
8039b3d377 | ||
|
|
dd9017475c | ||
|
|
031499014f | ||
|
|
2a68d2a2ad | ||
|
|
31299fae6e | ||
|
|
4bac0db379 | ||
|
|
95e7d459d3 | ||
|
|
dd120b8d20 | ||
|
|
b6975676e5 | ||
|
|
a7951dae81 | ||
|
|
b0c918f7fb | ||
|
|
df54dc7dc9 | ||
|
|
0dd801ff6c | ||
|
|
398743ef95 | ||
|
|
337c74b81b | ||
|
|
680bb82ec3 | ||
|
|
17ed5411e3 | ||
|
|
d9bbcb6939 | ||
|
|
800e4de76f | ||
|
|
864dd468d9 | ||
|
|
ba833871bb | ||
|
|
e732bac78e | ||
|
|
28ea4015b4 | ||
|
|
a93d97cabd | ||
|
|
67f39f7c9b | ||
|
|
acd793b9e9 | ||
|
|
f74d61279d | ||
|
|
fcb8eacec0 | ||
|
|
c8fca2ba72 | ||
|
|
4feffa13eb | ||
|
|
3e330b25f4 | ||
|
|
9b2cae33af | ||
|
|
4b6371530c | ||
|
|
9e1d577fc0 | ||
|
|
cf2f542c71 | ||
|
|
e608861e4e | ||
|
|
a99b999209 | ||
|
|
fd9c6e7847 | ||
|
|
b2ad3a6147 | ||
|
|
997d3c2a30 | ||
|
|
ca575bf0a6 | ||
|
|
4c2e93ccef | ||
|
|
4d6437d235 | ||
|
|
0d8058d062 | ||
|
|
2b5cdfc0a7 | ||
|
|
69a41f78fb | ||
|
|
a013d59d62 | ||
|
|
173a29cfdb | ||
|
|
3f73451311 | ||
|
|
2fc706be43 | ||
|
|
0fb7eb6093 | ||
|
|
aa43c0bc87 | ||
|
|
3c5925a6e3 | ||
|
|
9fc7b047f5 | ||
|
|
ecae29457f | ||
|
|
77d445af71 | ||
|
|
79251ad180 | ||
|
|
a215dc9103 | ||
|
|
ea7d8f0f6c | ||
|
|
2232efc984 | ||
|
|
b64ce0f3c0 | ||
|
|
74233d50ad | ||
|
|
8aebec9fc3 | ||
|
|
54584b7763 | ||
|
|
d163fcbd71 | ||
|
|
04c2996c76 | ||
|
|
2b69b898ed | ||
|
|
846aa11d6a | ||
|
|
a314a74479 | ||
|
|
40f029a72c | ||
|
|
493d8a778f | ||
|
|
cb5dcb658f | ||
|
|
d77287d07b | ||
|
|
61d66bd957 | ||
|
|
ae73df9e3c | ||
|
|
cb09ae5bbf | ||
|
|
a28eeacdaa | ||
|
|
dd9d46d555 | ||
|
|
4472331ee0 | ||
|
|
ac69836b44 | ||
|
|
15949a4ab4 | ||
|
|
697ada9d73 | ||
|
|
cafbe30fa3 | ||
|
|
583ebcc526 | ||
|
|
52d1bd410c | ||
|
|
11d0753bc1 | ||
|
|
538710fe56 | ||
|
|
2828c89e8d | ||
|
|
e40b45a939 | ||
|
|
38ba8a36fc | ||
|
|
0323c505a3 | ||
|
|
17ee07f4f6 | ||
|
|
0a6ada77ac | ||
|
|
4d817dd67d | ||
|
|
9682a7cc0b | ||
|
|
3456f23b3e | ||
|
|
800ca2cb0e | ||
|
|
ba54b4d706 | ||
|
|
e9482d66a9 | ||
|
|
401b669363 | ||
|
|
a2a0ede1f6 | ||
|
|
3c9fcff743 | ||
|
|
e5aa526583 | ||
|
|
822b0ee3de | ||
|
|
d612e46233 | ||
|
|
77ee10cead | ||
|
|
fb2029c464 | ||
|
|
3b15755054 | ||
|
|
4f65cc3aa8 | ||
|
|
9936e35280 | ||
|
|
a04fd242b8 | ||
|
|
17bc6174a9 | ||
|
|
a7c2d9648a | ||
|
|
faa5ea9e21 | ||
|
|
c52f30c898 | ||
|
|
d675edf1dc | ||
|
|
f85c4f496f | ||
|
|
d52d7904c2 | ||
|
|
79232024bd | ||
|
|
660b787bc3 | ||
|
|
2dbf983ddb | ||
|
|
0866ba9391 | ||
|
|
d259a722a0 | ||
|
|
bf77c51f64 | ||
|
|
062b78130c | ||
|
|
fa70bc50cb | ||
|
|
08e22b35d1 | ||
|
|
9d8f3315a1 | ||
|
|
a737a99a9d | ||
|
|
ee92d92c9f | ||
|
|
34d3ebd289 | ||
|
|
785f187e5d | ||
|
|
44449f474e | ||
|
|
f44dae7f39 | ||
|
|
06f973f641 | ||
|
|
47bb47804e | ||
|
|
5df692979a | ||
|
|
cd02d5390f | ||
|
|
c93fd416c4 | ||
|
|
431db7a62d | ||
|
|
6f86c70313 | ||
|
|
0923fc9200 | ||
|
|
787675f462 | ||
|
|
977615720a | ||
|
|
1a4e64cd27 | ||
|
|
c9fc255002 | ||
|
|
e83d4d4249 | ||
|
|
d2ca763079 | ||
|
|
2a95388f89 | ||
|
|
be9fedfdc4 | ||
|
|
f0dee65f69 | ||
|
|
5514753c07 | ||
|
|
7028556919 | ||
|
|
a1f24853fc | ||
|
|
9804e82f8f | ||
|
|
e96596634b | ||
|
|
495c193a40 | ||
|
|
9d77d89513 | ||
|
|
bcffe0db91 | ||
|
|
228892fe9f | ||
|
|
c7d434c81d | ||
|
|
66bf4aa25a | ||
|
|
f328a53a3f | ||
|
|
168b4cf2bb | ||
|
|
a91e3da7d8 | ||
|
|
756c452203 | ||
|
|
80db1937f8 | ||
|
|
d9df4b0929 | ||
|
|
08c52ebbf2 | ||
|
|
fe8ead391a | ||
|
|
5d45f2df31 | ||
|
|
1385c1ca48 | ||
|
|
4c63009123 | ||
|
|
389c85cef5 | ||
|
|
227c239bbc | ||
|
|
b93736132a | ||
|
|
26849422ff | ||
|
|
31dd1ca491 | ||
|
|
04a1ce89b3 | ||
|
|
3e247fd16d | ||
|
|
9e127421ad | ||
|
|
337d0cb1ed | ||
|
|
ad3cdf46f4 | ||
|
|
9a93713158 | ||
|
|
3392ef5636 | ||
|
|
9236e2b584 | ||
|
|
ffc4686ab9 | ||
|
|
1ff4b6bb4b | ||
|
|
0e7d8c3ff1 | ||
|
|
86885b8c38 | ||
|
|
8563f2b8b5 | ||
|
|
c36938bd95 | ||
|
|
957ab6c4e3 | ||
|
|
069e9b4052 | ||
|
|
0b06704be6 | ||
|
|
cf5a49e699 | ||
|
|
b671b23d8d | ||
|
|
fa44c23225 | ||
|
|
ba9da3cec5 | ||
|
|
0909aaf2b3 | ||
|
|
e276342a6f | ||
|
|
f6e1bdf0a1 | ||
|
|
2c2189b661 | ||
|
|
61522e2bc7 | ||
|
|
a20f3a5920 | ||
|
|
a5e58c2a8d | ||
|
|
a58451548f | ||
|
|
8e846c50b5 | ||
|
|
7613dcf68b | ||
|
|
bea499cad3 | ||
|
|
0f20a0dbbf | ||
|
|
1ea30588a3 | ||
|
|
a3b670675a | ||
|
|
c4374fe5a8 | ||
|
|
c3983ba1c2 | ||
|
|
ae1be4b243 | ||
|
|
b4b1131619 | ||
|
|
5fd923a7e7 | ||
|
|
f4e95eb59c | ||
|
|
a6cdaccd34 | ||
|
|
4f5fe8eba8 | ||
|
|
7239013989 | ||
|
|
75000ee334 | ||
|
|
5319610d59 | ||
|
|
6df0d69afa | ||
|
|
65b191f6b9 | ||
|
|
115f62bbba | ||
|
|
cdf6a98a18 | ||
|
|
bb75dfd993 | ||
|
|
b63062cfc5 | ||
|
|
5ad6dabd96 | ||
|
|
30060bee07 | ||
|
|
dd48a1c6d3 | ||
|
|
24d45736f1 | ||
|
|
d839fdbe81 | ||
|
|
cfdcd2f8d0 | ||
|
|
b3cac2b372 | ||
|
|
9d27d69656 | ||
|
|
04197b250c | ||
|
|
ce6f43eae7 | ||
|
|
2cfafe4054 | ||
|
|
b195949881 |
12
.circleci/build.sh
Executable file
12
.circleci/build.sh
Executable file
@@ -0,0 +1,12 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
circleci_dir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
|
||||||
|
needs_build="$(grep -rn '"build"' packages/*/package.json | cut -d: -f1)"
|
||||||
|
|
||||||
|
for pkg in $needs_build; do
|
||||||
|
dir="$(dirname "$pkg")"
|
||||||
|
cd "$circleci_dir/../$dir"
|
||||||
|
echo "Building \`$dir\`"
|
||||||
|
yarn build
|
||||||
|
done
|
||||||
@@ -4,6 +4,8 @@ jobs:
|
|||||||
docker:
|
docker:
|
||||||
- image: circleci/node:10
|
- image: circleci/node:10
|
||||||
working_directory: ~/repo
|
working_directory: ~/repo
|
||||||
|
environment:
|
||||||
|
GOPATH: $HOME/go
|
||||||
steps:
|
steps:
|
||||||
- checkout
|
- checkout
|
||||||
- run:
|
- run:
|
||||||
@@ -18,71 +20,17 @@ jobs:
|
|||||||
- run:
|
- run:
|
||||||
name: Bootstrapping
|
name: Bootstrapping
|
||||||
command: yarn bootstrap
|
command: yarn bootstrap
|
||||||
|
- run:
|
||||||
|
name: Building
|
||||||
|
command: ./.circleci/build.sh
|
||||||
- run:
|
- run:
|
||||||
name: Linting
|
name: Linting
|
||||||
command: yarn lint
|
command: yarn lint
|
||||||
- run:
|
- run:
|
||||||
name: Tests
|
name: Tests and Coverage
|
||||||
command: yarn test
|
command: yarn test-coverage
|
||||||
# publish-canary:
|
|
||||||
# docker:
|
|
||||||
# - image: circleci/node:10
|
|
||||||
# working_directory: ~/repo
|
|
||||||
# steps:
|
|
||||||
# - checkout
|
|
||||||
# - run:
|
|
||||||
# name: Updating apt-get list
|
|
||||||
# command: sudo apt-get update
|
|
||||||
# - run:
|
|
||||||
# name: Installing the latest version of Go
|
|
||||||
# command: sudo apt-get install golang-go
|
|
||||||
# - run: yarn install
|
|
||||||
# - run: yarn bootstrap
|
|
||||||
# - run:
|
|
||||||
# name: Saving Authentication Information
|
|
||||||
# command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
|
||||||
# - run:
|
|
||||||
# name: Publishing to Canary Channel
|
|
||||||
# command: yarn run lerna publish from-git --npm-tag canary --yes
|
|
||||||
publish-stable:
|
|
||||||
docker:
|
|
||||||
- image: circleci/node:10
|
|
||||||
working_directory: ~/repo
|
|
||||||
steps:
|
|
||||||
- checkout
|
|
||||||
- run:
|
|
||||||
name: Updating apt-get list
|
|
||||||
command: sudo apt-get update
|
|
||||||
- run:
|
|
||||||
name: Installing the latest version of Go
|
|
||||||
command: sudo apt-get install golang-go
|
|
||||||
- run: yarn install
|
|
||||||
- run: yarn bootstrap
|
|
||||||
- run:
|
|
||||||
name: Saving Authentication Information
|
|
||||||
command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
|
||||||
- run:
|
|
||||||
name: Publishing to Stable Channel
|
|
||||||
command: yarn run lerna publish from-git --yes
|
|
||||||
workflows:
|
workflows:
|
||||||
version: 2
|
version: 2
|
||||||
unscheduled:
|
build-and-test:
|
||||||
jobs:
|
jobs:
|
||||||
- build:
|
- build
|
||||||
filters:
|
|
||||||
tags:
|
|
||||||
only: /.*/
|
|
||||||
- publish-stable:
|
|
||||||
requires:
|
|
||||||
- build
|
|
||||||
filters:
|
|
||||||
tags:
|
|
||||||
only: /^.*(\d+\.)(\d+\.)(\*|\d+)$/
|
|
||||||
branches:
|
|
||||||
ignore: /.*/
|
|
||||||
# - publish-canary:
|
|
||||||
# requires:
|
|
||||||
# - build
|
|
||||||
# filters:
|
|
||||||
# tags:
|
|
||||||
# only: /^.*canary.*($|\b)/
|
|
||||||
|
|||||||
31
.circleci/publish.sh
Executable file
31
.circleci/publish.sh
Executable file
@@ -0,0 +1,31 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
if [ -z "$NPM_TOKEN" ]; then
|
||||||
|
echo "NPM_TOKEN not found. Did you forget to assign the GitHub Action secret?"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||||
|
|
||||||
|
if [ ! -e ~/.npmrc ]; then
|
||||||
|
echo "~/.npmrc file does not exist, skipping publish"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
npm_tag=""
|
||||||
|
tag="$(git describe --tags --exact-match 2> /dev/null || :)"
|
||||||
|
|
||||||
|
if [ -z "$tag" ]; then
|
||||||
|
echo "Not a tagged commit, skipping publish"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$tag" =~ -canary ]]; then
|
||||||
|
echo "Publishing canary release"
|
||||||
|
npm_tag="--npm-tag canary"
|
||||||
|
else
|
||||||
|
echo "Publishing stable release"
|
||||||
|
fi
|
||||||
|
|
||||||
|
yarn run lerna publish from-git $npm_tag --yes
|
||||||
43
.editorconfig
Normal file
43
.editorconfig
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
end_of_line = lf
|
||||||
|
charset = utf-8
|
||||||
|
trim_trailing_whitespace = true
|
||||||
|
insert_final_newline = true
|
||||||
|
|
||||||
|
[{*.json,*.json.example,*.gyp,*.yml,*.yaml,*.workflow}]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[{*.py,*.asm}]
|
||||||
|
indent_style = space
|
||||||
|
|
||||||
|
[*.py]
|
||||||
|
indent_size = 4
|
||||||
|
|
||||||
|
[*.asm]
|
||||||
|
indent_size = 8
|
||||||
|
|
||||||
|
[*.md]
|
||||||
|
trim_trailing_whitespace = false
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
# Ideal settings - some plugins might support these
|
||||||
|
[*.js,*.jsx,*.ts,*.tsx]
|
||||||
|
quote_type = single
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.java,*.go,*.rs,*.php,*.ng,*.d,*.cs,*.swift}]
|
||||||
|
indent_style = tab
|
||||||
|
indent_size = 4
|
||||||
|
tab_width = 4
|
||||||
|
|
||||||
|
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.tsx,*.d,*.cs,*.swift}]
|
||||||
|
curly_bracket_next_line = false
|
||||||
|
spaces_around_operators = true
|
||||||
|
spaces_around_brackets = outside
|
||||||
|
# close enough to 1TB
|
||||||
|
indent_brace_style = K&R
|
||||||
@@ -1,3 +1,17 @@
|
|||||||
/tmp/*
|
/tmp/*
|
||||||
/node_modules/*
|
/node_modules/*
|
||||||
/**/node_modules/*
|
/**/node_modules/*
|
||||||
|
/packages/now-go/go/*
|
||||||
|
/packages/now-build-utils/dist/*
|
||||||
|
/packages/now-build-utils/src/*.js
|
||||||
|
/packages/now-build-utils/src/fs/*.js
|
||||||
|
/packages/now-node/dist/*
|
||||||
|
/packages/now-layer-node/dist/*
|
||||||
|
/packages/now-layer-npm/dist/*
|
||||||
|
/packages/now-layer-yarn/dist/*
|
||||||
|
/packages/now-next/dist/*
|
||||||
|
/packages/now-node-bridge/*
|
||||||
|
/packages/now-python/dist/*
|
||||||
|
/packages/now-optipng/dist/*
|
||||||
|
/packages/now-go/*
|
||||||
|
/packages/now-rust/dist/*
|
||||||
|
|||||||
@@ -8,7 +8,7 @@
|
|||||||
},
|
},
|
||||||
"overrides": [
|
"overrides": [
|
||||||
{
|
{
|
||||||
"files": ["test/**"],
|
"files": ["**/test/**"],
|
||||||
"rules": {
|
"rules": {
|
||||||
"import/no-extraneous-dependencies": 0
|
"import/no-extraneous-dependencies": 0
|
||||||
},
|
},
|
||||||
|
|||||||
9
.github/CODEOWNERS
vendored
Normal file
9
.github/CODEOWNERS
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# Documentation
|
||||||
|
# https://help.github.com/en/articles/about-code-owners
|
||||||
|
|
||||||
|
* @styfle
|
||||||
|
/packages/now-node @styfle @tootallnate
|
||||||
|
/packages/now-next @timer @dav-is
|
||||||
|
/packages/now-go @styfle @sophearak
|
||||||
|
/packages/now-python @styfle @sophearak
|
||||||
|
/packages/now-rust @styfle @mike-engel @anmonteiro
|
||||||
76
.github/main.workflow
vendored
Normal file
76
.github/main.workflow
vendored
Normal file
@@ -0,0 +1,76 @@
|
|||||||
|
workflow "Canary publish" {
|
||||||
|
on = "push"
|
||||||
|
resolves = ["3. Canary yarn run publish"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Canary filter" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
args = "branch canary"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Canary PR not deleted" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
needs = ["0. Canary filter"]
|
||||||
|
args = "not deleted"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "1. Canary yarn install" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["0. Canary PR not deleted"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "install"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "2. Canary yarn run build" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["1. Canary yarn install"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "run build"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "3. Canary yarn run publish" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["2. Canary yarn run build"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "run publish-from-github"
|
||||||
|
secrets = ["NPM_TOKEN"]
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
workflow "Master publish" {
|
||||||
|
on = "push"
|
||||||
|
resolves = ["3. Master yarn run publish"]
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Master filter" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
args = "branch master"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "0. Master PR not deleted" {
|
||||||
|
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||||
|
needs = ["0. Master filter"]
|
||||||
|
args = "not deleted"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "1. Master yarn install" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["0. Master PR not deleted"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "install"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "2. Master yarn run build" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["1. Master yarn install"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "run build"
|
||||||
|
}
|
||||||
|
|
||||||
|
action "3. Master yarn run publish" {
|
||||||
|
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||||
|
needs = ["2. Master yarn run build"]
|
||||||
|
runs = "yarn"
|
||||||
|
args = "run publish-from-github"
|
||||||
|
secrets = ["NPM_TOKEN"]
|
||||||
|
}
|
||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,2 +1,6 @@
|
|||||||
node_modules
|
node_modules
|
||||||
tmp
|
tmp
|
||||||
|
target/
|
||||||
|
.next
|
||||||
|
coverage
|
||||||
|
*.tgz
|
||||||
|
|||||||
3
.vscode/settings.json
vendored
Normal file
3
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"eslint.enable": false
|
||||||
|
}
|
||||||
74
CODE_OF_CONDUCT.md
Normal file
74
CODE_OF_CONDUCT.md
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
## Code of Conduct
|
||||||
|
|
||||||
|
### Our Pledge
|
||||||
|
|
||||||
|
In the interest of fostering an open and welcoming environment, we as
|
||||||
|
contributors and maintainers pledge to making participation in our project and
|
||||||
|
our community a harassment-free experience for everyone, regardless of age, body
|
||||||
|
size, disability, ethnicity, gender identity and expression, level of experience,
|
||||||
|
nationality, personal appearance, race, religion, or sexual identity and
|
||||||
|
orientation.
|
||||||
|
|
||||||
|
### Our Standards
|
||||||
|
|
||||||
|
Examples of behavior that contributes to creating a positive environment
|
||||||
|
include:
|
||||||
|
|
||||||
|
- Using welcoming and inclusive language
|
||||||
|
- Being respectful of differing viewpoints and experiences
|
||||||
|
- Gracefully accepting constructive criticism
|
||||||
|
- Focusing on what is best for the community
|
||||||
|
- Showing empathy towards other community members
|
||||||
|
|
||||||
|
Examples of unacceptable behavior by participants include:
|
||||||
|
|
||||||
|
- The use of sexualized language or imagery and unwelcome sexual attention or
|
||||||
|
advances
|
||||||
|
- Trolling, insulting/derogatory comments, and personal or political attacks
|
||||||
|
- Public or private harassment
|
||||||
|
- Publishing others' private information, such as a physical or electronic
|
||||||
|
address, without explicit permission
|
||||||
|
- Other conduct which could reasonably be considered inappropriate in a
|
||||||
|
professional setting
|
||||||
|
|
||||||
|
### Our Responsibilities
|
||||||
|
|
||||||
|
Project maintainers are responsible for clarifying the standards of acceptable
|
||||||
|
behavior and are expected to take appropriate and fair corrective action in
|
||||||
|
response to any instances of unacceptable behavior.
|
||||||
|
|
||||||
|
Project maintainers have the right and responsibility to remove, edit, or
|
||||||
|
reject comments, commits, code, wiki edits, issues, and other contributions
|
||||||
|
that are not aligned to this Code of Conduct, or to ban temporarily or
|
||||||
|
permanently any contributor for other behaviors that they deem inappropriate,
|
||||||
|
threatening, offensive, or harmful.
|
||||||
|
|
||||||
|
### Scope
|
||||||
|
|
||||||
|
This Code of Conduct applies both within project spaces and in public spaces
|
||||||
|
when an individual is representing the project or its community. Examples of
|
||||||
|
representing a project or community include using an official project e-mail
|
||||||
|
address, posting via an official social media account, or acting as an appointed
|
||||||
|
representative at an online or offline event. Representation of a project may be
|
||||||
|
further defined and clarified by project maintainers.
|
||||||
|
|
||||||
|
### Enforcement
|
||||||
|
|
||||||
|
Instances of abusive, harassing, or otherwise unacceptable behavior may be
|
||||||
|
reported by contacting the project team at [abuse@zeit.co](mailto:abuse@zeit.co). All
|
||||||
|
complaints will be reviewed and investigated and will result in a response that
|
||||||
|
is deemed necessary and appropriate to the circumstances. The project team is
|
||||||
|
obligated to maintain confidentiality with regard to the reporter of an incident.
|
||||||
|
Further details of specific enforcement policies may be posted separately.
|
||||||
|
|
||||||
|
Project maintainers who do not follow or enforce the Code of Conduct in good
|
||||||
|
faith may face temporary or permanent repercussions as determined by other
|
||||||
|
members of the project's leadership.
|
||||||
|
|
||||||
|
### Attribution
|
||||||
|
|
||||||
|
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
|
||||||
|
available at [http://contributor-covenant.org/version/1/4][version]
|
||||||
|
|
||||||
|
[homepage]: http://contributor-covenant.org
|
||||||
|
[version]: http://contributor-covenant.org/version/1/4/
|
||||||
13
README.md
13
README.md
@@ -1,8 +1,11 @@
|
|||||||
# now-builders
|
# now-builders
|
||||||
|
|
||||||
This is the full list of official Builders provided by the ZEIT team.
|
This is a monorepo containing the [Official Builders](https://zeit.co/docs/v2/deployments/builders/overview) provided by the ZEIT team.
|
||||||
|
|
||||||
More details here: http://zeit.co/docs
|
There are two branches:
|
||||||
|
|
||||||
|
- canary - published to npm as `canary` dist-tag, eg `@now/node@canary`
|
||||||
|
- master - published to npm as `latest` dist-tag, eg `@now/node@latest`
|
||||||
|
|
||||||
### Publishing to npm
|
### Publishing to npm
|
||||||
|
|
||||||
@@ -19,3 +22,9 @@ For the canary channel use:
|
|||||||
```
|
```
|
||||||
yarn publish-canary
|
yarn publish-canary
|
||||||
```
|
```
|
||||||
|
|
||||||
|
GitHub Actions will take care of publishing the updated packages to npm from there.
|
||||||
|
|
||||||
|
If for some reason GitHub Actions fails to publish the npm package, you may do so
|
||||||
|
manually by running `npm publish` from the package directory. Make sure to
|
||||||
|
use `npm publish --tag canary` if you are publishing a canary release!
|
||||||
|
|||||||
72
errors/now-next-legacy-mode.md
Normal file
72
errors/now-next-legacy-mode.md
Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
# `@now/next` Legacy Mode
|
||||||
|
|
||||||
|
#### Why This Warning Occurred
|
||||||
|
|
||||||
|
`@now/next` has two modes: `legacy` and `serverless`. You will always want to use the `serverless` mode. `legacy` is to provide backwards compatibility with previous `@now/next` versions.
|
||||||
|
|
||||||
|
The differences:
|
||||||
|
|
||||||
|
Legacy:
|
||||||
|
|
||||||
|
- Minimal lambda size of `2.2Mb` (approximately)
|
||||||
|
- Forces `next@v7.0.2-canary.49` and `next-server@v7.0.2-canary.49`
|
||||||
|
- Forces all `dependencies` to be `devDependencies`
|
||||||
|
- Loads `next.config.js` on bootup, breaking sometimes when users didn't use `phases` to load files
|
||||||
|
- Used `next-server` which is the full Next.js server with routing etc.
|
||||||
|
- Runs `npm install`
|
||||||
|
- Runs `npm run now-build`
|
||||||
|
- Runs `npm install --production` after build
|
||||||
|
|
||||||
|
Serverless:
|
||||||
|
|
||||||
|
- Minimal lambda size of `49Kb` (approximately)
|
||||||
|
- Uses Next.js build targets (`target: 'serverless'`) in `next.config.js`. [documentation](https://github.com/zeit/next.js#summary)
|
||||||
|
- Does not make changes to your application dependencies
|
||||||
|
- Does not load `next.config.js` ([as per the serverless target documentation](https://github.com/zeit/next.js#summary))
|
||||||
|
- Runs `npm install`
|
||||||
|
- Runs `npm run now-build`
|
||||||
|
- Does not run `npm install --production` as the output from the build is all that's needed to bundle lambdas.
|
||||||
|
- No runtime dependencies, meaning smaller lambda functions
|
||||||
|
- Optimized for fast [cold start](https://zeit.co/blog/serverless-ssr#cold-start)
|
||||||
|
|
||||||
|
#### Possible Ways to Fix It
|
||||||
|
|
||||||
|
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
||||||
|
|
||||||
|
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
||||||
|
|
||||||
|
```
|
||||||
|
npm install next --save
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Add the `now-build` script to your `package.json`
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"scripts": {
|
||||||
|
"now-build": "next build"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Add `target: 'serverless'` to `next.config.js`
|
||||||
|
|
||||||
|
```js
|
||||||
|
module.exports = {
|
||||||
|
target: 'serverless',
|
||||||
|
// Other options are still valid
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "package.json", "use": "@now/next" }]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Useful Links
|
||||||
|
|
||||||
|
- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
|
||||||
45
errors/now-next-no-serverless-pages-built.md
Normal file
45
errors/now-next-no-serverless-pages-built.md
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
# `@now/next` No Serverless Pages Built
|
||||||
|
|
||||||
|
#### Why This Error Occurred
|
||||||
|
|
||||||
|
This error occurs when you have your application is not configured for Serverless Next.js build output.
|
||||||
|
|
||||||
|
#### Possible Ways to Fix It
|
||||||
|
|
||||||
|
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
||||||
|
|
||||||
|
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
||||||
|
|
||||||
|
```
|
||||||
|
npm install next --save
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Add the `now-build` script to your `package.json`
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"scripts": {
|
||||||
|
"now-build": "next build"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Add `target: 'serverless'` to `next.config.js`
|
||||||
|
|
||||||
|
```js
|
||||||
|
module.exports = {
|
||||||
|
target: 'serverless',
|
||||||
|
// Other options
|
||||||
|
};
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`
|
||||||
|
|
||||||
|
5. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
|
||||||
|
|
||||||
|
```js
|
||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "package.json", "use": "@now/next" }]
|
||||||
|
}
|
||||||
|
```
|
||||||
38
errors/now-static-build-failed-to-detect-a-server.md
Normal file
38
errors/now-static-build-failed-to-detect-a-server.md
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
# `@now/static-build` Failed to detect a server running
|
||||||
|
|
||||||
|
#### Why This Warning Occurred
|
||||||
|
|
||||||
|
When running `now dev`, the `@now/static-build` builder proxies relevant HTTP
|
||||||
|
requests to the server that is created by the `now-dev` script in the
|
||||||
|
`package.json` file.
|
||||||
|
|
||||||
|
In order for `now dev` to know which port the server is running on, the builder
|
||||||
|
is provided a `$PORT` environment variable that the server _must_ bind to. The
|
||||||
|
error "Failed to detect a server running on port" is printed if the builder fails
|
||||||
|
to detect a server listening on that specific port within five minutes.
|
||||||
|
|
||||||
|
#### Possible Ways to Fix It
|
||||||
|
|
||||||
|
Please ensure that your `now-dev` script binds the spawned development server on
|
||||||
|
the provided `$PORT` that the builder expects the server to bind to.
|
||||||
|
|
||||||
|
For example, if you are using Gatsby, your `now-dev` script must use the `-p`
|
||||||
|
(port) option to bind to the `$PORT` specified from the builder:
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
...
|
||||||
|
"scripts": {
|
||||||
|
...
|
||||||
|
"now-dev": "gatsby develop -p $PORT"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Consult your static builder program's `--help` or documentation to figure out what
|
||||||
|
the command line flag to bind to a specific port is (in many cases, it is one of:
|
||||||
|
`-p` / `-P` / `--port`).
|
||||||
|
|
||||||
|
### Useful Links
|
||||||
|
|
||||||
|
- [`@now/static-build` Local Development Documentation](https://zeit.co/docs/v2/deployments/official-builders/static-build-now-static-build#local-development)
|
||||||
@@ -1,5 +1,40 @@
|
|||||||
|
const childProcess = require('child_process');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const command = 'git diff HEAD~1 --name-only';
|
||||||
|
const diff = childProcess.execSync(command).toString();
|
||||||
|
|
||||||
|
const changed = diff
|
||||||
|
.split('\n')
|
||||||
|
.filter(item => Boolean(item) && item.includes('packages/'))
|
||||||
|
.map(item => path.relative('packages', item).split('/')[0]);
|
||||||
|
|
||||||
|
const matches = [];
|
||||||
|
|
||||||
|
if (changed.length > 0) {
|
||||||
|
console.log('The following packages have changed:');
|
||||||
|
|
||||||
|
changed.map((item) => {
|
||||||
|
matches.push(item);
|
||||||
|
console.log(item);
|
||||||
|
|
||||||
|
return null;
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
matches.push('now-node');
|
||||||
|
console.log(`No packages changed, defaulting to ${matches[0]}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const testMatch = Array.from(new Set(matches)).map(
|
||||||
|
item => `**/${item}/**/?(*.)+(spec|test).[jt]s?(x)`,
|
||||||
|
);
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
testEnvironment: 'node',
|
testEnvironment: 'node',
|
||||||
rootDir: 'test',
|
testMatch,
|
||||||
collectCoverageFrom: ['packages/**/*.{js,jsx}', '!**/node_modules/**'],
|
collectCoverageFrom: [
|
||||||
|
'packages/(!test)/**/*.{js,jsx}',
|
||||||
|
'!**/node_modules/**',
|
||||||
|
'!**/test/**',
|
||||||
|
],
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -1,9 +1,7 @@
|
|||||||
{
|
{
|
||||||
"npmClient": "yarn",
|
"npmClient": "yarn",
|
||||||
"useWorkspaces": true,
|
"useWorkspaces": true,
|
||||||
"packages": [
|
"packages": ["packages/*"],
|
||||||
"packages/*"
|
|
||||||
],
|
|
||||||
"command": {
|
"command": {
|
||||||
"publish": {
|
"publish": {
|
||||||
"npmClient": "npm",
|
"npmClient": "npm",
|
||||||
|
|||||||
32
package.json
32
package.json
@@ -1,5 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "now-builders",
|
"name": "now-builders",
|
||||||
|
"version": "0.0.0",
|
||||||
"private": true,
|
"private": true,
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
@@ -11,18 +12,26 @@
|
|||||||
"scripts": {
|
"scripts": {
|
||||||
"lerna": "lerna",
|
"lerna": "lerna",
|
||||||
"bootstrap": "lerna bootstrap",
|
"bootstrap": "lerna bootstrap",
|
||||||
"publish-stable": "lerna version",
|
"publish-stable": "git checkout master && git pull && lerna version",
|
||||||
"publish-canary": "lerna version prerelease --preid canary",
|
"publish-canary": "git checkout canary && git pull && lerna version prerelease --preid canary",
|
||||||
"lint": "tsc && eslint .",
|
"publish-from-github": "./.circleci/publish.sh",
|
||||||
"test": "jest --runInBand",
|
"build": "./.circleci/build.sh",
|
||||||
|
"lint": "eslint .",
|
||||||
|
"codecov": "codecov",
|
||||||
|
"test": "jest --runInBand --verbose",
|
||||||
|
"test-coverage": "jest --runInBand --verbose --coverage --globals \"{\\\"coverage\\\":true}\" && codecov",
|
||||||
"lint-staged": "lint-staged"
|
"lint-staged": "lint-staged"
|
||||||
},
|
},
|
||||||
"pre-commit": "lint-staged",
|
"pre-commit": "lint-staged",
|
||||||
"lint-staged": {
|
"lint-staged": {
|
||||||
"*.js": [
|
"*.js": [
|
||||||
"prettier --write --single-quote",
|
"prettier --write",
|
||||||
"eslint --fix",
|
"eslint --fix",
|
||||||
"git add"
|
"git add"
|
||||||
|
],
|
||||||
|
"*.ts": [
|
||||||
|
"prettier --write",
|
||||||
|
"git add"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
@@ -30,14 +39,23 @@
|
|||||||
"@types/glob": "^7.1.1",
|
"@types/glob": "^7.1.1",
|
||||||
"@types/multistream": "^2.1.1",
|
"@types/multistream": "^2.1.1",
|
||||||
"@types/node": "^10.12.8",
|
"@types/node": "^10.12.8",
|
||||||
|
"async-retry": "1.2.3",
|
||||||
|
"buffer-replace": "^1.0.0",
|
||||||
|
"codecov": "^3.2.0",
|
||||||
"eslint": "^5.9.0",
|
"eslint": "^5.9.0",
|
||||||
"eslint-config-airbnb-base": "^13.1.0",
|
"eslint-config-airbnb-base": "^13.1.0",
|
||||||
"eslint-config-prettier": "^3.1.0",
|
"eslint-config-prettier": "^3.1.0",
|
||||||
"eslint-plugin-import": "^2.14.0",
|
"eslint-plugin-import": "^2.14.0",
|
||||||
|
"fs-extra": "^7.0.1",
|
||||||
|
"glob": "^7.1.3",
|
||||||
"jest": "^23.6.0",
|
"jest": "^23.6.0",
|
||||||
"lint-staged": "^8.0.4",
|
"lint-staged": "^8.0.4",
|
||||||
|
"node-fetch": "^2.3.0",
|
||||||
"pre-commit": "^1.2.2",
|
"pre-commit": "^1.2.2",
|
||||||
"prettier": "^1.15.2",
|
"prettier": "1.17.1"
|
||||||
"typescript": "^3.1.6"
|
},
|
||||||
|
"prettier": {
|
||||||
|
"singleQuote": true,
|
||||||
|
"trailingComma": "es5"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
32
packages/now-bash/.editorconfig
Normal file
32
packages/now-bash/.editorconfig
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
root = true
|
||||||
|
|
||||||
|
[*]
|
||||||
|
indent_style = tab
|
||||||
|
indent_size = 4
|
||||||
|
tab_width = 4
|
||||||
|
end_of_line = lf
|
||||||
|
charset = utf-8
|
||||||
|
trim_trailing_whitespace = true
|
||||||
|
insert_final_newline = true
|
||||||
|
|
||||||
|
[{*.json,*.json.example,*.gyp,*.yml}]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 2
|
||||||
|
|
||||||
|
[*.py]
|
||||||
|
indent_style = space
|
||||||
|
indent_size = 4
|
||||||
|
|
||||||
|
[*.md]
|
||||||
|
trim_trailing_whitespace = false
|
||||||
|
|
||||||
|
# Ideal settings - some plugins might support these.
|
||||||
|
[*.js]
|
||||||
|
quote_type = single
|
||||||
|
|
||||||
|
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}]
|
||||||
|
curly_bracket_next_line = false
|
||||||
|
spaces_around_operators = true
|
||||||
|
spaces_around_brackets = outside
|
||||||
|
# close enough to 1TB
|
||||||
|
indent_brace_style = K&R
|
||||||
2
packages/now-bash/.gitignore
vendored
Normal file
2
packages/now-bash/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
node_modules
|
||||||
|
handler
|
||||||
16
packages/now-bash/bootstrap
Executable file
16
packages/now-bash/bootstrap
Executable file
@@ -0,0 +1,16 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -euo pipefail
|
||||||
|
cd "$LAMBDA_TASK_ROOT"
|
||||||
|
|
||||||
|
# Configure `import`
|
||||||
|
export IMPORT_CACHE="$LAMBDA_TASK_ROOT/.import-cache"
|
||||||
|
export PATH="$IMPORT_CACHE/bin:$PATH"
|
||||||
|
|
||||||
|
# Load `import` and runtime
|
||||||
|
# shellcheck disable=SC1090
|
||||||
|
. "$(which import)"
|
||||||
|
# shellcheck disable=SC1090
|
||||||
|
. "$IMPORT_CACHE/runtime.sh"
|
||||||
|
|
||||||
|
# Load user code and process events in a loop forever
|
||||||
|
_lambda_runtime_init
|
||||||
40
packages/now-bash/builder.sh
Executable file
40
packages/now-bash/builder.sh
Executable file
@@ -0,0 +1,40 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# `import` debug logs are always enabled during build
|
||||||
|
export IMPORT_DEBUG=1
|
||||||
|
|
||||||
|
# Install `import`
|
||||||
|
IMPORT_BIN="$IMPORT_CACHE/bin/import"
|
||||||
|
mkdir -p "$(dirname "$IMPORT_BIN")"
|
||||||
|
curl -sfLS https://import.pw > "$IMPORT_BIN"
|
||||||
|
chmod +x "$IMPORT_BIN"
|
||||||
|
|
||||||
|
# For now only the entrypoint file is copied into the lambda
|
||||||
|
mkdir -p "$(dirname "$ENTRYPOINT")"
|
||||||
|
cp "$SRC/$ENTRYPOINT" "$ENTRYPOINT"
|
||||||
|
|
||||||
|
# Copy in the runtime
|
||||||
|
cp "$BUILDER/runtime.sh" "$IMPORT_CACHE"
|
||||||
|
cp "$BUILDER/bootstrap" .
|
||||||
|
|
||||||
|
# Load `import`
|
||||||
|
. "$(which import)"
|
||||||
|
|
||||||
|
# Cache runtime and user dependencies
|
||||||
|
echo "Caching imports in \"$ENTRYPOINT\"…"
|
||||||
|
. "$IMPORT_CACHE/runtime.sh"
|
||||||
|
. "$ENTRYPOINT"
|
||||||
|
echo "Done caching imports"
|
||||||
|
|
||||||
|
# Run user build script
|
||||||
|
if declare -f build > /dev/null; then
|
||||||
|
echo "Running \`build\` function in \"$ENTRYPOINT\"…"
|
||||||
|
build "$@"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Ensure the entrypoint defined a `handler` function
|
||||||
|
if ! declare -f handler > /dev/null; then
|
||||||
|
echo "ERROR: A \`handler\` function must be defined in \"$ENTRYPOINT\"!" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
62
packages/now-bash/index.js
Normal file
62
packages/now-bash/index.js
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
const execa = require('execa');
|
||||||
|
const { join } = require('path');
|
||||||
|
const snakeCase = require('snake-case');
|
||||||
|
const {
|
||||||
|
glob,
|
||||||
|
download,
|
||||||
|
createLambda,
|
||||||
|
getWriteableDirectory,
|
||||||
|
shouldServe,
|
||||||
|
} = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
|
|
||||||
|
exports.config = {
|
||||||
|
maxLambdaSize: '10mb',
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||||
|
|
||||||
|
exports.build = async ({
|
||||||
|
workPath, files, entrypoint, config,
|
||||||
|
}) => {
|
||||||
|
const srcDir = await getWriteableDirectory();
|
||||||
|
|
||||||
|
console.log('downloading files...');
|
||||||
|
await download(files, srcDir);
|
||||||
|
|
||||||
|
const configEnv = Object.keys(config).reduce((o, v) => {
|
||||||
|
o[`IMPORT_${snakeCase(v).toUpperCase()}`] = config[v]; // eslint-disable-line no-param-reassign
|
||||||
|
return o;
|
||||||
|
}, {});
|
||||||
|
|
||||||
|
const IMPORT_CACHE = `${workPath}/.import-cache`;
|
||||||
|
const env = Object.assign({}, process.env, configEnv, {
|
||||||
|
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
|
||||||
|
IMPORT_CACHE,
|
||||||
|
SRC: srcDir,
|
||||||
|
BUILDER: __dirname,
|
||||||
|
ENTRYPOINT: entrypoint,
|
||||||
|
});
|
||||||
|
|
||||||
|
const builderPath = join(__dirname, 'builder.sh');
|
||||||
|
|
||||||
|
await execa(builderPath, [entrypoint], {
|
||||||
|
env,
|
||||||
|
cwd: workPath,
|
||||||
|
stdio: 'inherit',
|
||||||
|
});
|
||||||
|
|
||||||
|
const lambda = await createLambda({
|
||||||
|
files: await glob('**', workPath),
|
||||||
|
handler: entrypoint, // not actually used in `bootstrap`
|
||||||
|
runtime: 'provided',
|
||||||
|
environment: Object.assign({}, configEnv, {
|
||||||
|
SCRIPT_FILENAME: entrypoint,
|
||||||
|
}),
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
[entrypoint]: lambda,
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
exports.shouldServe = shouldServe;
|
||||||
24
packages/now-bash/package.json
Normal file
24
packages/now-bash/package.json
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"name": "@now/bash",
|
||||||
|
"version": "0.2.3",
|
||||||
|
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
|
||||||
|
"main": "index.js",
|
||||||
|
"author": "Nathan Rajlich <nate@zeit.co>",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
|
"directory": "packages/now-bash"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"builder.sh",
|
||||||
|
"runtime.sh",
|
||||||
|
"bootstrap",
|
||||||
|
"index.js",
|
||||||
|
"package.json"
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"execa": "^1.0.0",
|
||||||
|
"snake-case": "^2.1.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
119
packages/now-bash/runtime.sh
Normal file
119
packages/now-bash/runtime.sh
Normal file
@@ -0,0 +1,119 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
import "static-binaries@1.0.0"
|
||||||
|
static_binaries jq
|
||||||
|
|
||||||
|
# These get reset upon each request
|
||||||
|
_STATUS_CODE="$(mktemp)"
|
||||||
|
_HEADERS="$(mktemp)"
|
||||||
|
|
||||||
|
_lambda_runtime_api() {
|
||||||
|
local endpoint="$1"
|
||||||
|
shift
|
||||||
|
curl -sfLS "http://$AWS_LAMBDA_RUNTIME_API/2018-06-01/runtime/$endpoint" "$@"
|
||||||
|
}
|
||||||
|
|
||||||
|
_lambda_runtime_init() {
|
||||||
|
# Initialize user code
|
||||||
|
# shellcheck disable=SC1090
|
||||||
|
. "$SCRIPT_FILENAME" || {
|
||||||
|
local exit_code="$?"
|
||||||
|
local error_message="Initialization failed for '$SCRIPT_FILENAME' (exit code $exit_code)"
|
||||||
|
echo "$error_message" >&2
|
||||||
|
local error='{"errorMessage":"'"$error_message"'"}'
|
||||||
|
_lambda_runtime_api "init/error" -X POST -d "$error"
|
||||||
|
exit "$exit_code"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Process events
|
||||||
|
while true; do _lambda_runtime_next; done
|
||||||
|
}
|
||||||
|
|
||||||
|
_lambda_runtime_next() {
|
||||||
|
echo 200 > "$_STATUS_CODE"
|
||||||
|
echo '{"content-type":"text/plain; charset=utf8"}' > "$_HEADERS"
|
||||||
|
|
||||||
|
local headers
|
||||||
|
headers="$(mktemp)"
|
||||||
|
|
||||||
|
# Get an event
|
||||||
|
local event
|
||||||
|
event="$(mktemp)"
|
||||||
|
_lambda_runtime_api invocation/next -D "$headers" | jq --raw-output --monochrome-output '.body' > "$event"
|
||||||
|
|
||||||
|
local request_id
|
||||||
|
request_id="$(grep -Fi Lambda-Runtime-Aws-Request-Id "$headers" | tr -d '[:space:]' | cut -d: -f2)"
|
||||||
|
rm -f "$headers"
|
||||||
|
|
||||||
|
# Execute the handler function from the script
|
||||||
|
local body
|
||||||
|
body="$(mktemp)"
|
||||||
|
|
||||||
|
# Stdin of the `handler` function is the HTTP request body.
|
||||||
|
# Need to use a fifo here instead of bash <() because Lambda
|
||||||
|
# errors with "/dev/fd/63 not found" for some reason :/
|
||||||
|
local stdin
|
||||||
|
stdin="$(mktemp -u)"
|
||||||
|
mkfifo "$stdin"
|
||||||
|
_lambda_runtime_body < "$event" > "$stdin" &
|
||||||
|
|
||||||
|
local exit_code=0
|
||||||
|
handler "$event" < "$stdin" > "$body" || exit_code="$?"
|
||||||
|
|
||||||
|
rm -f "$event" "$stdin"
|
||||||
|
|
||||||
|
if [ "$exit_code" -eq 0 ]; then
|
||||||
|
# Send the response
|
||||||
|
jq --raw-input --raw-output --compact-output --slurp --monochrome-output \
|
||||||
|
--arg statusCode "$(cat "$_STATUS_CODE")" \
|
||||||
|
--argjson headers "$(cat "$_HEADERS")" \
|
||||||
|
'{statusCode:$statusCode|tonumber, headers:$headers, encoding:"base64", body:.|@base64}' < "$body" \
|
||||||
|
| _lambda_runtime_api "invocation/$request_id/response" -X POST -d @- > /dev/null
|
||||||
|
rm -f "$body" "$_HEADERS"
|
||||||
|
else
|
||||||
|
local error_message="Invocation failed for 'handler' function in '$SCRIPT_FILENAME' (exit code $exit_code)"
|
||||||
|
echo "$error_message" >&2
|
||||||
|
_lambda_runtime_api "invocation/$request_id/error" -X POST -d '{"errorMessage":"'"$error_message"'"}' > /dev/null
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
_lambda_runtime_body() {
|
||||||
|
local event
|
||||||
|
event="$(cat)"
|
||||||
|
if [ "$(jq --raw-output '.body | type' <<< "$event")" = "string" ]; then
|
||||||
|
if [ "$(jq --raw-output '.encoding' <<< "$event")" = "base64" ]; then
|
||||||
|
jq --raw-output '.body' <<< "$event" | base64 --decode
|
||||||
|
else
|
||||||
|
# assume plain-text body
|
||||||
|
jq --raw-output '.body' <<< "$event"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Set the response status code.
|
||||||
|
http_response_code() {
|
||||||
|
echo "$1" > "$_STATUS_CODE"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Sets a response header.
|
||||||
|
# Overrides existing header if it has already been set.
|
||||||
|
http_response_header() {
|
||||||
|
local name="$1"
|
||||||
|
local value="$2"
|
||||||
|
local tmp
|
||||||
|
tmp="$(mktemp)"
|
||||||
|
jq \
|
||||||
|
--arg name "$name" \
|
||||||
|
--arg value "$value" \
|
||||||
|
'.[$name] = $value' < "$_HEADERS" > "$tmp"
|
||||||
|
mv -f "$tmp" "$_HEADERS"
|
||||||
|
}
|
||||||
|
|
||||||
|
http_response_redirect() {
|
||||||
|
http_response_code "${2:-302}"
|
||||||
|
http_response_header "location" "$1"
|
||||||
|
}
|
||||||
|
|
||||||
|
http_response_json() {
|
||||||
|
http_response_header "content-type" "application/json; charset=utf8"
|
||||||
|
}
|
||||||
146
packages/now-bash/yarn.lock
Normal file
146
packages/now-bash/yarn.lock
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||||
|
# yarn lockfile v1
|
||||||
|
|
||||||
|
|
||||||
|
cross-spawn@^6.0.0:
|
||||||
|
version "6.0.5"
|
||||||
|
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
|
||||||
|
integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ==
|
||||||
|
dependencies:
|
||||||
|
nice-try "^1.0.4"
|
||||||
|
path-key "^2.0.1"
|
||||||
|
semver "^5.5.0"
|
||||||
|
shebang-command "^1.2.0"
|
||||||
|
which "^1.2.9"
|
||||||
|
|
||||||
|
end-of-stream@^1.1.0:
|
||||||
|
version "1.4.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.1.tgz#ed29634d19baba463b6ce6b80a37213eab71ec43"
|
||||||
|
integrity sha512-1MkrZNvWTKCaigbn+W15elq2BB/L22nqrSY5DKlo3X6+vclJm8Bb5djXJBmEX6fS3+zCh/F4VBK5Z2KxJt4s2Q==
|
||||||
|
dependencies:
|
||||||
|
once "^1.4.0"
|
||||||
|
|
||||||
|
execa@^1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8"
|
||||||
|
integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA==
|
||||||
|
dependencies:
|
||||||
|
cross-spawn "^6.0.0"
|
||||||
|
get-stream "^4.0.0"
|
||||||
|
is-stream "^1.1.0"
|
||||||
|
npm-run-path "^2.0.0"
|
||||||
|
p-finally "^1.0.0"
|
||||||
|
signal-exit "^3.0.0"
|
||||||
|
strip-eof "^1.0.0"
|
||||||
|
|
||||||
|
get-stream@^4.0.0:
|
||||||
|
version "4.1.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
|
||||||
|
integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==
|
||||||
|
dependencies:
|
||||||
|
pump "^3.0.0"
|
||||||
|
|
||||||
|
is-stream@^1.1.0:
|
||||||
|
version "1.1.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44"
|
||||||
|
integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ=
|
||||||
|
|
||||||
|
isexe@^2.0.0:
|
||||||
|
version "2.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10"
|
||||||
|
integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA=
|
||||||
|
|
||||||
|
lower-case@^1.1.1:
|
||||||
|
version "1.1.4"
|
||||||
|
resolved "https://registry.yarnpkg.com/lower-case/-/lower-case-1.1.4.tgz#9a2cabd1b9e8e0ae993a4bf7d5875c39c42e8eac"
|
||||||
|
integrity sha1-miyr0bno4K6ZOkv31YdcOcQujqw=
|
||||||
|
|
||||||
|
nice-try@^1.0.4:
|
||||||
|
version "1.0.5"
|
||||||
|
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
|
||||||
|
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
|
||||||
|
|
||||||
|
no-case@^2.2.0:
|
||||||
|
version "2.3.2"
|
||||||
|
resolved "https://registry.yarnpkg.com/no-case/-/no-case-2.3.2.tgz#60b813396be39b3f1288a4c1ed5d1e7d28b464ac"
|
||||||
|
integrity sha512-rmTZ9kz+f3rCvK2TD1Ue/oZlns7OGoIWP4fc3llxxRXlOkHKoWPPWJOfFYpITabSow43QJbRIoHQXtt10VldyQ==
|
||||||
|
dependencies:
|
||||||
|
lower-case "^1.1.1"
|
||||||
|
|
||||||
|
npm-run-path@^2.0.0:
|
||||||
|
version "2.0.2"
|
||||||
|
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
|
||||||
|
integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8=
|
||||||
|
dependencies:
|
||||||
|
path-key "^2.0.0"
|
||||||
|
|
||||||
|
once@^1.3.1, once@^1.4.0:
|
||||||
|
version "1.4.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1"
|
||||||
|
integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E=
|
||||||
|
dependencies:
|
||||||
|
wrappy "1"
|
||||||
|
|
||||||
|
p-finally@^1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
|
||||||
|
integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
|
||||||
|
|
||||||
|
path-key@^2.0.0, path-key@^2.0.1:
|
||||||
|
version "2.0.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40"
|
||||||
|
integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A=
|
||||||
|
|
||||||
|
pump@^3.0.0:
|
||||||
|
version "3.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64"
|
||||||
|
integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==
|
||||||
|
dependencies:
|
||||||
|
end-of-stream "^1.1.0"
|
||||||
|
once "^1.3.1"
|
||||||
|
|
||||||
|
semver@^5.5.0:
|
||||||
|
version "5.6.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/semver/-/semver-5.6.0.tgz#7e74256fbaa49c75aa7c7a205cc22799cac80004"
|
||||||
|
integrity sha512-RS9R6R35NYgQn++fkDWaOmqGoj4Ek9gGs+DPxNUZKuwE183xjJroKvyo1IzVFeXvUrvmALy6FWD5xrdJT25gMg==
|
||||||
|
|
||||||
|
shebang-command@^1.2.0:
|
||||||
|
version "1.2.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea"
|
||||||
|
integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo=
|
||||||
|
dependencies:
|
||||||
|
shebang-regex "^1.0.0"
|
||||||
|
|
||||||
|
shebang-regex@^1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3"
|
||||||
|
integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM=
|
||||||
|
|
||||||
|
signal-exit@^3.0.0:
|
||||||
|
version "3.0.2"
|
||||||
|
resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.2.tgz#b5fdc08f1287ea1178628e415e25132b73646c6d"
|
||||||
|
integrity sha1-tf3AjxKH6hF4Yo5BXiUTK3NkbG0=
|
||||||
|
|
||||||
|
snake-case@^2.1.0:
|
||||||
|
version "2.1.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/snake-case/-/snake-case-2.1.0.tgz#41bdb1b73f30ec66a04d4e2cad1b76387d4d6d9f"
|
||||||
|
integrity sha1-Qb2xtz8w7GagTU4srRt2OH1NbZ8=
|
||||||
|
dependencies:
|
||||||
|
no-case "^2.2.0"
|
||||||
|
|
||||||
|
strip-eof@^1.0.0:
|
||||||
|
version "1.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
|
||||||
|
integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=
|
||||||
|
|
||||||
|
which@^1.2.9:
|
||||||
|
version "1.3.1"
|
||||||
|
resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a"
|
||||||
|
integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==
|
||||||
|
dependencies:
|
||||||
|
isexe "^2.0.0"
|
||||||
|
|
||||||
|
wrappy@1:
|
||||||
|
version "1.0.2"
|
||||||
|
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
||||||
|
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
|
||||||
3
packages/now-build-utils/.gitignore
vendored
Normal file
3
packages/now-build-utils/.gitignore
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
dist
|
||||||
|
test/symlinks-out
|
||||||
|
test/symlinks.zip
|
||||||
3
packages/now-build-utils/.npmignore
Normal file
3
packages/now-build-utils/.npmignore
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
/src
|
||||||
|
/test
|
||||||
|
tmp
|
||||||
@@ -1,33 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index').FileBlob;
|
||||||
const intoStream = require('into-stream');
|
|
||||||
|
|
||||||
class FileBlob {
|
|
||||||
constructor({ mode = 0o100644, data }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof data === 'string' || Buffer.isBuffer(data));
|
|
||||||
this.type = 'FileBlob';
|
|
||||||
this.mode = mode;
|
|
||||||
this.data = data;
|
|
||||||
}
|
|
||||||
|
|
||||||
static async fromStream({ mode = 0o100644, stream }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof stream.pipe === 'function'); // is-stream
|
|
||||||
const chunks = [];
|
|
||||||
|
|
||||||
await new Promise((resolve, reject) => {
|
|
||||||
stream.on('data', chunk => chunks.push(Buffer.from(chunk)));
|
|
||||||
stream.on('error', error => reject(error));
|
|
||||||
stream.on('end', () => resolve());
|
|
||||||
});
|
|
||||||
|
|
||||||
const data = Buffer.concat(chunks);
|
|
||||||
return new FileBlob({ mode, data });
|
|
||||||
}
|
|
||||||
|
|
||||||
toStream() {
|
|
||||||
return intoStream(this.data);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = FileBlob;
|
|
||||||
|
|||||||
@@ -1,88 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index').FileFsRef;
|
||||||
const fs = require('fs-extra');
|
|
||||||
const multiStream = require('multistream');
|
|
||||||
const path = require('path');
|
|
||||||
const Sema = require('async-sema');
|
|
||||||
|
|
||||||
/** @typedef {{[filePath: string]: FileFsRef}} FsFiles */
|
|
||||||
|
|
||||||
const semaToPreventEMFILE = new Sema(30);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @constructor
|
|
||||||
* @argument {Object} options
|
|
||||||
* @argument {number} [options.mode=0o100644]
|
|
||||||
* @argument {string} options.fsPath
|
|
||||||
*/
|
|
||||||
class FileFsRef {
|
|
||||||
constructor({ mode = 0o100644, fsPath }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof fsPath === 'string');
|
|
||||||
/** @type {string} */
|
|
||||||
this.type = 'FileFsRef';
|
|
||||||
/** @type {number} */
|
|
||||||
this.mode = mode;
|
|
||||||
/** @type {string} */
|
|
||||||
this.fsPath = fsPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @argument {Object} options
|
|
||||||
* @argument {number} [options.mode=0o100644]
|
|
||||||
* @argument {NodeJS.ReadableStream} options.stream
|
|
||||||
* @argument {string} options.fsPath
|
|
||||||
* @returns {Promise<FileFsRef>}
|
|
||||||
*/
|
|
||||||
static async fromStream({ mode = 0o100644, stream, fsPath }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof stream.pipe === 'function'); // is-stream
|
|
||||||
assert(typeof fsPath === 'string');
|
|
||||||
await fs.mkdirp(path.dirname(fsPath));
|
|
||||||
|
|
||||||
await new Promise((resolve, reject) => {
|
|
||||||
const dest = fs.createWriteStream(fsPath);
|
|
||||||
stream.pipe(dest);
|
|
||||||
stream.on('error', reject);
|
|
||||||
dest.on('finish', resolve);
|
|
||||||
dest.on('error', reject);
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.chmod(fsPath, mode.toString(8).slice(-3));
|
|
||||||
return new FileFsRef({ mode, fsPath });
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {Promise<NodeJS.ReadableStream>}
|
|
||||||
*/
|
|
||||||
async toStreamAsync() {
|
|
||||||
await semaToPreventEMFILE.acquire();
|
|
||||||
const release = () => semaToPreventEMFILE.release();
|
|
||||||
const stream = fs.createReadStream(this.fsPath);
|
|
||||||
stream.on('close', release);
|
|
||||||
stream.on('error', release);
|
|
||||||
return stream;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {NodeJS.ReadableStream}
|
|
||||||
*/
|
|
||||||
toStream() {
|
|
||||||
let flag;
|
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
|
||||||
return multiStream((cb) => {
|
|
||||||
if (flag) return cb(null, null);
|
|
||||||
flag = true;
|
|
||||||
|
|
||||||
this.toStreamAsync()
|
|
||||||
.then((stream) => {
|
|
||||||
cb(null, stream);
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
cb(error, null);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = FileFsRef;
|
|
||||||
|
|||||||
@@ -1,96 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index').FileRef;
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const multiStream = require('multistream');
|
|
||||||
const retry = require('async-retry');
|
|
||||||
const Sema = require('async-sema');
|
|
||||||
|
|
||||||
/** @typedef {{[filePath: string]: FileRef}} Files */
|
|
||||||
|
|
||||||
const semaToDownloadFromS3 = new Sema(10);
|
|
||||||
|
|
||||||
class BailableError extends Error {
|
|
||||||
constructor(...args) {
|
|
||||||
super(...args);
|
|
||||||
/** @type {boolean} */
|
|
||||||
this.bail = false;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @constructor
|
|
||||||
* @argument {Object} options
|
|
||||||
* @argument {number} [options.mode=0o100644]
|
|
||||||
* @argument {string} options.digest
|
|
||||||
*/
|
|
||||||
class FileRef {
|
|
||||||
constructor({ mode = 0o100644, digest }) {
|
|
||||||
assert(typeof mode === 'number');
|
|
||||||
assert(typeof digest === 'string');
|
|
||||||
/** @type {string} */
|
|
||||||
this.type = 'FileRef';
|
|
||||||
/** @type {number} */
|
|
||||||
this.mode = mode;
|
|
||||||
/** @type {string} */
|
|
||||||
this.digest = digest;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {Promise<NodeJS.ReadableStream>}
|
|
||||||
*/
|
|
||||||
async toStreamAsync() {
|
|
||||||
let url;
|
|
||||||
// sha:24be087eef9fac01d61b30a725c1a10d7b45a256
|
|
||||||
const digestParts = this.digest.split(':');
|
|
||||||
if (digestParts[0] === 'sha') {
|
|
||||||
// url = `https://s3.amazonaws.com/now-files/${digestParts[1]}`;
|
|
||||||
url = `https://dmmcy0pwk6bqi.cloudfront.net/${digestParts[1]}`;
|
|
||||||
}
|
|
||||||
|
|
||||||
assert(url);
|
|
||||||
|
|
||||||
await semaToDownloadFromS3.acquire();
|
|
||||||
console.time(`downloading ${url}`);
|
|
||||||
try {
|
|
||||||
return await retry(
|
|
||||||
async () => {
|
|
||||||
const resp = await fetch(url);
|
|
||||||
if (!resp.ok) {
|
|
||||||
const error = new BailableError(
|
|
||||||
`${resp.status} ${resp.statusText}`,
|
|
||||||
);
|
|
||||||
if (resp.status === 403) error.bail = true;
|
|
||||||
throw error;
|
|
||||||
}
|
|
||||||
return resp.body;
|
|
||||||
},
|
|
||||||
{ factor: 1, retries: 3 },
|
|
||||||
);
|
|
||||||
} finally {
|
|
||||||
console.timeEnd(`downloading ${url}`);
|
|
||||||
semaToDownloadFromS3.release();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @returns {NodeJS.ReadableStream}
|
|
||||||
*/
|
|
||||||
toStream() {
|
|
||||||
let flag;
|
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
|
||||||
return multiStream((cb) => {
|
|
||||||
if (flag) return cb(null, null);
|
|
||||||
flag = true;
|
|
||||||
|
|
||||||
this.toStreamAsync()
|
|
||||||
.then((stream) => {
|
|
||||||
cb(null, stream);
|
|
||||||
})
|
|
||||||
.catch((error) => {
|
|
||||||
cb(error, null);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = FileRef;
|
|
||||||
|
|||||||
@@ -1,38 +1 @@
|
|||||||
const path = require('path');
|
module.exports = require('../dist/fs/download').default;
|
||||||
const FileFsRef = require('../file-fs-ref.js');
|
|
||||||
|
|
||||||
/** @typedef {import('../file-ref')} FileRef */
|
|
||||||
/** @typedef {import('../file-fs-ref')} FileFsRef */
|
|
||||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
|
||||||
/** @typedef {{[filePath: string]: FileFsRef}|{}} DownloadedFiles */
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param {FileRef|FileFsRef} file
|
|
||||||
* @param {string} fsPath
|
|
||||||
* @returns {Promise<FileFsRef>}
|
|
||||||
*/
|
|
||||||
async function downloadFile(file, fsPath) {
|
|
||||||
const { mode } = file;
|
|
||||||
const stream = file.toStream();
|
|
||||||
return FileFsRef.fromStream({ mode, stream, fsPath });
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Download files to disk
|
|
||||||
* @argument {Files} files
|
|
||||||
* @argument {string} basePath
|
|
||||||
* @returns {Promise<DownloadedFiles>}
|
|
||||||
*/
|
|
||||||
module.exports = async function download(files, basePath) {
|
|
||||||
const files2 = {};
|
|
||||||
|
|
||||||
await Promise.all(
|
|
||||||
Object.keys(files).map(async (name) => {
|
|
||||||
const file = files[name];
|
|
||||||
const fsPath = path.join(basePath, name);
|
|
||||||
files2[name] = await downloadFile(file, fsPath);
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
return files2;
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,12 +1 @@
|
|||||||
const path = require('path');
|
module.exports = require('../dist/fs/get-writable-directory').default;
|
||||||
const fs = require('fs-extra');
|
|
||||||
|
|
||||||
const dev = !process.env.AWS_EXECUTION_ENV;
|
|
||||||
const TMP_PATH = dev ? path.join(process.cwd(), 'tmp') : '/tmp';
|
|
||||||
|
|
||||||
module.exports = async function getWritableDirectory() {
|
|
||||||
const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
|
|
||||||
const directory = path.join(TMP_PATH, name);
|
|
||||||
await fs.mkdirp(directory);
|
|
||||||
return directory;
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,67 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('../dist/fs/glob').default;
|
||||||
const path = require('path');
|
|
||||||
const vanillaGlob = require('glob');
|
|
||||||
const FileFsRef = require('../file-fs-ref.js');
|
|
||||||
|
|
||||||
/** @typedef {import('fs').Stats} Stats */
|
|
||||||
/** @typedef {import('glob').IOptions} GlobOptions */
|
|
||||||
/** @typedef {import('../file-fs-ref').FsFiles|{}} GlobFiles */
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @argument {string} pattern
|
|
||||||
* @argument {GlobOptions|string} opts
|
|
||||||
* @argument {string} [mountpoint]
|
|
||||||
* @returns {Promise<GlobFiles>}
|
|
||||||
*/
|
|
||||||
module.exports = function glob(pattern, opts = {}, mountpoint) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
/** @type {GlobOptions} */
|
|
||||||
let options;
|
|
||||||
if (typeof opts === 'string') {
|
|
||||||
options = { cwd: opts };
|
|
||||||
} else {
|
|
||||||
options = opts;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!options.cwd) {
|
|
||||||
throw new Error(
|
|
||||||
'Second argument (basePath) must be specified for names of resulting files',
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!path.isAbsolute(options.cwd)) {
|
|
||||||
throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`);
|
|
||||||
}
|
|
||||||
|
|
||||||
options.statCache = {};
|
|
||||||
options.stat = true;
|
|
||||||
options.dot = true;
|
|
||||||
|
|
||||||
// eslint-disable-next-line consistent-return
|
|
||||||
vanillaGlob(pattern, options, (error, files) => {
|
|
||||||
if (error) return reject(error);
|
|
||||||
|
|
||||||
resolve(
|
|
||||||
files.reduce((files2, relativePath) => {
|
|
||||||
const fsPath = path.join(options.cwd, relativePath);
|
|
||||||
/** @type {Stats|any} */
|
|
||||||
const stat = options.statCache[fsPath];
|
|
||||||
assert(
|
|
||||||
stat,
|
|
||||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
|
||||||
);
|
|
||||||
if (stat && stat.isFile()) {
|
|
||||||
let finalPath = relativePath;
|
|
||||||
if (mountpoint) finalPath = path.join(mountpoint, finalPath);
|
|
||||||
return {
|
|
||||||
...files2,
|
|
||||||
[finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return files2;
|
|
||||||
}, {}),
|
|
||||||
);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,25 +1 @@
|
|||||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
module.exports = require('../dist/fs/rename').default;
|
||||||
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
|
|
||||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @callback delegate
|
|
||||||
* @argument {string} name
|
|
||||||
* @returns {string}
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Rename files using delegate function
|
|
||||||
* @argument {Files} files
|
|
||||||
* @argument {delegate} delegate
|
|
||||||
* @returns {Files}
|
|
||||||
*/
|
|
||||||
module.exports = function rename(files, delegate) {
|
|
||||||
return Object.keys(files).reduce(
|
|
||||||
(newFiles, name) => ({
|
|
||||||
...newFiles,
|
|
||||||
[delegate(name)]: files[name],
|
|
||||||
}),
|
|
||||||
{},
|
|
||||||
);
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,80 +1 @@
|
|||||||
const fs = require('fs-extra');
|
module.exports = require('../dist/fs/run-user-scripts');
|
||||||
const path = require('path');
|
|
||||||
const { spawn } = require('child_process');
|
|
||||||
|
|
||||||
function spawnAsync(command, args, cwd) {
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
const child = spawn(command, args, { stdio: 'inherit', cwd });
|
|
||||||
child.on('error', reject);
|
|
||||||
child.on('close', (code, signal) => (code !== 0
|
|
||||||
? reject(new Error(`Exited with ${code || signal}`))
|
|
||||||
: resolve()));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runShellScript(fsPath) {
|
|
||||||
const destPath = path.dirname(fsPath);
|
|
||||||
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function shouldUseNpm(destPath) {
|
|
||||||
let currentDestPath = destPath;
|
|
||||||
// eslint-disable-next-line no-constant-condition
|
|
||||||
while (true) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
if (await fs.exists(path.join(currentDestPath, 'package.json'))) {
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
|
||||||
if (await fs.exists(path.join(currentDestPath, 'package-lock.json'))) {
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const newDestPath = path.dirname(currentDestPath);
|
|
||||||
if (currentDestPath === newDestPath) break;
|
|
||||||
currentDestPath = newDestPath;
|
|
||||||
}
|
|
||||||
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runNpmInstall(destPath, args = []) {
|
|
||||||
let commandArgs = args;
|
|
||||||
console.log(`installing to ${destPath}`);
|
|
||||||
if (await shouldUseNpm(destPath)) {
|
|
||||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
|
||||||
await spawnAsync('npm', ['install'].concat(commandArgs), destPath);
|
|
||||||
await spawnAsync('npm', ['cache', 'clean', '--force'], destPath);
|
|
||||||
} else {
|
|
||||||
await spawnAsync('yarn', ['--cwd', destPath].concat(commandArgs), destPath);
|
|
||||||
await spawnAsync('yarn', ['cache', 'clean'], destPath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runPackageJsonScript(destPath, scriptName) {
|
|
||||||
try {
|
|
||||||
if (await shouldUseNpm(destPath)) {
|
|
||||||
console.log(`running "npm run ${scriptName}"`);
|
|
||||||
await spawnAsync('npm', ['run', scriptName], destPath);
|
|
||||||
} else {
|
|
||||||
console.log(`running "yarn run ${scriptName}"`);
|
|
||||||
await spawnAsync(
|
|
||||||
'yarn',
|
|
||||||
['--cwd', destPath, 'run', scriptName],
|
|
||||||
destPath,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.log(error.message);
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
runShellScript,
|
|
||||||
runNpmInstall,
|
|
||||||
runPackageJsonScript,
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,4 +1 @@
|
|||||||
const fastStreamToBuffer = require('fast-stream-to-buffer');
|
module.exports = require('../dist/fs/stream-to-buffer').default;
|
||||||
const { promisify } = require('util');
|
|
||||||
|
|
||||||
module.exports = promisify(fastStreamToBuffer);
|
|
||||||
|
|||||||
@@ -1,49 +1 @@
|
|||||||
const assert = require('assert');
|
module.exports = require('./dist/index');
|
||||||
const { ZipFile } = require('yazl');
|
|
||||||
const streamToBuffer = require('./fs/stream-to-buffer.js');
|
|
||||||
|
|
||||||
class Lambda {
|
|
||||||
constructor({
|
|
||||||
zipBuffer, handler, runtime, environment,
|
|
||||||
}) {
|
|
||||||
this.type = 'Lambda';
|
|
||||||
this.zipBuffer = zipBuffer;
|
|
||||||
this.handler = handler;
|
|
||||||
this.runtime = runtime;
|
|
||||||
this.environment = environment;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const mtime = new Date(1540000000000);
|
|
||||||
|
|
||||||
async function createLambda({
|
|
||||||
files, handler, runtime, environment = {},
|
|
||||||
}) {
|
|
||||||
assert(typeof files === 'object', '"files" must be an object');
|
|
||||||
assert(typeof handler === 'string', '"handler" is not a string');
|
|
||||||
assert(typeof runtime === 'string', '"runtime" is not a string');
|
|
||||||
assert(typeof environment === 'object', '"environment" is not an object');
|
|
||||||
const zipFile = new ZipFile();
|
|
||||||
|
|
||||||
Object.keys(files)
|
|
||||||
.sort()
|
|
||||||
.forEach((name) => {
|
|
||||||
const file = files[name];
|
|
||||||
const stream = file.toStream();
|
|
||||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
|
||||||
});
|
|
||||||
|
|
||||||
zipFile.end();
|
|
||||||
const zipBuffer = await streamToBuffer(zipFile.outputStream);
|
|
||||||
return new Lambda({
|
|
||||||
zipBuffer,
|
|
||||||
handler,
|
|
||||||
runtime,
|
|
||||||
environment,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
module.exports = {
|
|
||||||
Lambda,
|
|
||||||
createLambda,
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -1,15 +1,41 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/build-utils",
|
"name": "@now/build-utils",
|
||||||
"version": "0.4.29",
|
"version": "0.5.6",
|
||||||
|
"license": "MIT",
|
||||||
|
"main": "./dist/index.js",
|
||||||
|
"types": "./dist/index.d.js",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
|
"directory": "packages/now-build-utils"
|
||||||
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@types/cross-spawn": "6.0.0",
|
||||||
"async-retry": "1.2.3",
|
"async-retry": "1.2.3",
|
||||||
"async-sema": "2.1.4",
|
"async-sema": "2.1.4",
|
||||||
"fast-stream-to-buffer": "1.0.0",
|
"cross-spawn": "6.0.5",
|
||||||
|
"end-of-stream": "1.4.1",
|
||||||
"fs-extra": "7.0.0",
|
"fs-extra": "7.0.0",
|
||||||
"glob": "7.1.3",
|
"glob": "7.1.3",
|
||||||
"into-stream": "4.0.0",
|
"into-stream": "5.0.0",
|
||||||
|
"memory-fs": "0.4.1",
|
||||||
"multistream": "2.1.1",
|
"multistream": "2.1.1",
|
||||||
"node-fetch": "2.2.0",
|
"node-fetch": "2.2.0",
|
||||||
"yazl": "2.4.3"
|
"yazl": "2.4.3"
|
||||||
|
},
|
||||||
|
"scripts": {
|
||||||
|
"build": "tsc",
|
||||||
|
"test": "tsc && jest",
|
||||||
|
"prepublish": "tsc"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/async-retry": "^1.2.1",
|
||||||
|
"@types/end-of-stream": "^1.4.0",
|
||||||
|
"@types/fs-extra": "^5.0.5",
|
||||||
|
"@types/glob": "^7.1.1",
|
||||||
|
"@types/node-fetch": "^2.1.6",
|
||||||
|
"@types/yazl": "^2.4.1",
|
||||||
|
"execa": "^1.0.0",
|
||||||
|
"typescript": "3.3.4000"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
46
packages/now-build-utils/src/file-blob.ts
Normal file
46
packages/now-build-utils/src/file-blob.ts
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import intoStream from 'into-stream';
|
||||||
|
import { File } from './types';
|
||||||
|
|
||||||
|
interface FileBlobOptions {
|
||||||
|
mode?: number;
|
||||||
|
data: string | Buffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FromStreamOptions {
|
||||||
|
mode?: number;
|
||||||
|
stream: NodeJS.ReadableStream;
|
||||||
|
}
|
||||||
|
|
||||||
|
export default class FileBlob implements File {
|
||||||
|
public type: 'FileBlob';
|
||||||
|
public mode: number;
|
||||||
|
public data: string | Buffer;
|
||||||
|
|
||||||
|
constructor({ mode = 0o100644, data }: FileBlobOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof data === 'string' || Buffer.isBuffer(data));
|
||||||
|
this.type = 'FileBlob';
|
||||||
|
this.mode = mode;
|
||||||
|
this.data = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fromStream({ mode = 0o100644, stream }: FromStreamOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof stream.pipe === 'function'); // is-stream
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
stream.on('data', chunk => chunks.push(Buffer.from(chunk)));
|
||||||
|
stream.on('error', error => reject(error));
|
||||||
|
stream.on('end', () => resolve());
|
||||||
|
});
|
||||||
|
|
||||||
|
const data = Buffer.concat(chunks);
|
||||||
|
return new FileBlob({ mode, data });
|
||||||
|
}
|
||||||
|
|
||||||
|
toStream(): NodeJS.ReadableStream {
|
||||||
|
return intoStream(this.data);
|
||||||
|
}
|
||||||
|
}
|
||||||
97
packages/now-build-utils/src/file-fs-ref.ts
Normal file
97
packages/now-build-utils/src/file-fs-ref.ts
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import multiStream from 'multistream';
|
||||||
|
import path from 'path';
|
||||||
|
import Sema from 'async-sema';
|
||||||
|
import { File } from './types';
|
||||||
|
|
||||||
|
const semaToPreventEMFILE = new Sema(20);
|
||||||
|
|
||||||
|
interface FileFsRefOptions {
|
||||||
|
mode?: number;
|
||||||
|
fsPath: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
interface FromStreamOptions {
|
||||||
|
mode: number;
|
||||||
|
stream: NodeJS.ReadableStream;
|
||||||
|
fsPath: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
class FileFsRef implements File {
|
||||||
|
public type: 'FileFsRef';
|
||||||
|
public mode: number;
|
||||||
|
public fsPath: string;
|
||||||
|
|
||||||
|
constructor({ mode = 0o100644, fsPath }: FileFsRefOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof fsPath === 'string');
|
||||||
|
this.type = 'FileFsRef';
|
||||||
|
this.mode = mode;
|
||||||
|
this.fsPath = fsPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fromFsPath({
|
||||||
|
mode,
|
||||||
|
fsPath,
|
||||||
|
}: FileFsRefOptions): Promise<FileFsRef> {
|
||||||
|
let m = mode;
|
||||||
|
if (!m) {
|
||||||
|
const stat = await fs.lstat(fsPath);
|
||||||
|
m = stat.mode;
|
||||||
|
}
|
||||||
|
return new FileFsRef({ mode: m, fsPath });
|
||||||
|
}
|
||||||
|
|
||||||
|
static async fromStream({
|
||||||
|
mode = 0o100644,
|
||||||
|
stream,
|
||||||
|
fsPath,
|
||||||
|
}: FromStreamOptions): Promise<FileFsRef> {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof stream.pipe === 'function'); // is-stream
|
||||||
|
assert(typeof fsPath === 'string');
|
||||||
|
await fs.mkdirp(path.dirname(fsPath));
|
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
const dest = fs.createWriteStream(fsPath, {
|
||||||
|
mode: mode & 0o777,
|
||||||
|
});
|
||||||
|
stream.pipe(dest);
|
||||||
|
stream.on('error', reject);
|
||||||
|
dest.on('finish', resolve);
|
||||||
|
dest.on('error', reject);
|
||||||
|
});
|
||||||
|
|
||||||
|
return new FileFsRef({ mode, fsPath });
|
||||||
|
}
|
||||||
|
|
||||||
|
async toStreamAsync(): Promise<NodeJS.ReadableStream> {
|
||||||
|
await semaToPreventEMFILE.acquire();
|
||||||
|
const release = () => semaToPreventEMFILE.release();
|
||||||
|
const stream = fs.createReadStream(this.fsPath);
|
||||||
|
stream.on('close', release);
|
||||||
|
stream.on('error', release);
|
||||||
|
return stream;
|
||||||
|
}
|
||||||
|
|
||||||
|
toStream(): NodeJS.ReadableStream {
|
||||||
|
let flag = false;
|
||||||
|
|
||||||
|
// eslint-disable-next-line consistent-return
|
||||||
|
return multiStream(cb => {
|
||||||
|
if (flag) return cb(null, null);
|
||||||
|
flag = true;
|
||||||
|
|
||||||
|
this.toStreamAsync()
|
||||||
|
.then(stream => {
|
||||||
|
cb(null, stream);
|
||||||
|
})
|
||||||
|
.catch(error => {
|
||||||
|
cb(error, null);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export = FileFsRef;
|
||||||
95
packages/now-build-utils/src/file-ref.ts
Normal file
95
packages/now-build-utils/src/file-ref.ts
Normal file
@@ -0,0 +1,95 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import fetch from 'node-fetch';
|
||||||
|
import multiStream from 'multistream';
|
||||||
|
import retry from 'async-retry';
|
||||||
|
import Sema from 'async-sema';
|
||||||
|
import { File } from './types';
|
||||||
|
|
||||||
|
interface FileRefOptions {
|
||||||
|
mode?: number;
|
||||||
|
digest: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
const semaToDownloadFromS3 = new Sema(5);
|
||||||
|
|
||||||
|
class BailableError extends Error {
|
||||||
|
public bail: boolean;
|
||||||
|
|
||||||
|
constructor(...args: string[]) {
|
||||||
|
super(...args);
|
||||||
|
this.bail = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export default class FileRef implements File {
|
||||||
|
public type: 'FileRef';
|
||||||
|
public mode: number;
|
||||||
|
public digest: string;
|
||||||
|
|
||||||
|
constructor({ mode = 0o100644, digest }: FileRefOptions) {
|
||||||
|
assert(typeof mode === 'number');
|
||||||
|
assert(typeof digest === 'string');
|
||||||
|
this.type = 'FileRef';
|
||||||
|
this.mode = mode;
|
||||||
|
this.digest = digest;
|
||||||
|
}
|
||||||
|
|
||||||
|
async toStreamAsync(): Promise<NodeJS.ReadableStream> {
|
||||||
|
let url = '';
|
||||||
|
// sha:24be087eef9fac01d61b30a725c1a10d7b45a256
|
||||||
|
const [digestType, digestHash] = this.digest.split(':');
|
||||||
|
if (digestType === 'sha') {
|
||||||
|
// This CloudFront URL edge caches the `now-files` S3 bucket to prevent
|
||||||
|
// overloading it
|
||||||
|
// `https://now-files.s3.amazonaws.com/${digestHash}`
|
||||||
|
url = `https://dmmcy0pwk6bqi.cloudfront.net/${digestHash}`;
|
||||||
|
} else if (digestType === 'sha+ephemeral') {
|
||||||
|
// This URL is currently only used for cache files that constantly
|
||||||
|
// change. We shouldn't cache it on CloudFront because it'd always be a
|
||||||
|
// MISS.
|
||||||
|
url = `https://now-ephemeral-files.s3.amazonaws.com/${digestHash}`;
|
||||||
|
} else {
|
||||||
|
throw new Error('Expected digest to be sha');
|
||||||
|
}
|
||||||
|
|
||||||
|
await semaToDownloadFromS3.acquire();
|
||||||
|
// console.time(`downloading ${url}`);
|
||||||
|
try {
|
||||||
|
return await retry(
|
||||||
|
async () => {
|
||||||
|
const resp = await fetch(url);
|
||||||
|
if (!resp.ok) {
|
||||||
|
const error = new BailableError(
|
||||||
|
`download: ${resp.status} ${resp.statusText} for ${url}`
|
||||||
|
);
|
||||||
|
if (resp.status === 403) error.bail = true;
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
return resp.body;
|
||||||
|
},
|
||||||
|
{ factor: 1, retries: 3 }
|
||||||
|
);
|
||||||
|
} finally {
|
||||||
|
// console.timeEnd(`downloading ${url}`);
|
||||||
|
semaToDownloadFromS3.release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
toStream(): NodeJS.ReadableStream {
|
||||||
|
let flag = false;
|
||||||
|
|
||||||
|
// eslint-disable-next-line consistent-return
|
||||||
|
return multiStream(cb => {
|
||||||
|
if (flag) return cb(null, null);
|
||||||
|
flag = true;
|
||||||
|
|
||||||
|
this.toStreamAsync()
|
||||||
|
.then(stream => {
|
||||||
|
cb(null, stream);
|
||||||
|
})
|
||||||
|
.catch(error => {
|
||||||
|
cb(error, null);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
75
packages/now-build-utils/src/fs/download.ts
Normal file
75
packages/now-build-utils/src/fs/download.ts
Normal file
@@ -0,0 +1,75 @@
|
|||||||
|
import path from 'path';
|
||||||
|
import FileFsRef from '../file-fs-ref';
|
||||||
|
import { File, Files, Meta } from '../types';
|
||||||
|
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
|
||||||
|
|
||||||
|
export interface DownloadedFiles {
|
||||||
|
[filePath: string]: FileFsRef;
|
||||||
|
}
|
||||||
|
|
||||||
|
const S_IFMT = 61440; /* 0170000 type of file */
|
||||||
|
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
||||||
|
|
||||||
|
export function isSymbolicLink(mode: number): boolean {
|
||||||
|
return (mode & S_IFMT) === S_IFLNK;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
|
||||||
|
const { mode } = file;
|
||||||
|
if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
|
||||||
|
const [target] = await Promise.all([
|
||||||
|
readlink((file as FileFsRef).fsPath),
|
||||||
|
mkdirp(path.dirname(fsPath)),
|
||||||
|
]);
|
||||||
|
await symlink(target, fsPath);
|
||||||
|
return FileFsRef.fromFsPath({ mode, fsPath });
|
||||||
|
} else {
|
||||||
|
const stream = file.toStream();
|
||||||
|
return FileFsRef.fromStream({ mode, stream, fsPath });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function removeFile(basePath: string, fileMatched: string) {
|
||||||
|
const file = path.join(basePath, fileMatched);
|
||||||
|
await remove(file);
|
||||||
|
}
|
||||||
|
|
||||||
|
export default async function download(
|
||||||
|
files: Files,
|
||||||
|
basePath: string,
|
||||||
|
meta?: Meta
|
||||||
|
): Promise<DownloadedFiles> {
|
||||||
|
const { isDev = false, filesChanged = null, filesRemoved = null } =
|
||||||
|
meta || {};
|
||||||
|
|
||||||
|
if (isDev) {
|
||||||
|
// In `now dev`, the `download()` function is a no-op because
|
||||||
|
// the `basePath` matches the `cwd` of the dev server, so the
|
||||||
|
// source files are already available.
|
||||||
|
return files as DownloadedFiles;
|
||||||
|
}
|
||||||
|
|
||||||
|
const files2: DownloadedFiles = {};
|
||||||
|
|
||||||
|
await Promise.all(
|
||||||
|
Object.keys(files).map(async name => {
|
||||||
|
// If the file does not exist anymore, remove it.
|
||||||
|
if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) {
|
||||||
|
await removeFile(basePath, name);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// If a file didn't change, do not re-download it.
|
||||||
|
if (Array.isArray(filesChanged) && !filesChanged.includes(name)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const file = files[name];
|
||||||
|
const fsPath = path.join(basePath, name);
|
||||||
|
|
||||||
|
files2[name] = await downloadFile(file, fsPath);
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
return files2;
|
||||||
|
}
|
||||||
10
packages/now-build-utils/src/fs/get-writable-directory.ts
Normal file
10
packages/now-build-utils/src/fs/get-writable-directory.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { join } from 'path';
|
||||||
|
import { tmpdir } from 'os';
|
||||||
|
import { mkdirp } from 'fs-extra';
|
||||||
|
|
||||||
|
export default async function getWritableDirectory() {
|
||||||
|
const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
|
||||||
|
const directory = join(tmpdir(), name);
|
||||||
|
await mkdirp(directory);
|
||||||
|
return directory;
|
||||||
|
}
|
||||||
70
packages/now-build-utils/src/fs/glob.ts
Normal file
70
packages/now-build-utils/src/fs/glob.ts
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
import path from 'path';
|
||||||
|
import assert from 'assert';
|
||||||
|
import vanillaGlob_ from 'glob';
|
||||||
|
import { promisify } from 'util';
|
||||||
|
import { lstat, Stats } from 'fs-extra';
|
||||||
|
import FileFsRef from '../file-fs-ref';
|
||||||
|
|
||||||
|
type GlobOptions = vanillaGlob_.IOptions;
|
||||||
|
|
||||||
|
interface FsFiles {
|
||||||
|
[filePath: string]: FileFsRef;
|
||||||
|
}
|
||||||
|
|
||||||
|
const vanillaGlob = promisify(vanillaGlob_);
|
||||||
|
|
||||||
|
export default async function glob(
|
||||||
|
pattern: string,
|
||||||
|
opts: GlobOptions | string,
|
||||||
|
mountpoint?: string
|
||||||
|
): Promise<FsFiles> {
|
||||||
|
let options: GlobOptions;
|
||||||
|
if (typeof opts === 'string') {
|
||||||
|
options = { cwd: opts };
|
||||||
|
} else {
|
||||||
|
options = opts;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!options.cwd) {
|
||||||
|
throw new Error(
|
||||||
|
'Second argument (basePath) must be specified for names of resulting files'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!path.isAbsolute(options.cwd)) {
|
||||||
|
throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const results: FsFiles = {};
|
||||||
|
|
||||||
|
options.symlinks = {};
|
||||||
|
options.statCache = {};
|
||||||
|
options.stat = true;
|
||||||
|
options.dot = true;
|
||||||
|
|
||||||
|
const files = await vanillaGlob(pattern, options);
|
||||||
|
|
||||||
|
for (const relativePath of files) {
|
||||||
|
const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
|
||||||
|
let stat: Stats = options.statCache![fsPath] as Stats;
|
||||||
|
assert(
|
||||||
|
stat,
|
||||||
|
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
|
||||||
|
);
|
||||||
|
if (stat.isFile()) {
|
||||||
|
const isSymlink = options.symlinks![fsPath];
|
||||||
|
if (isSymlink) {
|
||||||
|
stat = await lstat(fsPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
let finalPath = relativePath;
|
||||||
|
if (mountpoint) {
|
||||||
|
finalPath = path.join(mountpoint, finalPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
results[finalPath] = new FileFsRef({ mode: stat.mode, fsPath });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return results;
|
||||||
|
}
|
||||||
12
packages/now-build-utils/src/fs/rename.ts
Normal file
12
packages/now-build-utils/src/fs/rename.ts
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
import { Files } from '../types';
|
||||||
|
type Delegate = (name: string) => string;
|
||||||
|
|
||||||
|
export default function rename(files: Files, delegate: Delegate): Files {
|
||||||
|
return Object.keys(files).reduce(
|
||||||
|
(newFiles, name) => ({
|
||||||
|
...newFiles,
|
||||||
|
[delegate(name)]: files[name],
|
||||||
|
}),
|
||||||
|
{}
|
||||||
|
);
|
||||||
|
}
|
||||||
154
packages/now-build-utils/src/fs/run-user-scripts.ts
Normal file
154
packages/now-build-utils/src/fs/run-user-scripts.ts
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import fs from 'fs-extra';
|
||||||
|
import path from 'path';
|
||||||
|
import spawn from 'cross-spawn';
|
||||||
|
import { SpawnOptions } from 'child_process';
|
||||||
|
|
||||||
|
function spawnAsync(
|
||||||
|
command: string,
|
||||||
|
args: string[],
|
||||||
|
cwd: string,
|
||||||
|
opts: SpawnOptions = {}
|
||||||
|
) {
|
||||||
|
return new Promise<void>((resolve, reject) => {
|
||||||
|
const stderrLogs: Buffer[] = [];
|
||||||
|
opts = { stdio: 'inherit', cwd, ...opts };
|
||||||
|
const child = spawn(command, args, opts);
|
||||||
|
|
||||||
|
if (opts.stdio === 'pipe') {
|
||||||
|
child.stderr.on('data', data => stderrLogs.push(data));
|
||||||
|
}
|
||||||
|
|
||||||
|
child.on('error', reject);
|
||||||
|
child.on('close', (code, signal) => {
|
||||||
|
if (code === 0) {
|
||||||
|
return resolve();
|
||||||
|
}
|
||||||
|
|
||||||
|
const errorLogs = stderrLogs.map(line => line.toString()).join('');
|
||||||
|
if (opts.stdio !== 'inherit') {
|
||||||
|
reject(new Error(`Exited with ${code || signal}\n${errorLogs}`));
|
||||||
|
} else {
|
||||||
|
reject(new Error(`Exited with ${code || signal}`));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function chmodPlusX(fsPath: string) {
|
||||||
|
const s = await fs.stat(fsPath);
|
||||||
|
const newMode = s.mode | 64 | 8 | 1; // eslint-disable-line no-bitwise
|
||||||
|
if (s.mode === newMode) return;
|
||||||
|
const base8 = newMode.toString(8).slice(-3);
|
||||||
|
await fs.chmod(fsPath, base8);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runShellScript(fsPath: string) {
|
||||||
|
assert(path.isAbsolute(fsPath));
|
||||||
|
const destPath = path.dirname(fsPath);
|
||||||
|
await chmodPlusX(fsPath);
|
||||||
|
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function scanParentDirs(destPath: string, scriptName?: string) {
|
||||||
|
assert(path.isAbsolute(destPath));
|
||||||
|
|
||||||
|
let hasScript = false;
|
||||||
|
let hasPackageLockJson = false;
|
||||||
|
let currentDestPath = destPath;
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-constant-condition
|
||||||
|
while (true) {
|
||||||
|
const packageJsonPath = path.join(currentDestPath, 'package.json');
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
if (await fs.pathExists(packageJsonPath)) {
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
const packageJson = JSON.parse(
|
||||||
|
await fs.readFile(packageJsonPath, 'utf8')
|
||||||
|
);
|
||||||
|
hasScript = Boolean(
|
||||||
|
packageJson.scripts && scriptName && packageJson.scripts[scriptName]
|
||||||
|
);
|
||||||
|
// eslint-disable-next-line no-await-in-loop
|
||||||
|
hasPackageLockJson = await fs.pathExists(
|
||||||
|
path.join(currentDestPath, 'package-lock.json')
|
||||||
|
);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
const newDestPath = path.dirname(currentDestPath);
|
||||||
|
if (currentDestPath === newDestPath) break;
|
||||||
|
currentDestPath = newDestPath;
|
||||||
|
}
|
||||||
|
|
||||||
|
return { hasScript, hasPackageLockJson };
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function installDependencies(
|
||||||
|
destPath: string,
|
||||||
|
args: string[] = []
|
||||||
|
) {
|
||||||
|
assert(path.isAbsolute(destPath));
|
||||||
|
|
||||||
|
let commandArgs = args;
|
||||||
|
console.log(`installing to ${destPath}`);
|
||||||
|
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
||||||
|
|
||||||
|
const opts = {
|
||||||
|
env: {
|
||||||
|
...process.env,
|
||||||
|
// This is a little hack to force `node-gyp` to build for the
|
||||||
|
// Node.js version that `@now/node` and `@now/node-server` use
|
||||||
|
npm_config_target: '8.10.0',
|
||||||
|
},
|
||||||
|
stdio: 'pipe',
|
||||||
|
};
|
||||||
|
|
||||||
|
if (hasPackageLockJson) {
|
||||||
|
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||||
|
await spawnAsync(
|
||||||
|
'npm',
|
||||||
|
['install', '--unsafe-perm'].concat(commandArgs),
|
||||||
|
destPath,
|
||||||
|
opts as SpawnOptions
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
await spawnAsync(
|
||||||
|
'yarn',
|
||||||
|
['--ignore-engines', '--cwd', destPath].concat(commandArgs),
|
||||||
|
destPath,
|
||||||
|
opts as SpawnOptions
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runPackageJsonScript(
|
||||||
|
destPath: string,
|
||||||
|
scriptName: string,
|
||||||
|
opts?: SpawnOptions
|
||||||
|
) {
|
||||||
|
assert(path.isAbsolute(destPath));
|
||||||
|
const { hasScript, hasPackageLockJson } = await scanParentDirs(
|
||||||
|
destPath,
|
||||||
|
scriptName
|
||||||
|
);
|
||||||
|
if (!hasScript) return false;
|
||||||
|
|
||||||
|
if (hasPackageLockJson) {
|
||||||
|
console.log(`running "npm run ${scriptName}"`);
|
||||||
|
await spawnAsync('npm', ['run', scriptName], destPath, opts);
|
||||||
|
} else {
|
||||||
|
console.log(`running "yarn run ${scriptName}"`);
|
||||||
|
await spawnAsync(
|
||||||
|
'yarn',
|
||||||
|
['--cwd', destPath, 'run', scriptName],
|
||||||
|
destPath,
|
||||||
|
opts
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// `runNpmInstall` is an alias for `installDependencies`.
export const runNpmInstall = installDependencies;
|
||||||
28
packages/now-build-utils/src/fs/stream-to-buffer.ts
Normal file
28
packages/now-build-utils/src/fs/stream-to-buffer.ts
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
import eos from 'end-of-stream';
|
||||||
|
|
||||||
|
export default function streamToBuffer(
|
||||||
|
stream: NodeJS.ReadableStream
|
||||||
|
): Promise<Buffer> {
|
||||||
|
return new Promise<Buffer>((resolve, reject) => {
|
||||||
|
const buffers: Buffer[] = [];
|
||||||
|
|
||||||
|
stream.on('data', buffers.push.bind(buffers));
|
||||||
|
|
||||||
|
eos(stream, err => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
switch (buffers.length) {
|
||||||
|
case 0:
|
||||||
|
resolve(Buffer.allocUnsafe(0));
|
||||||
|
break;
|
||||||
|
case 1:
|
||||||
|
resolve(buffers[0]);
|
||||||
|
break;
|
||||||
|
default:
|
||||||
|
resolve(Buffer.concat(buffers));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
51
packages/now-build-utils/src/index.ts
Normal file
51
packages/now-build-utils/src/index.ts
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
import FileBlob from './file-blob';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
import FileRef from './file-ref';
|
||||||
|
import {
|
||||||
|
File,
|
||||||
|
Files,
|
||||||
|
AnalyzeOptions,
|
||||||
|
BuildOptions,
|
||||||
|
PrepareCacheOptions,
|
||||||
|
ShouldServeOptions,
|
||||||
|
Meta,
|
||||||
|
} from './types';
|
||||||
|
import { Lambda, createLambda } from './lambda';
|
||||||
|
import download, { DownloadedFiles } from './fs/download';
|
||||||
|
import getWriteableDirectory from './fs/get-writable-directory';
|
||||||
|
import glob from './fs/glob';
|
||||||
|
import rename from './fs/rename';
|
||||||
|
import {
|
||||||
|
installDependencies,
|
||||||
|
runPackageJsonScript,
|
||||||
|
runNpmInstall,
|
||||||
|
runShellScript,
|
||||||
|
} from './fs/run-user-scripts';
|
||||||
|
import streamToBuffer from './fs/stream-to-buffer';
|
||||||
|
import shouldServe from './should-serve';
|
||||||
|
|
||||||
|
export {
|
||||||
|
FileBlob,
|
||||||
|
FileFsRef,
|
||||||
|
FileRef,
|
||||||
|
Files,
|
||||||
|
File,
|
||||||
|
Meta,
|
||||||
|
Lambda,
|
||||||
|
createLambda,
|
||||||
|
download,
|
||||||
|
DownloadedFiles,
|
||||||
|
getWriteableDirectory,
|
||||||
|
glob,
|
||||||
|
rename,
|
||||||
|
installDependencies,
|
||||||
|
runPackageJsonScript,
|
||||||
|
runNpmInstall,
|
||||||
|
runShellScript,
|
||||||
|
streamToBuffer,
|
||||||
|
AnalyzeOptions,
|
||||||
|
BuildOptions,
|
||||||
|
PrepareCacheOptions,
|
||||||
|
ShouldServeOptions,
|
||||||
|
shouldServe,
|
||||||
|
};
|
||||||
107
packages/now-build-utils/src/lambda.ts
Normal file
107
packages/now-build-utils/src/lambda.ts
Normal file
@@ -0,0 +1,107 @@
|
|||||||
|
import assert from 'assert';
|
||||||
|
import Sema from 'async-sema';
|
||||||
|
import { ZipFile } from 'yazl';
|
||||||
|
import { readlink } from 'fs-extra';
|
||||||
|
import { Files } from './types';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
import { isSymbolicLink } from './fs/download';
|
||||||
|
import streamToBuffer from './fs/stream-to-buffer';
|
||||||
|
|
||||||
|
// Environment variables exposed to the Lambda at runtime.
interface Environment {
  [key: string]: string;
}

// Constructor arguments for `Lambda` — the zip bundle is already built.
interface LambdaOptions {
  zipBuffer: Buffer;
  handler: string;
  runtime: string;
  environment: Environment;
}

// Arguments for `createLambda` — raw files that still need to be zipped.
interface CreateLambdaOptions {
  files: Files;
  handler: string;
  runtime: string;
  environment?: Environment;
}
||||||
|
|
||||||
|
// In-memory representation of a Lambda function: a zipped code bundle
// plus the metadata (handler, runtime, env) needed to deploy it.
export class Lambda {
  // Discriminant used by consumers to distinguish output kinds.
  public type: 'Lambda';
  public zipBuffer: Buffer;
  public handler: string;
  public runtime: string;
  public environment: Environment;

  constructor({ zipBuffer, handler, runtime, environment }: LambdaOptions) {
    this.type = 'Lambda';
    this.zipBuffer = zipBuffer;
    this.handler = handler;
    this.runtime = runtime;
    this.environment = environment;
  }
}
|
||||||
|
|
||||||
|
// Cap concurrent zip builds at 10 (see `createLambda`'s acquire/release).
const sema = new Sema(10);
// Fixed timestamp applied to every zip entry, keeping archives deterministic.
const mtime = new Date(1540000000000);
|
||||||
|
|
||||||
|
export async function createLambda({
|
||||||
|
files,
|
||||||
|
handler,
|
||||||
|
runtime,
|
||||||
|
environment = {},
|
||||||
|
}: CreateLambdaOptions): Promise<Lambda> {
|
||||||
|
assert(typeof files === 'object', '"files" must be an object');
|
||||||
|
assert(typeof handler === 'string', '"handler" is not a string');
|
||||||
|
assert(typeof runtime === 'string', '"runtime" is not a string');
|
||||||
|
assert(typeof environment === 'object', '"environment" is not an object');
|
||||||
|
|
||||||
|
await sema.acquire();
|
||||||
|
|
||||||
|
try {
|
||||||
|
const zipBuffer = await createZip(files);
|
||||||
|
return new Lambda({
|
||||||
|
zipBuffer,
|
||||||
|
handler,
|
||||||
|
runtime,
|
||||||
|
environment,
|
||||||
|
});
|
||||||
|
} finally {
|
||||||
|
sema.release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createZip(files: Files): Promise<Buffer> {
|
||||||
|
const names = Object.keys(files).sort();
|
||||||
|
|
||||||
|
const symlinkTargets = new Map<string, string>();
|
||||||
|
for (const name of names) {
|
||||||
|
const file = files[name];
|
||||||
|
if (file.mode && isSymbolicLink(file.mode) && file.type === 'FileFsRef') {
|
||||||
|
const symlinkTarget = await readlink((file as FileFsRef).fsPath);
|
||||||
|
symlinkTargets.set(name, symlinkTarget);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const zipFile = new ZipFile();
|
||||||
|
const zipBuffer = await new Promise<Buffer>((resolve, reject) => {
|
||||||
|
for (const name of names) {
|
||||||
|
const file = files[name];
|
||||||
|
const opts = { mode: file.mode, mtime };
|
||||||
|
const symlinkTarget = symlinkTargets.get(name);
|
||||||
|
if (typeof symlinkTarget === 'string') {
|
||||||
|
zipFile.addBuffer(Buffer.from(symlinkTarget, 'utf8'), name, opts);
|
||||||
|
} else {
|
||||||
|
const stream = file.toStream() as import('stream').Readable;
|
||||||
|
stream.on('error', reject);
|
||||||
|
zipFile.addReadStream(stream, name, opts);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
zipFile.end();
|
||||||
|
streamToBuffer(zipFile.outputStream)
|
||||||
|
.then(resolve)
|
||||||
|
.catch(reject);
|
||||||
|
});
|
||||||
|
|
||||||
|
return zipBuffer;
|
||||||
|
}
|
||||||
27
packages/now-build-utils/src/should-serve.ts
Normal file
27
packages/now-build-utils/src/should-serve.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import { parse } from 'path';
|
||||||
|
import { ShouldServeOptions } from './types';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
|
||||||
|
export default function shouldServe({
|
||||||
|
entrypoint,
|
||||||
|
files,
|
||||||
|
requestPath,
|
||||||
|
}: ShouldServeOptions): boolean {
|
||||||
|
requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/'
|
||||||
|
entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility
|
||||||
|
|
||||||
|
if (entrypoint === requestPath && hasProp(files, entrypoint)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { dir, name } = parse(entrypoint);
|
||||||
|
if (name === 'index' && dir === requestPath && hasProp(files, entrypoint)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
function hasProp(obj: { [path: string]: FileFsRef }, key: string): boolean {
|
||||||
|
return Object.hasOwnProperty.call(obj, key);
|
||||||
|
}
|
||||||
157
packages/now-build-utils/src/types.ts
Normal file
157
packages/now-build-utils/src/types.ts
Normal file
@@ -0,0 +1,157 @@
|
|||||||
|
import FileRef from './file-ref';
|
||||||
|
import FileFsRef from './file-fs-ref';
|
||||||
|
|
||||||
|
export interface File {
|
||||||
|
type: string;
|
||||||
|
mode: number;
|
||||||
|
toStream: () => NodeJS.ReadableStream;
|
||||||
|
/**
|
||||||
|
* The absolute path to the file in the filesystem
|
||||||
|
*/
|
||||||
|
fsPath?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Files {
|
||||||
|
[filePath: string]: File;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Config {
|
||||||
|
[key: string]: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface Meta {
|
||||||
|
isDev?: boolean;
|
||||||
|
requestPath?: string;
|
||||||
|
filesChanged?: string[];
|
||||||
|
filesRemoved?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface AnalyzeOptions {
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: {
|
||||||
|
[filePath: string]: FileRef;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process. This directory will be populated with the restored cache.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface BuildOptions {
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: Files;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process. This directory will be populated with the restored cache.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Metadata related to the invoker of the builder, used by `now dev`.
|
||||||
|
* Builders may use the properties on this object to change behavior based
|
||||||
|
* on the build environment.
|
||||||
|
*/
|
||||||
|
meta?: Meta;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface PrepareCacheOptions {
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: Files;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you can build a cache to use for
|
||||||
|
* the next run.
|
||||||
|
*/
|
||||||
|
cachePath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface ShouldServeOptions {
|
||||||
|
/**
|
||||||
|
* A path string from a request.
|
||||||
|
*/
|
||||||
|
requestPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Name of entrypoint file for this particular build job. Value
|
||||||
|
* `files[entrypoint]` is guaranteed to exist and be a valid File reference.
|
||||||
|
* `entrypoint` is always a discrete file and never a glob, since globs are
|
||||||
|
* expanded into separate builds at deployment time.
|
||||||
|
*/
|
||||||
|
entrypoint: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* All source files of the project
|
||||||
|
*/
|
||||||
|
files: {
|
||||||
|
[path: string]: FileFsRef;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* A writable temporary directory where you are encouraged to perform your
|
||||||
|
* build process. This directory will be populated with the restored cache.
|
||||||
|
*/
|
||||||
|
workPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* An arbitrary object passed by the user in the build definition defined
|
||||||
|
* in `now.json`.
|
||||||
|
*/
|
||||||
|
config: Config;
|
||||||
|
}
|
||||||
5
packages/now-build-utils/test/fixtures/07-cross-install/api/index.js
vendored
Normal file
5
packages/now-build-utils/test/fixtures/07-cross-install/api/index.js
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
const cowsay = require('cowsay').say;
|
||||||
|
|
||||||
|
module.exports = (req, resp) => {
|
||||||
|
resp.end(cowsay({ text: 'cross-cow:RANDOMNESS_PLACEHOLDER' }));
|
||||||
|
};
|
||||||
5
packages/now-build-utils/test/fixtures/07-cross-install/api/package.json
vendored
Normal file
5
packages/now-build-utils/test/fixtures/07-cross-install/api/package.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"dependencies": {
|
||||||
|
"lib": "../lib"
|
||||||
|
}
|
||||||
|
}
|
||||||
7
packages/now-build-utils/test/fixtures/07-cross-install/lib/package.json
vendored
Normal file
7
packages/now-build-utils/test/fixtures/07-cross-install/lib/package.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"name": "lib",
|
||||||
|
"version": "0.0.1",
|
||||||
|
"dependencies": {
|
||||||
|
"cowsay": "*"
|
||||||
|
}
|
||||||
|
}
|
||||||
10
packages/now-build-utils/test/fixtures/07-cross-install/now.json
vendored
Normal file
10
packages/now-build-utils/test/fixtures/07-cross-install/now.json
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "api/index.js", "use": "@now/node" }],
|
||||||
|
"probes": [
|
||||||
|
{
|
||||||
|
"path": "/api/index.js",
|
||||||
|
"mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
11
packages/now-build-utils/test/fixtures/08-yarn-npm/now.json
vendored
Normal file
11
packages/now-build-utils/test/fixtures/08-yarn-npm/now.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{ "src": "with-npm/index.js", "use": "@now/node" },
|
||||||
|
{ "src": "with-yarn/index.js", "use": "@now/node" }
|
||||||
|
],
|
||||||
|
"probes": [
|
||||||
|
{ "path": "/with-npm", "mustContain": "npm:RANDOMNESS_PLACEHOLDER" },
|
||||||
|
{ "path": "/with-yarn", "mustContain": "yarn:RANDOMNESS_PLACEHOLDER" }
|
||||||
|
]
|
||||||
|
}
|
||||||
0
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/index.js
vendored
Normal file
0
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/index.js
vendored
Normal file
14
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/must-be-npm.js
vendored
Normal file
14
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/must-be-npm.js
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const execpath = path.basename(process.env.npm_execpath);
|
||||||
|
console.log('execpath', execpath);
|
||||||
|
|
||||||
|
if (execpath === 'npm-cli.js') {
|
||||||
|
fs.writeFileSync(
|
||||||
|
'index.js',
|
||||||
|
'module.exports = (_, resp) => resp.end("npm:RANDOMNESS_PLACEHOLDER");',
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
throw new Error('npm is expected');
|
||||||
|
}
|
||||||
3
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/package-lock.json
generated
vendored
Normal file
3
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/package-lock.json
generated
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"lockfileVersion": 1
|
||||||
|
}
|
||||||
5
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/package.json
vendored
Normal file
5
packages/now-build-utils/test/fixtures/08-yarn-npm/with-npm/package.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"scripts": {
|
||||||
|
"now-build": "node must-be-npm.js"
|
||||||
|
}
|
||||||
|
}
|
||||||
0
packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/index.js
vendored
Normal file
0
packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/index.js
vendored
Normal file
14
packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/must-be-yarn.js
vendored
Normal file
14
packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/must-be-yarn.js
vendored
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
const fs = require('fs');
|
||||||
|
const path = require('path');
|
||||||
|
|
||||||
|
const execpath = path.basename(process.env.npm_execpath);
|
||||||
|
console.log('execpath', execpath);
|
||||||
|
|
||||||
|
if (execpath === 'yarn.js' || execpath === 'yarn') {
|
||||||
|
fs.writeFileSync(
|
||||||
|
'index.js',
|
||||||
|
'module.exports = (_, resp) => resp.end("yarn:RANDOMNESS_PLACEHOLDER");',
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
throw new Error('yarn is expected');
|
||||||
|
}
|
||||||
5
packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/package.json
vendored
Normal file
5
packages/now-build-utils/test/fixtures/08-yarn-npm/with-yarn/package.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"scripts": {
|
||||||
|
"now-build": "node must-be-yarn.js"
|
||||||
|
}
|
||||||
|
}
|
||||||
9
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/index.js
vendored
Normal file
9
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/index.js
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
const scheduler = require('@google-cloud/scheduler');
|
||||||
|
|
||||||
|
module.exports = (_, res) => {
|
||||||
|
if (scheduler) {
|
||||||
|
res.end('found:RANDOMNESS_PLACEHOLDER');
|
||||||
|
} else {
|
||||||
|
res.end('nope:RANDOMNESS_PLACEHOLDER');
|
||||||
|
}
|
||||||
|
};
|
||||||
11
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/now.json
vendored
Normal file
11
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/now.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{
|
||||||
|
"src": "index.js",
|
||||||
|
"use": "@now/node",
|
||||||
|
"config": { "maxLambdaSize": "15mb" }
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"probes": [{ "path": "/", "mustContain": "found:RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
8
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/package.json
vendored
Normal file
8
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/package.json
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"name": "15-yarn-ignore-engines",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"main": "index.js",
|
||||||
|
"dependencies": {
|
||||||
|
"@google-cloud/scheduler": "0.3.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
1
packages/now-build-utils/test/symlinks/a.txt
Normal file
1
packages/now-build-utils/test/symlinks/a.txt
Normal file
@@ -0,0 +1 @@
|
|||||||
|
contents
|
||||||
1
packages/now-build-utils/test/symlinks/link.txt
Symbolic link
1
packages/now-build-utils/test/symlinks/link.txt
Symbolic link
@@ -0,0 +1 @@
|
|||||||
|
./a.txt
|
||||||
110
packages/now-build-utils/test/test.js
Normal file
110
packages/now-build-utils/test/test.js
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
/* global beforeAll, expect, it, jest */
|
||||||
|
const path = require('path');
|
||||||
|
const fs = require('fs-extra');
|
||||||
|
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||||
|
const execa = require('execa');
|
||||||
|
const assert = require('assert');
|
||||||
|
const { glob, download } = require('../');
|
||||||
|
const { createZip } = require('../dist/lambda');
|
||||||
|
|
||||||
|
const {
|
||||||
|
packAndDeploy,
|
||||||
|
testDeployment,
|
||||||
|
} = require('../../../test/lib/deployment/test-deployment.js');
|
||||||
|
|
||||||
|
jest.setTimeout(4 * 60 * 1000);
|
||||||
|
const builderUrl = '@canary';
|
||||||
|
let buildUtilsUrl;
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
const buildUtilsPath = path.resolve(__dirname, '..');
|
||||||
|
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
|
||||||
|
console.log('buildUtilsUrl', buildUtilsUrl);
|
||||||
|
});
|
||||||
|
|
||||||
|
// unit tests
|
||||||
|
|
||||||
|
it('should re-create symlinks properly', async () => {
|
||||||
|
const files = await glob('**', path.join(__dirname, 'symlinks'));
|
||||||
|
assert.equal(Object.keys(files).length, 2);
|
||||||
|
|
||||||
|
const outDir = path.join(__dirname, 'symlinks-out');
|
||||||
|
await fs.remove(outDir);
|
||||||
|
|
||||||
|
const files2 = await download(files, outDir);
|
||||||
|
assert.equal(Object.keys(files2).length, 2);
|
||||||
|
|
||||||
|
const [linkStat, aStat] = await Promise.all([
|
||||||
|
fs.lstat(path.join(outDir, 'link.txt')),
|
||||||
|
fs.lstat(path.join(outDir, 'a.txt')),
|
||||||
|
]);
|
||||||
|
assert(linkStat.isSymbolicLink());
|
||||||
|
assert(aStat.isFile());
|
||||||
|
});
|
||||||
|
|
||||||
|
it('should create zip files with symlinks properly', async () => {
|
||||||
|
const files = await glob('**', path.join(__dirname, 'symlinks'));
|
||||||
|
assert.equal(Object.keys(files).length, 2);
|
||||||
|
|
||||||
|
const outFile = path.join(__dirname, 'symlinks.zip');
|
||||||
|
await fs.remove(outFile);
|
||||||
|
|
||||||
|
const outDir = path.join(__dirname, 'symlinks-out');
|
||||||
|
await fs.remove(outDir);
|
||||||
|
await fs.mkdirp(outDir);
|
||||||
|
|
||||||
|
await fs.writeFile(outFile, await createZip(files));
|
||||||
|
await execa('unzip', [outFile], { cwd: outDir });
|
||||||
|
|
||||||
|
const [linkStat, aStat] = await Promise.all([
|
||||||
|
fs.lstat(path.join(outDir, 'link.txt')),
|
||||||
|
fs.lstat(path.join(outDir, 'a.txt')),
|
||||||
|
]);
|
||||||
|
assert(linkStat.isSymbolicLink());
|
||||||
|
assert(aStat.isFile());
|
||||||
|
});
|
||||||
|
|
||||||
|
// own fixtures
|
||||||
|
|
||||||
|
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-restricted-syntax
|
||||||
|
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||||
|
// eslint-disable-next-line no-loop-func
|
||||||
|
it(`should build ${fixture}`, async () => {
|
||||||
|
await expect(
|
||||||
|
testDeployment(
|
||||||
|
{ builderUrl, buildUtilsUrl },
|
||||||
|
path.join(fixturesPath, fixture),
|
||||||
|
),
|
||||||
|
).resolves.toBeDefined();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// few foreign tests
|
||||||
|
|
||||||
|
const buildersToTestWith = ['now-node-server', 'now-static-build'];
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-restricted-syntax
|
||||||
|
for (const builder of buildersToTestWith) {
|
||||||
|
const fixturesPath2 = path.resolve(
|
||||||
|
__dirname,
|
||||||
|
`../../${builder}/test/fixtures`,
|
||||||
|
);
|
||||||
|
|
||||||
|
// eslint-disable-next-line no-restricted-syntax
|
||||||
|
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
||||||
|
// don't run all foreign fixtures, just some
|
||||||
|
if (['01-cowsay', '03-env-vars'].includes(fixture)) {
|
||||||
|
// eslint-disable-next-line no-loop-func
|
||||||
|
it(`should build ${builder}/${fixture}`, async () => {
|
||||||
|
await expect(
|
||||||
|
testDeployment(
|
||||||
|
{ builderUrl, buildUtilsUrl },
|
||||||
|
path.join(fixturesPath2, fixture),
|
||||||
|
),
|
||||||
|
).resolves.toBeDefined();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
20
packages/now-build-utils/tsconfig.json
Normal file
20
packages/now-build-utils/tsconfig.json
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"declaration": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"lib": ["esnext"],
|
||||||
|
"module": "commonjs",
|
||||||
|
"moduleResolution": "node",
|
||||||
|
"noEmitOnError": true,
|
||||||
|
"noFallthroughCasesInSwitch": true,
|
||||||
|
"noImplicitReturns": true,
|
||||||
|
"noUnusedLocals": true,
|
||||||
|
"noUnusedParameters": true,
|
||||||
|
"outDir": "./dist",
|
||||||
|
"types": ["node"],
|
||||||
|
"strict": true,
|
||||||
|
"target": "esnext"
|
||||||
|
},
|
||||||
|
"include": ["src/**/*"],
|
||||||
|
"exclude": ["node_modules"]
|
||||||
|
}
|
||||||
@@ -1,4 +1,7 @@
|
|||||||
#!/usr/bin/env bash
|
#!/usr/bin/env bash
|
||||||
export GOOS=linux
|
export GOOS=linux
|
||||||
export GOARCH=amd64
|
export GOARCH=amd64
|
||||||
|
export GOPATH=$HOME/go
|
||||||
|
go get github.com/aws/aws-lambda-go/events
|
||||||
|
go get github.com/aws/aws-lambda-go/lambda
|
||||||
go build -o handler main.go
|
go build -o handler main.go
|
||||||
|
|||||||
@@ -1,19 +1,21 @@
|
|||||||
const path = require('path');
|
const path = require('path');
|
||||||
const { mkdirp, copyFile } = require('fs-extra');
|
const { mkdirp, copyFile } = require('fs-extra');
|
||||||
|
|
||||||
const glob = require('@now/build-utils/fs/glob');
|
const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const download = require('@now/build-utils/fs/download');
|
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const { createLambda } = require('@now/build-utils/lambda');
|
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory');
|
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
|
const { shouldServe } = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
|
||||||
|
|
||||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||||
|
|
||||||
exports.build = async ({ files, entrypoint }) => {
|
exports.build = async ({
|
||||||
|
workPath, files, entrypoint, meta,
|
||||||
|
}) => {
|
||||||
console.log('downloading files...');
|
console.log('downloading files...');
|
||||||
const srcDir = await getWritableDirectory();
|
|
||||||
const outDir = await getWritableDirectory();
|
const outDir = await getWritableDirectory();
|
||||||
|
|
||||||
await download(files, srcDir);
|
await download(files, workPath, meta);
|
||||||
|
|
||||||
const handlerPath = path.join(__dirname, 'handler');
|
const handlerPath = path.join(__dirname, 'handler');
|
||||||
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
||||||
@@ -23,7 +25,7 @@ exports.build = async ({ files, entrypoint }) => {
|
|||||||
|
|
||||||
// For now only the entrypoint file is copied into the lambda
|
// For now only the entrypoint file is copied into the lambda
|
||||||
await copyFile(
|
await copyFile(
|
||||||
path.join(srcDir, entrypoint),
|
path.join(workPath, entrypoint),
|
||||||
path.join(outDir, entrypoint),
|
path.join(outDir, entrypoint),
|
||||||
);
|
);
|
||||||
|
|
||||||
@@ -40,3 +42,5 @@ exports.build = async ({ files, entrypoint }) => {
|
|||||||
[entrypoint]: lambda,
|
[entrypoint]: lambda,
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
|
exports.shouldServe = shouldServe;
|
||||||
|
|||||||
@@ -1,152 +1,36 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"os"
|
now "../../utils/go/bridge"
|
||||||
"fmt"
|
|
||||||
"net"
|
|
||||||
"strings"
|
|
||||||
"io/ioutil"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"net/http/cgi"
|
"net/http/cgi"
|
||||||
|
"os"
|
||||||
"path/filepath"
|
"path/filepath"
|
||||||
"encoding/json"
|
|
||||||
b64 "encoding/base64"
|
|
||||||
"github.com/aws/aws-lambda-go/events"
|
|
||||||
"github.com/aws/aws-lambda-go/lambda"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type Request struct {
|
|
||||||
Host string `json:"host"`
|
|
||||||
Path string `json:"path"`
|
|
||||||
Method string `json:"method"`
|
|
||||||
Headers map[string]string `json:"headers"`
|
|
||||||
Encoding string `json:"encoding,omitempty"`
|
|
||||||
Body string `json:"body"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Response struct {
|
|
||||||
StatusCode int `json:"statusCode"`
|
|
||||||
Headers map[string]string `json:"headers"`
|
|
||||||
Encoding string `json:"encoding,omitemtpy"`
|
|
||||||
Body string `json:"body"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ResponseError struct {
|
|
||||||
Code string `json:"code"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ResponseErrorWrapper struct {
|
|
||||||
Error ResponseError `json:"error"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type CgiHandler struct {
|
type CgiHandler struct {
|
||||||
http.Handler
|
http.Handler
|
||||||
Dir string
|
Dir string
|
||||||
Script string
|
Script string
|
||||||
}
|
}
|
||||||
|
|
||||||
func createErrorResponse(message string, code string, statusCode int) (Response, error) {
|
|
||||||
obj := ResponseErrorWrapper{
|
|
||||||
Error: ResponseError{
|
|
||||||
Code: code,
|
|
||||||
Message: message,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
body, _ := json.Marshal(obj)
|
|
||||||
|
|
||||||
return Response{
|
|
||||||
StatusCode: statusCode,
|
|
||||||
Headers: map[string]string{
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
Body: string(body),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func (h *CgiHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
func (h *CgiHandler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
|
||||||
cgih := cgi.Handler{
|
handler := cgi.Handler{
|
||||||
Path: h.Script,
|
Path: h.Script,
|
||||||
Root: "/" + h.Script,
|
Root: "/" + h.Script,
|
||||||
Dir: h.Dir,
|
Dir: h.Dir,
|
||||||
Env: []string{"SERVER_PORT=443"},
|
Env: []string{
|
||||||
|
"HTTPS=on",
|
||||||
|
"SERVER_PORT=443",
|
||||||
|
"SERVER_SOFTWARE=@now/cgi",
|
||||||
|
},
|
||||||
}
|
}
|
||||||
cgih.ServeHTTP(w, r)
|
handler.ServeHTTP(w, r)
|
||||||
}
|
}
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
l, err := net.Listen("tcp", ":0")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
workdir, _ := filepath.Abs(".")
|
workdir, _ := filepath.Abs(".")
|
||||||
script := os.Getenv("SCRIPT_FILENAME")
|
script := os.Getenv("SCRIPT_FILENAME")
|
||||||
h := &CgiHandler{nil, workdir, script}
|
handler := &CgiHandler{nil, workdir, script}
|
||||||
|
now.Start(handler)
|
||||||
http.Handle("/", h)
|
|
||||||
go http.Serve(l, nil)
|
|
||||||
|
|
||||||
handler := func(_req events.APIGatewayProxyRequest) (Response, error) {
|
|
||||||
var req Request
|
|
||||||
|
|
||||||
err := json.Unmarshal([]byte(_req.Body), &req)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
return createErrorResponse("Invalid payload", "bad_request", 400)
|
|
||||||
}
|
|
||||||
|
|
||||||
if req.Encoding == "base64" {
|
|
||||||
decoded, _ := b64.StdEncoding.DecodeString(req.Body)
|
|
||||||
req.Body = string(decoded)
|
|
||||||
}
|
|
||||||
|
|
||||||
url := "http://" + l.Addr().String() + req.Path
|
|
||||||
|
|
||||||
internalReq, err := http.NewRequest(req.Method, url, strings.NewReader(req.Body))
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
return createErrorResponse("Bad gateway internal req failed", "bad_gateway", 502)
|
|
||||||
}
|
|
||||||
|
|
||||||
for k, v := range req.Headers {
|
|
||||||
internalReq.Header.Add(k, v)
|
|
||||||
if strings.ToLower(k) == "host" {
|
|
||||||
internalReq.Host = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
client := &http.Client{}
|
|
||||||
internalRes, err := client.Do(internalReq)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
return createErrorResponse("Bad gateway internal req Do failed", "bad_gateway", 502)
|
|
||||||
}
|
|
||||||
defer internalRes.Body.Close()
|
|
||||||
|
|
||||||
resHeaders := make(map[string]string, len(internalRes.Header))
|
|
||||||
for k, v := range internalRes.Header {
|
|
||||||
// FIXME: support multiple values via concatenating with ','
|
|
||||||
// see RFC 7230, section 3.2.2
|
|
||||||
resHeaders[k] = v[0]
|
|
||||||
}
|
|
||||||
|
|
||||||
bodyBytes, err := ioutil.ReadAll(internalRes.Body)
|
|
||||||
if err != nil {
|
|
||||||
return createErrorResponse("Bad gateway ReadAll bytes from response failed", "bad_gateway", 502)
|
|
||||||
}
|
|
||||||
|
|
||||||
resBody := b64.StdEncoding.EncodeToString(bodyBytes)
|
|
||||||
|
|
||||||
return Response{
|
|
||||||
StatusCode: internalRes.StatusCode,
|
|
||||||
Headers: resHeaders,
|
|
||||||
Encoding: "base64",
|
|
||||||
Body: resBody,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
lambda.Start(handler)
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/cgi",
|
"name": "@now/cgi",
|
||||||
"version": "0.0.12",
|
"version": "0.1.4",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
|
"directory": "packages/now-cgi"
|
||||||
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "best -I test/*.js",
|
"test": "best -I test/*.js",
|
||||||
"prepublish": "./build.sh"
|
"prepublish": "./build.sh"
|
||||||
@@ -15,8 +21,5 @@
|
|||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@zeit/best": "0.4.3",
|
"@zeit/best": "0.4.3",
|
||||||
"rmfr": "2.0.0"
|
"rmfr": "2.0.0"
|
||||||
},
|
|
||||||
"peerDependencies": {
|
|
||||||
"@now/build-utils": ">=0.0.1"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
5
packages/now-go/.gitignore
vendored
5
packages/now-go/.gitignore
vendored
@@ -1,3 +1,6 @@
|
|||||||
node_modules
|
node_modules
|
||||||
*.log
|
*.log
|
||||||
bin
|
/go
|
||||||
|
/analyze
|
||||||
|
*.js
|
||||||
|
!util/install.js
|
||||||
|
|||||||
5
packages/now-go/.npmignore
Normal file
5
packages/now-go/.npmignore
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
*.ts
|
||||||
|
test
|
||||||
|
tsconfig.json
|
||||||
|
package-lock.json
|
||||||
|
yarn.lock
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
|
|
||||||
mkdir -p bin
|
|
||||||
cd util
|
|
||||||
GOOS=linux GOARCH=amd64 go build get-exported-function-name.go
|
|
||||||
mv get-exported-function-name ../bin/
|
|
||||||
|
|
||||||
@@ -1,23 +0,0 @@
|
|||||||
const path = require('path');
|
|
||||||
|
|
||||||
const fetch = require('node-fetch');
|
|
||||||
const tar = require('tar');
|
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
|
|
||||||
|
|
||||||
const url = 'https://dl.google.com/go/go1.11.1.linux-amd64.tar.gz';
|
|
||||||
|
|
||||||
module.exports = async () => {
|
|
||||||
const res = await fetch(url);
|
|
||||||
const dir = await getWritableDirectory();
|
|
||||||
|
|
||||||
if (!res.ok) {
|
|
||||||
throw new Error(`Failed to download: ${url}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return new Promise((resolve, reject) => {
|
|
||||||
res.body
|
|
||||||
.on('error', reject)
|
|
||||||
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
|
||||||
.on('finish', () => resolve(path.join(dir, 'bin', 'go')));
|
|
||||||
});
|
|
||||||
};
|
|
||||||
170
packages/now-go/go-helpers.ts
Normal file
170
packages/now-go/go-helpers.ts
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
import tar from 'tar';
|
||||||
|
import execa from 'execa';
|
||||||
|
import fetch from 'node-fetch';
|
||||||
|
import { mkdirp, pathExists } from 'fs-extra';
|
||||||
|
import { dirname, join } from 'path';
|
||||||
|
import { homedir } from 'os';
|
||||||
|
import Debug from 'debug';
|
||||||
|
|
||||||
|
const debug = Debug('@now/go:go-helpers');
|
||||||
|
const archMap = new Map([['x64', 'amd64'], ['x86', '386']]);
|
||||||
|
const platformMap = new Map([['win32', 'windows']]);
|
||||||
|
|
||||||
|
// Location where the `go` binary will be installed after `postinstall`
|
||||||
|
const GO_DIR = join(__dirname, 'go');
|
||||||
|
const GO_BIN = join(GO_DIR, 'bin/go');
|
||||||
|
|
||||||
|
const getPlatform = (p: string) => platformMap.get(p) || p;
|
||||||
|
const getArch = (a: string) => archMap.get(a) || a;
|
||||||
|
const getGoUrl = (version: string, platform: string, arch: string) => {
|
||||||
|
const goArch = getArch(arch);
|
||||||
|
const goPlatform = getPlatform(platform);
|
||||||
|
const ext = platform === 'win32' ? 'zip' : 'tar.gz';
|
||||||
|
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function getAnalyzedEntrypoint(filePath: string, modulePath = '') {
|
||||||
|
debug('Analyzing entrypoint %o', filePath);
|
||||||
|
const bin = join(__dirname, 'analyze');
|
||||||
|
|
||||||
|
const isAnalyzeExist = await pathExists(bin);
|
||||||
|
if (!isAnalyzeExist) {
|
||||||
|
const src = join(__dirname, 'util', 'analyze.go');
|
||||||
|
const dest = join(__dirname, 'analyze');
|
||||||
|
const go = await downloadGo();
|
||||||
|
await go.build(src, dest);
|
||||||
|
}
|
||||||
|
|
||||||
|
const args = [`-modpath=${modulePath}`, filePath];
|
||||||
|
|
||||||
|
const analyzed = await execa.stdout(bin, args);
|
||||||
|
debug('Analyzed entrypoint %o', analyzed);
|
||||||
|
return analyzed;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Creates a `$GOPATH` directory tree, as per `go help gopath` instructions.
|
||||||
|
// Without this, `go` won't recognize the `$GOPATH`.
|
||||||
|
function createGoPathTree(goPath: string, platform: string, arch: string) {
|
||||||
|
const tuple = `${getPlatform(platform)}_${getArch(arch)}`;
|
||||||
|
debug('Creating GOPATH directory structure for %o (%s)', goPath, tuple);
|
||||||
|
return Promise.all([
|
||||||
|
mkdirp(join(goPath, 'bin')),
|
||||||
|
mkdirp(join(goPath, 'pkg', tuple)),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
class GoWrapper {
|
||||||
|
private env: { [key: string]: string };
|
||||||
|
private opts: execa.Options;
|
||||||
|
|
||||||
|
constructor(env: { [key: string]: string }, opts: execa.Options = {}) {
|
||||||
|
if (!opts.cwd) {
|
||||||
|
opts.cwd = process.cwd();
|
||||||
|
}
|
||||||
|
this.env = env;
|
||||||
|
this.opts = opts;
|
||||||
|
}
|
||||||
|
|
||||||
|
private execute(...args: string[]) {
|
||||||
|
const { opts, env } = this;
|
||||||
|
debug('Exec %o', `go ${args.join(' ')}`);
|
||||||
|
return execa('go', args, { stdio: 'inherit', ...opts, env });
|
||||||
|
}
|
||||||
|
|
||||||
|
mod() {
|
||||||
|
return this.execute('mod', 'tidy');
|
||||||
|
}
|
||||||
|
|
||||||
|
get(src?: string) {
|
||||||
|
const args = ['get'];
|
||||||
|
if (src) {
|
||||||
|
debug('Fetching `go` dependencies for file %o', src);
|
||||||
|
args.push(src);
|
||||||
|
} else {
|
||||||
|
debug('Fetching `go` dependencies for cwd %o', this.opts.cwd);
|
||||||
|
}
|
||||||
|
return this.execute(...args);
|
||||||
|
}
|
||||||
|
|
||||||
|
build(src: string | string[], dest: string, ldsflags = '-s -w') {
|
||||||
|
debug('Building optimized `go` binary %o -> %o', src, dest);
|
||||||
|
const sources = Array.isArray(src) ? src : [src];
|
||||||
|
return this.execute('build', '-ldflags', ldsflags, '-o', dest, ...sources);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createGo(
|
||||||
|
goPath: string,
|
||||||
|
platform = process.platform,
|
||||||
|
arch = process.arch,
|
||||||
|
opts: execa.Options = {},
|
||||||
|
goMod = false
|
||||||
|
) {
|
||||||
|
const path = `${dirname(GO_BIN)}:${process.env.PATH}`;
|
||||||
|
const env: { [key: string]: string } = {
|
||||||
|
...process.env,
|
||||||
|
PATH: path,
|
||||||
|
GOPATH: goPath,
|
||||||
|
...opts.env,
|
||||||
|
};
|
||||||
|
if (goMod) {
|
||||||
|
env.GO111MODULE = 'on';
|
||||||
|
}
|
||||||
|
await createGoPathTree(goPath, platform, arch);
|
||||||
|
return new GoWrapper(env, opts);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function downloadGo(
|
||||||
|
dir = GO_DIR,
|
||||||
|
version = '1.12',
|
||||||
|
platform = process.platform,
|
||||||
|
arch = process.arch
|
||||||
|
) {
|
||||||
|
// Check default `Go` in user machine
|
||||||
|
const isUserGo = await pathExists(join(homedir(), 'go'));
|
||||||
|
|
||||||
|
// If we found GOPATH in ENV, or default `Go` path exists
|
||||||
|
// asssume that user have `Go` installed
|
||||||
|
if (isUserGo || process.env.GOPATH !== undefined) {
|
||||||
|
const { stdout } = await execa('go', ['version']);
|
||||||
|
|
||||||
|
if (parseInt(stdout.split('.')[1]) >= 11) {
|
||||||
|
return createGo(dir, platform, arch);
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(
|
||||||
|
`Your current ${stdout} doesn't support Go Modules. Please update.`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
// Check `Go` bin in builder CWD
|
||||||
|
const isGoExist = await pathExists(join(dir, 'bin'));
|
||||||
|
if (!isGoExist) {
|
||||||
|
debug(
|
||||||
|
'Installing `go` v%s to %o for %s %s',
|
||||||
|
version,
|
||||||
|
dir,
|
||||||
|
platform,
|
||||||
|
arch
|
||||||
|
);
|
||||||
|
const url = getGoUrl(version, platform, arch);
|
||||||
|
debug('Downloading `go` URL: %o', url);
|
||||||
|
console.log('Downloading Go ...');
|
||||||
|
const res = await fetch(url);
|
||||||
|
|
||||||
|
if (!res.ok) {
|
||||||
|
throw new Error(`Failed to download: ${url} (${res.status})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: use a zip extractor when `ext === "zip"`
|
||||||
|
await mkdirp(dir);
|
||||||
|
await new Promise((resolve, reject) => {
|
||||||
|
res.body
|
||||||
|
.on('error', reject)
|
||||||
|
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
||||||
|
.on('error', reject)
|
||||||
|
.on('finish', resolve);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return createGo(dir, platform, arch);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,136 +0,0 @@
|
|||||||
const path = require('path');
|
|
||||||
const { mkdirp, readFile, writeFile } = require('fs-extra');
|
|
||||||
|
|
||||||
const execa = require('execa');
|
|
||||||
const { createLambda } = require('@now/build-utils/lambda.js');
|
|
||||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
|
|
||||||
const download = require('@now/build-utils/fs/download.js');
|
|
||||||
const downloadGit = require('lambda-git');
|
|
||||||
const glob = require('@now/build-utils/fs/glob.js');
|
|
||||||
const downloadGoBin = require('./download-go-bin');
|
|
||||||
|
|
||||||
// creates a `$GOPATH` direcotry tree, as per
|
|
||||||
// `go help gopath`'s instructions.
|
|
||||||
// without this, Go won't recognize the `$GOPATH`
|
|
||||||
async function createGoPathTree(goPath) {
|
|
||||||
await mkdirp(path.join(goPath, 'bin'));
|
|
||||||
await mkdirp(path.join(goPath, 'pkg', 'linux_amd64'));
|
|
||||||
}
|
|
||||||
|
|
||||||
exports.config = {
|
|
||||||
maxLambdaSize: '10mb',
|
|
||||||
};
|
|
||||||
|
|
||||||
exports.build = async ({ files, entrypoint }) => {
|
|
||||||
console.log('downloading files...');
|
|
||||||
|
|
||||||
const gitPath = await getWritableDirectory();
|
|
||||||
const goPath = await getWritableDirectory();
|
|
||||||
const srcPath = path.join(goPath, 'src', 'lambda');
|
|
||||||
const outDir = await getWritableDirectory();
|
|
||||||
|
|
||||||
await createGoPathTree(goPath);
|
|
||||||
|
|
||||||
const downloadedFiles = await download(files, srcPath);
|
|
||||||
|
|
||||||
console.log('downloading go binary...');
|
|
||||||
const goBin = await downloadGoBin();
|
|
||||||
|
|
||||||
console.log('downloading git binary...');
|
|
||||||
// downloads a git binary that works on Amazon Linux and sets
|
|
||||||
// `process.env.GIT_EXEC_PATH` so `go(1)` can see it
|
|
||||||
await downloadGit({ targetDirectory: gitPath });
|
|
||||||
|
|
||||||
const goEnv = {
|
|
||||||
...process.env,
|
|
||||||
GOOS: 'linux',
|
|
||||||
GOARCH: 'amd64',
|
|
||||||
GOPATH: goPath,
|
|
||||||
};
|
|
||||||
|
|
||||||
console.log(`parsing AST for "${entrypoint}"`);
|
|
||||||
let handlerFunctionName = '';
|
|
||||||
try {
|
|
||||||
handlerFunctionName = await execa.stdout(
|
|
||||||
path.join(__dirname, 'bin', 'get-exported-function-name'),
|
|
||||||
[downloadedFiles[entrypoint].fsPath],
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
console.log(`failed to parse AST for "${entrypoint}"`);
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (handlerFunctionName === '') {
|
|
||||||
const e = new Error(
|
|
||||||
`Could not find an exported function on "${entrypoint}"`,
|
|
||||||
);
|
|
||||||
console.log(e.message);
|
|
||||||
throw e;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log(
|
|
||||||
`Found exported function "${handlerFunctionName}" on "${entrypoint}"`,
|
|
||||||
);
|
|
||||||
|
|
||||||
const origianlMainGoContents = await readFile(
|
|
||||||
path.join(__dirname, 'main.go'),
|
|
||||||
'utf8',
|
|
||||||
);
|
|
||||||
const mainGoContents = origianlMainGoContents.replace(
|
|
||||||
'__NOW_HANDLER_FUNC_NAME',
|
|
||||||
handlerFunctionName,
|
|
||||||
);
|
|
||||||
// in order to allow the user to have `main.go`, we need our `main.go` to be called something else
|
|
||||||
const mainGoFileName = 'main__now__go__.go';
|
|
||||||
|
|
||||||
// we need `main.go` in the same dir as the entrypoint,
|
|
||||||
// otherwise `go build` will refuse to build
|
|
||||||
const entrypointDirname = path.dirname(downloadedFiles[entrypoint].fsPath);
|
|
||||||
|
|
||||||
// Go doesn't like to build files in different directories,
|
|
||||||
// so now we place `main.go` together with the user code
|
|
||||||
await writeFile(path.join(entrypointDirname, mainGoFileName), mainGoContents);
|
|
||||||
|
|
||||||
console.log('installing dependencies');
|
|
||||||
// `go get` will look at `*.go` (note we set `cwd`), parse
|
|
||||||
// the `import`s and download any packages that aren't part of the stdlib
|
|
||||||
try {
|
|
||||||
await execa(goBin, ['get'], {
|
|
||||||
env: goEnv,
|
|
||||||
cwd: entrypointDirname,
|
|
||||||
stdio: 'inherit',
|
|
||||||
});
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to `go get`');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
console.log('running go build...');
|
|
||||||
try {
|
|
||||||
await execa(
|
|
||||||
goBin,
|
|
||||||
[
|
|
||||||
'build',
|
|
||||||
'-o',
|
|
||||||
path.join(outDir, 'handler'),
|
|
||||||
path.join(entrypointDirname, mainGoFileName),
|
|
||||||
downloadedFiles[entrypoint].fsPath,
|
|
||||||
],
|
|
||||||
{ env: goEnv, cwd: entrypointDirname, stdio: 'inherit' },
|
|
||||||
);
|
|
||||||
} catch (err) {
|
|
||||||
console.log('failed to `go build`');
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
|
|
||||||
const lambda = await createLambda({
|
|
||||||
files: await glob('**', outDir),
|
|
||||||
handler: 'handler',
|
|
||||||
runtime: 'go1.x',
|
|
||||||
environment: {},
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
[entrypoint]: lambda,
|
|
||||||
};
|
|
||||||
};
|
|
||||||
388
packages/now-go/index.ts
Normal file
388
packages/now-go/index.ts
Normal file
@@ -0,0 +1,388 @@
|
|||||||
|
import { join, sep, dirname, basename } from 'path';
|
||||||
|
import { readFile, writeFile, pathExists, move } from 'fs-extra';
|
||||||
|
import { homedir } from 'os';
|
||||||
|
import execa from 'execa';
|
||||||
|
|
||||||
|
import {
|
||||||
|
glob,
|
||||||
|
download,
|
||||||
|
createLambda,
|
||||||
|
getWriteableDirectory,
|
||||||
|
BuildOptions,
|
||||||
|
shouldServe,
|
||||||
|
Files,
|
||||||
|
} from '@now/build-utils';
|
||||||
|
|
||||||
|
import { createGo, getAnalyzedEntrypoint } from './go-helpers';
|
||||||
|
|
||||||
|
interface Analyzed {
|
||||||
|
found?: boolean;
|
||||||
|
packageName: string;
|
||||||
|
functionName: string;
|
||||||
|
watch: string[];
|
||||||
|
}
|
||||||
|
interface BuildParamsMeta {
|
||||||
|
isDev: boolean | undefined;
|
||||||
|
}
|
||||||
|
interface BuildParamsType extends BuildOptions {
|
||||||
|
files: Files;
|
||||||
|
entrypoint: string;
|
||||||
|
workPath: string;
|
||||||
|
meta: BuildParamsMeta;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Initialize private git repo for Go Modules
|
||||||
|
async function initPrivateGit(credentials: string) {
|
||||||
|
await execa('git', [
|
||||||
|
'config',
|
||||||
|
'--global',
|
||||||
|
'credential.helper',
|
||||||
|
`store --file ${join(homedir(), '.git-credentials')}`,
|
||||||
|
]);
|
||||||
|
|
||||||
|
await writeFile(join(homedir(), '.git-credentials'), credentials);
|
||||||
|
}
|
||||||
|
|
||||||
|
export const version = 2;
|
||||||
|
|
||||||
|
export const config = {
|
||||||
|
maxLambdaSize: '10mb',
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function build({
|
||||||
|
files,
|
||||||
|
entrypoint,
|
||||||
|
config,
|
||||||
|
workPath,
|
||||||
|
meta = {} as BuildParamsMeta,
|
||||||
|
}: BuildParamsType) {
|
||||||
|
if (process.env.GIT_CREDENTIALS && !meta.isDev) {
|
||||||
|
console.log('Initialize Git credentials...');
|
||||||
|
await initPrivateGit(process.env.GIT_CREDENTIALS);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Downloading user files...');
|
||||||
|
const entrypointArr = entrypoint.split(sep);
|
||||||
|
|
||||||
|
let [goPath, outDir] = await Promise.all([
|
||||||
|
getWriteableDirectory(),
|
||||||
|
getWriteableDirectory(),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const srcPath = join(goPath, 'src', 'lambda');
|
||||||
|
let downloadedFiles;
|
||||||
|
if (meta.isDev) {
|
||||||
|
downloadedFiles = await download(files, workPath, meta);
|
||||||
|
} else {
|
||||||
|
downloadedFiles = await download(files, srcPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Parsing AST for "${entrypoint}"`);
|
||||||
|
let analyzed: string;
|
||||||
|
try {
|
||||||
|
let goModAbsPathDir = '';
|
||||||
|
for (const file of Object.keys(downloadedFiles)) {
|
||||||
|
if (file === 'go.mod') {
|
||||||
|
goModAbsPathDir = dirname(downloadedFiles[file].fsPath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
analyzed = await getAnalyzedEntrypoint(
|
||||||
|
downloadedFiles[entrypoint].fsPath,
|
||||||
|
goModAbsPathDir
|
||||||
|
);
|
||||||
|
} catch (err) {
|
||||||
|
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!analyzed) {
|
||||||
|
const err = new Error(
|
||||||
|
`Could not find an exported function in "${entrypoint}"
|
||||||
|
Learn more: https://zeit.co/docs/v2/deployments/official-builders/go-now-go/#entrypoint
|
||||||
|
`
|
||||||
|
);
|
||||||
|
console.log(err.message);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
const parsedAnalyzed = JSON.parse(analyzed) as Analyzed;
|
||||||
|
|
||||||
|
if (meta.isDev) {
|
||||||
|
const base = dirname(downloadedFiles['now.json'].fsPath);
|
||||||
|
const destNow = join(
|
||||||
|
base,
|
||||||
|
'.now',
|
||||||
|
'cache',
|
||||||
|
basename(entrypoint, '.go'),
|
||||||
|
'src',
|
||||||
|
'lambda'
|
||||||
|
);
|
||||||
|
// this will ensure Go rebuilt fast
|
||||||
|
goPath = join(base, '.now', 'cache', basename(entrypoint, '.go'));
|
||||||
|
await download(downloadedFiles, destNow);
|
||||||
|
|
||||||
|
downloadedFiles = await glob('**', destNow);
|
||||||
|
}
|
||||||
|
|
||||||
|
// find `go.mod` in downloadedFiles
|
||||||
|
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||||
|
let isGoModExist = false;
|
||||||
|
let goModPath = '';
|
||||||
|
let goModPathArr: string[] = [];
|
||||||
|
for (const file of Object.keys(downloadedFiles)) {
|
||||||
|
const fileDirname = dirname(downloadedFiles[file].fsPath);
|
||||||
|
if (file === 'go.mod') {
|
||||||
|
isGoModExist = true;
|
||||||
|
goModPath = fileDirname;
|
||||||
|
goModPathArr = goModPath.split(sep);
|
||||||
|
} else if (file.includes('go.mod')) {
|
||||||
|
isGoModExist = true;
|
||||||
|
if (entrypointDirname === fileDirname) {
|
||||||
|
goModPath = fileDirname;
|
||||||
|
goModPathArr = goModPath.split(sep);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const input = entrypointDirname;
|
||||||
|
var includedFiles: Files = {};
|
||||||
|
|
||||||
|
if (config && config.includeFiles) {
|
||||||
|
for (const pattern of config.includeFiles) {
|
||||||
|
const files = await glob(pattern, input);
|
||||||
|
for (const assetName of Object.keys(files)) {
|
||||||
|
includedFiles[assetName] = files[assetName];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const handlerFunctionName = parsedAnalyzed.functionName;
|
||||||
|
console.log(
|
||||||
|
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`
|
||||||
|
);
|
||||||
|
|
||||||
|
// check if package name other than main
|
||||||
|
// using `go.mod` way building the handler
|
||||||
|
const packageName = parsedAnalyzed.packageName;
|
||||||
|
|
||||||
|
if (isGoModExist && packageName === 'main') {
|
||||||
|
throw new Error('Please change `package main` to `package handler`');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (packageName !== 'main') {
|
||||||
|
const go = await createGo(
|
||||||
|
goPath,
|
||||||
|
process.platform,
|
||||||
|
process.arch,
|
||||||
|
{
|
||||||
|
cwd: entrypointDirname,
|
||||||
|
},
|
||||||
|
true
|
||||||
|
);
|
||||||
|
if (!isGoModExist) {
|
||||||
|
try {
|
||||||
|
const defaultGoModContent = `module ${packageName}`;
|
||||||
|
|
||||||
|
await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
|
||||||
|
} catch (err) {
|
||||||
|
console.log(`failed to create default go.mod for ${packageName}`);
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const mainModGoFileName = 'main__mod__.go';
|
||||||
|
const modMainGoContents = await readFile(
|
||||||
|
join(__dirname, mainModGoFileName),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
|
|
||||||
|
let goPackageName = `${packageName}/${packageName}`;
|
||||||
|
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
||||||
|
|
||||||
|
if (isGoModExist) {
|
||||||
|
const goModContents = await readFile(join(goModPath, 'go.mod'), 'utf8');
|
||||||
|
const usrModName = goModContents.split('\n')[0].split(' ')[1];
|
||||||
|
goPackageName = `${usrModName}/${packageName}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
const mainModGoContents = modMainGoContents
|
||||||
|
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
||||||
|
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
||||||
|
|
||||||
|
if (goModPathArr.length > 1) {
|
||||||
|
// using `go.mod` path to write main__mod__.go
|
||||||
|
await writeFile(join(goModPath, mainModGoFileName), mainModGoContents);
|
||||||
|
} else {
|
||||||
|
// using `entrypointDirname` to write main__mod__.go
|
||||||
|
await writeFile(
|
||||||
|
join(entrypointDirname, mainModGoFileName),
|
||||||
|
mainModGoContents
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
// move user go file to folder
|
||||||
|
try {
|
||||||
|
// default path
|
||||||
|
let finalDestination = join(entrypointDirname, packageName, entrypoint);
|
||||||
|
let forceMove = false;
|
||||||
|
|
||||||
|
if (meta.isDev) {
|
||||||
|
forceMove = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// if `entrypoint` include folder, only use filename
|
||||||
|
if (entrypointArr.length > 1) {
|
||||||
|
finalDestination = join(
|
||||||
|
entrypointDirname,
|
||||||
|
packageName,
|
||||||
|
entrypointArr[entrypointArr.length - 1]
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (
|
||||||
|
dirname(downloadedFiles[entrypoint].fsPath) === goModPath ||
|
||||||
|
!isGoModExist
|
||||||
|
) {
|
||||||
|
await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
|
||||||
|
overwrite: forceMove,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to move entry to package folder');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (meta.isDev) {
|
||||||
|
let entrypointDir = entrypointDirname;
|
||||||
|
if (goModPathArr.length > 1) {
|
||||||
|
entrypointDir = goModPath;
|
||||||
|
}
|
||||||
|
const isGoModBk = await pathExists(join(entrypointDir, 'go.mod.bk'));
|
||||||
|
if (isGoModBk) {
|
||||||
|
await move(
|
||||||
|
join(entrypointDir, 'go.mod.bk'),
|
||||||
|
join(entrypointDir, 'go.mod'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
await move(
|
||||||
|
join(entrypointDir, 'go.sum.bk'),
|
||||||
|
join(entrypointDir, 'go.sum'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Tidy `go.mod` file...');
|
||||||
|
try {
|
||||||
|
// ensure go.mod up-to-date
|
||||||
|
await go.mod();
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go mod tidy`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Running `go build`...');
|
||||||
|
const destPath = join(outDir, 'handler');
|
||||||
|
const isGoModInRootDir = goModPathArr.length === 1;
|
||||||
|
const baseGoModPath = isGoModInRootDir ? entrypointDirname : goModPath;
|
||||||
|
try {
|
||||||
|
let src = [join(baseGoModPath, mainModGoFileName)];
|
||||||
|
|
||||||
|
await go.build(src, destPath, config.ldsflags);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go build`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
if (meta.isDev) {
|
||||||
|
// caching for `now dev`
|
||||||
|
await move(
|
||||||
|
join(baseGoModPath, 'go.mod'),
|
||||||
|
join(baseGoModPath, 'go.mod.bk'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
await move(
|
||||||
|
join(baseGoModPath, 'go.sum'),
|
||||||
|
join(baseGoModPath, 'go.sum.bk'),
|
||||||
|
{ overwrite: true }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// legacy mode
|
||||||
|
// we need `main.go` in the same dir as the entrypoint,
|
||||||
|
// otherwise `go build` will refuse to build
|
||||||
|
const go = await createGo(
|
||||||
|
goPath,
|
||||||
|
process.platform,
|
||||||
|
process.arch,
|
||||||
|
{
|
||||||
|
cwd: entrypointDirname,
|
||||||
|
},
|
||||||
|
false
|
||||||
|
);
|
||||||
|
const origianlMainGoContents = await readFile(
|
||||||
|
join(__dirname, 'main.go'),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
|
const mainGoContents = origianlMainGoContents.replace(
|
||||||
|
'__NOW_HANDLER_FUNC_NAME',
|
||||||
|
handlerFunctionName
|
||||||
|
);
|
||||||
|
|
||||||
|
// in order to allow the user to have `main.go`,
|
||||||
|
// we need our `main.go` to be called something else
|
||||||
|
const mainGoFileName = 'main__now__go__.go';
|
||||||
|
|
||||||
|
// Go doesn't like to build files in different directories,
|
||||||
|
// so now we place `main.go` together with the user code
|
||||||
|
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||||
|
|
||||||
|
// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
|
||||||
|
// and download any packages that aren't part of the stdlib
|
||||||
|
console.log('Running `go get`...');
|
||||||
|
try {
|
||||||
|
await go.get();
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go get`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Running `go build`...');
|
||||||
|
const destPath = join(outDir, 'handler');
|
||||||
|
try {
|
||||||
|
const src = [
|
||||||
|
join(entrypointDirname, mainGoFileName),
|
||||||
|
downloadedFiles[entrypoint].fsPath,
|
||||||
|
];
|
||||||
|
await go.build(src, destPath);
|
||||||
|
} catch (err) {
|
||||||
|
console.log('failed to `go build`');
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const lambda = await createLambda({
|
||||||
|
files: { ...(await glob('**', outDir)), ...includedFiles },
|
||||||
|
handler: 'handler',
|
||||||
|
runtime: 'go1.x',
|
||||||
|
environment: {},
|
||||||
|
});
|
||||||
|
const output = {
|
||||||
|
[entrypoint]: lambda,
|
||||||
|
};
|
||||||
|
|
||||||
|
let watch = parsedAnalyzed.watch;
|
||||||
|
let watchSub: string[] = [];
|
||||||
|
// if `entrypoint` located in subdirectory
|
||||||
|
// we will need to concat it with return watch array
|
||||||
|
if (entrypointArr.length > 1) {
|
||||||
|
entrypointArr.pop();
|
||||||
|
watchSub = parsedAnalyzed.watch.map(file => join(...entrypointArr, file));
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
output,
|
||||||
|
watch: watch.concat(watchSub),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export { shouldServe };
|
||||||
@@ -1,140 +1,10 @@
|
|||||||
package main
|
package main
|
||||||
|
|
||||||
import (
|
import (
|
||||||
b64 "encoding/base64"
|
|
||||||
"encoding/json"
|
|
||||||
"fmt"
|
|
||||||
"github.com/aws/aws-lambda-go/events"
|
|
||||||
"github.com/aws/aws-lambda-go/lambda"
|
|
||||||
"io/ioutil"
|
|
||||||
"net"
|
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
now "github.com/zeit/now-builders/utils/go/bridge"
|
||||||
)
|
)
|
||||||
|
|
||||||
type Request struct {
|
|
||||||
Host string `json:"host"`
|
|
||||||
Path string `json:"path"`
|
|
||||||
Method string `json`
|
|
||||||
Headers map[string]string `json:"headers"`
|
|
||||||
Encoding string `json"encoding,omitempty"`
|
|
||||||
Body string `json:"body"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type Response struct {
|
|
||||||
StatusCode int `json:"statusCode"`
|
|
||||||
Headers map[string]string `json:"headers"`
|
|
||||||
Encoding string `json:"encoding,omitemtpy"`
|
|
||||||
Body string `json:"body"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ResponseError struct {
|
|
||||||
Code string `json:"code"`
|
|
||||||
Message string `json:"message"`
|
|
||||||
}
|
|
||||||
|
|
||||||
type ResponseErrorWrapper struct {
|
|
||||||
Error ResponseError `json:"error"`
|
|
||||||
}
|
|
||||||
|
|
||||||
func createErrorResponse(message string, code string, statusCode int) (Response, error) {
|
|
||||||
obj := ResponseErrorWrapper{
|
|
||||||
Error: ResponseError{
|
|
||||||
Code: code,
|
|
||||||
Message: message,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
body, _ := json.Marshal(obj)
|
|
||||||
|
|
||||||
return Response{
|
|
||||||
StatusCode: statusCode,
|
|
||||||
Headers: map[string]string{
|
|
||||||
"Content-Type": "application/json",
|
|
||||||
},
|
|
||||||
Body: string(body),
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
func main() {
|
||||||
l, err := net.Listen("tcp", ":0")
|
now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
http.HandleFunc("/", __NOW_HANDLER_FUNC_NAME)
|
|
||||||
go http.Serve(l, nil)
|
|
||||||
|
|
||||||
handler := func(_req events.APIGatewayProxyRequest) (Response, error) {
|
|
||||||
var req Request
|
|
||||||
|
|
||||||
err := json.Unmarshal([]byte(_req.Body), &req)
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
return createErrorResponse("Invalid payload", "bad_request", 400)
|
|
||||||
}
|
|
||||||
|
|
||||||
if req.Encoding == "base64" {
|
|
||||||
decoded, _ := b64.StdEncoding.DecodeString(req.Body)
|
|
||||||
req.Body = string(decoded)
|
|
||||||
}
|
|
||||||
|
|
||||||
url := "http://" + l.Addr().String() + req.Path
|
|
||||||
|
|
||||||
internalReq, err := http.NewRequest(req.Method, url, strings.NewReader(req.Body))
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
return createErrorResponse("Bad gateway", "bad_gateway", 502)
|
|
||||||
}
|
|
||||||
|
|
||||||
for k, v := range req.Headers {
|
|
||||||
internalReq.Header.Add(k, v)
|
|
||||||
if strings.ToLower(k) == "host" {
|
|
||||||
req.Host = v
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
client := &http.Client{}
|
|
||||||
internalRes, err := client.Do(internalReq)
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
return createErrorResponse("Bad gateway", "bad_gateway", 502)
|
|
||||||
}
|
|
||||||
defer internalRes.Body.Close()
|
|
||||||
|
|
||||||
resHeaders := make(map[string]string, len(internalRes.Header))
|
|
||||||
var resEncoding string
|
|
||||||
for k, v := range internalRes.Header {
|
|
||||||
// FIXME: support multiple values via concatenating with ','
|
|
||||||
// see RFC 7230, section 3.2.2
|
|
||||||
if strings.ToLower(k) == "x-now-response-encoding" {
|
|
||||||
// we don't want to send this header down
|
|
||||||
resEncoding = v[0]
|
|
||||||
} else {
|
|
||||||
resHeaders[k] = v[0]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
bodyBytes, err := ioutil.ReadAll(internalRes.Body)
|
|
||||||
if err != nil {
|
|
||||||
return createErrorResponse("Bad gateway", "bad_gateway", 502)
|
|
||||||
}
|
|
||||||
|
|
||||||
var resBody string
|
|
||||||
if resEncoding == "base64" {
|
|
||||||
resBody = b64.StdEncoding.EncodeToString(bodyBytes)
|
|
||||||
} else {
|
|
||||||
resBody = string(bodyBytes)
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response{
|
|
||||||
StatusCode: internalRes.StatusCode,
|
|
||||||
Headers: resHeaders,
|
|
||||||
Encoding: resEncoding,
|
|
||||||
Body: resBody,
|
|
||||||
}, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
lambda.Start(handler)
|
|
||||||
}
|
}
|
||||||
|
|||||||
12
packages/now-go/main__mod__.go
Normal file
12
packages/now-go/main__mod__.go
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package main
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"__NOW_HANDLER_PACKAGE_NAME"
|
||||||
|
|
||||||
|
now "github.com/zeit/now-builders/utils/go/bridge"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main() {
|
||||||
|
now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))
|
||||||
|
}
|
||||||
@@ -1,29 +1,36 @@
|
|||||||
{
|
{
|
||||||
"name": "@now/go",
|
"name": "@now/go",
|
||||||
"version": "0.2.9",
|
"version": "0.5.1",
|
||||||
|
"license": "MIT",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://github.com/zeit/now-builders.git",
|
||||||
|
"directory": "packages/now-go"
|
||||||
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "best -I test/*.js",
|
"build": "tsc",
|
||||||
"prepublish": "./build.sh"
|
"test": "tsc && jest",
|
||||||
|
"prepublish": "tsc"
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"bin",
|
"*.js",
|
||||||
"download-go-bin.js",
|
"main.go",
|
||||||
"index.js",
|
"main__mod__.go",
|
||||||
"main.go"
|
"util"
|
||||||
],
|
],
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"debug": "^4.1.1",
|
||||||
"execa": "^1.0.0",
|
"execa": "^1.0.0",
|
||||||
"fs-extra": "^7.0.0",
|
"fs-extra": "^7.0.0",
|
||||||
"lambda-git": "^0.1.2",
|
|
||||||
"mkdirp-promise": "5.0.1",
|
|
||||||
"node-fetch": "^2.2.1",
|
"node-fetch": "^2.2.1",
|
||||||
"tar": "4.4.6"
|
"tar": "4.4.6"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@zeit/best": "0.4.3",
|
"@types/debug": "^4.1.3",
|
||||||
"rmfr": "2.0.0"
|
"@types/execa": "^0.9.0",
|
||||||
},
|
"@types/fs-extra": "^5.0.5",
|
||||||
"peerDependencies": {
|
"@types/node-fetch": "^2.3.0",
|
||||||
"@now/build-utils": ">=0.0.1"
|
"@types/tar": "^4.0.0",
|
||||||
|
"typescript": "^3.4.2"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
13
packages/now-go/test/fixtures/01-cowsay/index.go
vendored
Normal file
13
packages/now-go/test/fixtures/01-cowsay/index.go
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package cowsay
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
say "github.com/dhruvbird/go-cowsay"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, say.Format("cow:RANDOMNESS_PLACEHOLDER"))
|
||||||
|
}
|
||||||
11
packages/now-go/test/fixtures/01-cowsay/now.json
vendored
Normal file
11
packages/now-go/test/fixtures/01-cowsay/now.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{ "src": "index.go", "use": "@now/go" },
|
||||||
|
{ "src": "subdirectory/index.go", "use": "@now/go" }
|
||||||
|
],
|
||||||
|
"probes": [
|
||||||
|
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
||||||
|
{ "path": "/subdirectory", "mustContain": "subcow:RANDOMNESS_PLACEHOLDER" }
|
||||||
|
]
|
||||||
|
}
|
||||||
13
packages/now-go/test/fixtures/01-cowsay/subdirectory/index.go
vendored
Normal file
13
packages/now-go/test/fixtures/01-cowsay/subdirectory/index.go
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package subcow
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
|
||||||
|
say "github.com/dhruvbird/go-cowsay"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, say.Format("subcow:RANDOMNESS_PLACEHOLDER"))
|
||||||
|
}
|
||||||
1
packages/now-go/test/fixtures/03-env-vars/build-env/go.mod
vendored
Normal file
1
packages/now-go/test/fixtures/03-env-vars/build-env/go.mod
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
module build-env
|
||||||
17
packages/now-go/test/fixtures/03-env-vars/build-env/index.go
vendored
Normal file
17
packages/now-go/test/fixtures/03-env-vars/build-env/index.go
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package buildenv
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rdm := os.Getenv("RANDOMNESS_BUILD_ENV")
|
||||||
|
if rdm == "" {
|
||||||
|
fmt.Println("No build env received")
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, rdm+":build-env")
|
||||||
|
}
|
||||||
1
packages/now-go/test/fixtures/03-env-vars/env/go.mod
vendored
Normal file
1
packages/now-go/test/fixtures/03-env-vars/env/go.mod
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
module env
|
||||||
17
packages/now-go/test/fixtures/03-env-vars/env/index.go
vendored
Normal file
17
packages/now-go/test/fixtures/03-env-vars/env/index.go
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
package env
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rdm := os.Getenv("RANDOMNESS_ENV")
|
||||||
|
if rdm == "" {
|
||||||
|
fmt.Println("No env received")
|
||||||
|
}
|
||||||
|
|
||||||
|
fmt.Fprintf(w, rdm)
|
||||||
|
}
|
||||||
18
packages/now-go/test/fixtures/03-env-vars/now.json
vendored
Normal file
18
packages/now-go/test/fixtures/03-env-vars/now.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [
|
||||||
|
{
|
||||||
|
"src": "env/index.go",
|
||||||
|
"use": "@now/go"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"env": {
|
||||||
|
"RANDOMNESS_ENV": "RANDOMNESS_PLACEHOLDER"
|
||||||
|
},
|
||||||
|
"probes": [
|
||||||
|
{
|
||||||
|
"path": "/env",
|
||||||
|
"mustContain": "RANDOMNESS_PLACEHOLDER"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
11
packages/now-go/test/fixtures/06-content-type/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/06-content-type/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler function
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
4
packages/now-go/test/fixtures/06-content-type/now.json
vendored
Normal file
4
packages/now-go/test/fixtures/06-content-type/now.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "index.go", "use": "@now/go" }]
|
||||||
|
}
|
||||||
7
packages/now-go/test/fixtures/07-content-length/now.json
vendored
Normal file
7
packages/now-go/test/fixtures/07-content-length/now.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "*.go", "use": "@now/go" }],
|
||||||
|
"env": {
|
||||||
|
"RANDOMNESS_ENV_VAR": "RANDOMNESS_PLACEHOLDER"
|
||||||
|
}
|
||||||
|
}
|
||||||
16
packages/now-go/test/fixtures/07-content-length/test1.go
vendored
Normal file
16
packages/now-go/test/fixtures/07-content-length/test1.go
vendored
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
"strconv"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HandlerTest1 function
|
||||||
|
func HandlerTest1(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rdm := os.Getenv("RANDOMNESS_ENV_VAR")
|
||||||
|
|
||||||
|
w.WriteHeader(401)
|
||||||
|
w.Header().Set("content-length", strconv.Itoa(len(rdm+":content-length")))
|
||||||
|
w.Write([]byte(rdm + ":content-length"))
|
||||||
|
}
|
||||||
12
packages/now-go/test/fixtures/07-content-length/test2.go
vendored
Normal file
12
packages/now-go/test/fixtures/07-content-length/test2.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HandlerTest2 function
|
||||||
|
func HandlerTest2(w http.ResponseWriter, r *http.Request) {
|
||||||
|
w.Header().Set("Content-Length", "2")
|
||||||
|
w.WriteHeader(401)
|
||||||
|
w.Write([]byte(""))
|
||||||
|
}
|
||||||
13
packages/now-go/test/fixtures/07-content-length/test3.go
vendored
Normal file
13
packages/now-go/test/fixtures/07-content-length/test3.go
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
package function
|
||||||
|
|
||||||
|
import (
|
||||||
|
"net/http"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
|
||||||
|
// HandlerTest3 function
|
||||||
|
func HandlerTest3(w http.ResponseWriter, r *http.Request) {
|
||||||
|
rev := os.Getenv("RANDOMNESS_ENV_VAR")
|
||||||
|
w.WriteHeader(401)
|
||||||
|
w.Write([]byte(rev + ":content-length"))
|
||||||
|
}
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user