Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: @now/pytho ... @now/build (92 commits)
| SHA1 |
|---|
| e186f89cfd |
| 50cade8bba |
| 13866e61f6 |
| b72f902271 |
| 159cfe99dd |
| 1d9a96d104 |
| 245f846d3e |
| c5ef7f3f35 |
| ccba15a5aa |
| f49aefa8e4 |
| d6b36df4ce |
| 3e4dd10a79 |
| 73956706bd |
| bd8da5360d |
| 6d5a2a4438 |
| c88dc78e33 |
| 63ac11e9f7 |
| 1840632729 |
| 00d8eb0f65 |
| 3db58ac373 |
| 92a1720eea |
| 9abbfbe3f3 |
| 11ef8aa816 |
| 3a122ea950 |
| 737e50630a |
| fb27b7b9be |
| d1a4aecd2f |
| 5ef7014ed8 |
| 0ff2c9950e |
| ddcdcdf3e2 |
| bfc99f19d2 |
| de2c08cfe8 |
| 9679f07124 |
| 6ce24d6a4e |
| e3e029f5f6 |
| 89172a6e89 |
| e8f1dbaa46 |
| 16b5b6fdf3 |
| 3bab29ff76 |
| d675d2e668 |
| 2dda88e676 |
| 5a0090eb1f |
| d438b4ec4e |
| f8810fd7e6 |
| a642cfea96 |
| 2daa20a9f2 |
| 4d5c0c40f0 |
| 29051681df |
| 96d5e81538 |
| 9ba9dd6949 |
| b362d57270 |
| 4ff95e1718 |
| ef02bedd4d |
| ed68a09c3e |
| ac7ae5fc5d |
| 9727b1f020 |
| 2dc454f15f |
| 4463af5c7a |
| c00fb37cf6 |
| 4deb426f9c |
| 008b04413a |
| f177ba46e9 |
| c030fce589 |
| 50a5150bb5 |
| 0578ccf47e |
| e32cd36ded |
| 6ac0ab121c |
| 05db2e6a73 |
| 0b89d30d6c |
| 8a021c9417 |
| f218771382 |
| 17309291ed |
| 86300577ae |
| f9594e0d61 |
| 20fd4b2e12 |
| 718e4d0e0c |
| dc3584cd08 |
| b41788b241 |
| af9a2f9792 |
| f8b8e760de |
| 93d6ec8024 |
| 7ed6b84056 |
| 31da488365 |
| 8eaf05f782 |
| 9311e90f27 |
| c0de970de2 |
| 465ac2093d |
| 19ab0e8698 |
| 02fa98e5e3 |
| 4aef9d48b0 |
| bd2d05344e |
| edc7696623 |
````diff
@@ -29,14 +29,8 @@ jobs:
       - run:
           name: Tests and Coverage
           command: yarn test-coverage
-      - run:
-          name: Potentially save npm token
-          command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
-      - run:
-          name: Potentially publish releases to npm
-          command: ./.circleci/publish.sh
 workflows:
   version: 2
-  build-and-deploy:
+  build-and-test:
     jobs:
       - build
````
````diff
@@ -1,6 +1,13 @@
 #!/bin/bash
 set -euo pipefail
+
+if [ -z "$NPM_TOKEN" ]; then
+  echo "NPM_TOKEN not found. Did you forget to assign the GitHub Action secret?"
+  exit 1
+fi
+
+echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
+
 if [ ! -e ~/.npmrc ]; then
   echo "~/.npmrc file does not exist, skipping publish"
   exit 0
````
.editorconfig (new file, 43 lines)

````diff
@@ -0,0 +1,43 @@
+root = true
+
+[*]
+end_of_line = lf
+charset = utf-8
+trim_trailing_whitespace = true
+insert_final_newline = true
+
+[{*.json,*.json.example,*.gyp,*.yml,*.yaml,*.workflow}]
+indent_style = space
+indent_size = 2
+
+[{*.py,*.asm}]
+indent_style = space
+
+[*.py]
+indent_size = 4
+
+[*.asm]
+indent_size = 8
+
+[*.md]
+trim_trailing_whitespace = false
+indent_style = space
+indent_size = 2
+
+# Ideal settings - some plugins might support these
+[*.js,*.jsx,*.ts,*.tsx]
+quote_type = single
+indent_style = space
+indent_size = 2
+
+[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.java,*.go,*.rs,*.php,*.ng,*.d,*.cs,*.swift}]
+indent_style = tab
+indent_size = 4
+tab_width = 4
+
+[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.tsx,*.d,*.cs,*.swift}]
+curly_bracket_next_line = false
+spaces_around_operators = true
+spaces_around_brackets = outside
+# close enough to 1TB
+indent_brace_style = K&R
````
````diff
@@ -3,9 +3,15 @@
 /**/node_modules/*
 /packages/now-go/go/*
 /packages/now-build-utils/dist/*
+/packages/now-build-utils/src/*.js
+/packages/now-build-utils/src/fs/*.js
 /packages/now-node/dist/*
+/packages/now-layer-node/dist/*
+/packages/now-layer-npm/dist/*
+/packages/now-layer-yarn/dist/*
 /packages/now-next/dist/*
 /packages/now-node-bridge/*
-/packages/now-python/*
+/packages/now-python/dist/*
 /packages/now-optipng/dist/*
 /packages/now-go/*
+/packages/now-rust/dist/*
````
.github/CODEOWNERS (new file, 9 lines)

````diff
@@ -0,0 +1,9 @@
+# Documentation
+# https://help.github.com/en/articles/about-code-owners
+
+* @styfle
+/packages/now-node @styfle @tootallnate
+/packages/now-next @timer @dav-is
+/packages/now-go @styfle @sophearak
+/packages/now-python @styfle @sophearak
+/packages/now-rust @styfle @mike-engel @anmonteiro
````
.github/main.workflow (new file, 76 lines)

````diff
@@ -0,0 +1,76 @@
+workflow "Canary publish" {
+  on = "push"
+  resolves = ["3. Canary yarn run publish"]
+}
+
+action "0. Canary filter" {
+  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
+  args = "branch canary"
+}
+
+action "0. Canary PR not deleted" {
+  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
+  needs = ["0. Canary filter"]
+  args = "not deleted"
+}
+
+action "1. Canary yarn install" {
+  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
+  needs = ["0. Canary PR not deleted"]
+  runs = "yarn"
+  args = "install"
+}
+
+action "2. Canary yarn run build" {
+  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
+  needs = ["1. Canary yarn install"]
+  runs = "yarn"
+  args = "run build"
+}
+
+action "3. Canary yarn run publish" {
+  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
+  needs = ["2. Canary yarn run build"]
+  runs = "yarn"
+  args = "run publish-from-github"
+  secrets = ["NPM_TOKEN"]
+}
+
+
+workflow "Master publish" {
+  on = "push"
+  resolves = ["3. Master yarn run publish"]
+}
+
+action "0. Master filter" {
+  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
+  args = "branch master"
+}
+
+action "0. Master PR not deleted" {
+  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
+  needs = ["0. Master filter"]
+  args = "not deleted"
+}
+
+action "1. Master yarn install" {
+  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
+  needs = ["0. Master PR not deleted"]
+  runs = "yarn"
+  args = "install"
+}
+
+action "2. Master yarn run build" {
+  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
+  needs = ["1. Master yarn install"]
+  runs = "yarn"
+  args = "run build"
+}
+
+action "3. Master yarn run publish" {
+  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
+  needs = ["2. Master yarn run build"]
+  runs = "yarn"
+  args = "run publish-from-github"
+  secrets = ["NPM_TOKEN"]
+}
````
.gitignore

````diff
@@ -1,4 +1,6 @@
 node_modules
 tmp
 target/
 .next
+coverage
+*.tgz
````
````diff
@@ -1,4 +0,0 @@
-{
-  "singleQuote": true,
-  "trailingComma": "es5"
-}
````
README.md

````diff
@@ -1,8 +1,11 @@
 # now-builders
 
-This is the full list of official Builders provided by the ZEIT team.
+This is a monorepo containing the [Official Builders](https://zeit.co/docs/v2/deployments/builders/overview) provided by the ZEIT team.
 
-More details here: https://zeit.co/docs/v2/deployments/builders/overview/
+There are two branches:
+
+- canary - published to npm as `canary` dist-tag, eg `@now/node@canary`
+- master - published to npm as `latest` dist-tag, eg `@now/node@latest`
 
 ### Publishing to npm
 
@@ -20,8 +23,8 @@ For the canary channel use:
 yarn publish-canary
 ```
 
-CircleCI will take care of publishing the updated packages to npm from there.
+GitHub Actions will take care of publishing the updated packages to npm from there.
 
-If for some reason CircleCI fails to publish the npm package, you may do so
+If for some reason GitHub Actions fails to publish the npm package, you may do so
 manually by running `npm publish` from the package directory. Make sure to
-include the `--tag canary` parameter if you are publishing a canary release!
+use `npm publish --tag canary` if you are publishing a canary release!
````
````diff
@@ -29,12 +29,11 @@ Serverless:
 - No runtime dependencies, meaning smaller lambda functions
 - Optimized for fast [cold start](https://zeit.co/blog/serverless-ssr#cold-start)
 
-
 #### Possible Ways to Fix It
 
 In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
 
 1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
 
 ```
 npm install next --save
@@ -46,7 +45,7 @@ npm install next --save
 {
   "scripts": {
     "now-build": "next build"
-  },
+  }
 }
 ```
 
@@ -54,9 +53,9 @@ npm install next --save
 
 ```js
 module.exports = {
-  target: 'serverless'
+  target: 'serverless',
   // Other options are still valid
-}
+};
 ```
 
 4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
@@ -70,4 +69,4 @@ module.exports = {
 
 ### Useful Links
 
 - [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
````
````diff
@@ -20,7 +20,7 @@ npm install next --save
 {
   "scripts": {
     "now-build": "next build"
-  },
+  }
 }
 ```
 
@@ -28,9 +28,9 @@ npm install next --save
 
 ```js
 module.exports = {
-  target: 'serverless'
+  target: 'serverless',
   // Other options
-}
+};
 ```
 
 4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`
````
errors/now-static-build-failed-to-detect-a-server.md (new file, 38 lines)

````diff
@@ -0,0 +1,38 @@
+# `@now/static-build` Failed to detect a server running
+
+#### Why This Warning Occurred
+
+When running `now dev`, the `@now/static-build` builder proxies relevant HTTP
+requests to the server that is created by the `now-dev` script in the
+`package.json` file.
+
+In order for `now dev` to know which port the server is running on, the builder
+is provided a `$PORT` environment variable that the server _must_ bind to. The
+error "Failed to detect a server running on port" is printed if the builder fails
+to detect a server listening on that specific port within five minutes.
+
+#### Possible Ways to Fix It
+
+Please ensure that your `now-dev` script binds the spawned development server on
+the provided `$PORT` that the builder expects the server to bind to.
+
+For example, if you are using Gatsby, your `now-dev` script must use the `-p`
+(port) option to bind to the `$PORT` specified from the builder:
+
+```
+{
+  ...
+  "scripts": {
+    ...
+    "now-dev": "gatsby develop -p $PORT"
+  }
+}
+```
+
+Consult your static builder program's `--help` or documentation to figure out what
+the command line flag to bind to a specific port is (in many cases, it is one of:
+`-p` / `-P` / `--port`).
+
+### Useful Links
+
+- [`@now/static-build` Local Development Documentation](https://zeit.co/docs/v2/deployments/official-builders/static-build-now-static-build#local-development)
````
````diff
@@ -1,5 +1,37 @@
+const childProcess = require('child_process');
+const path = require('path');
+
+const command = 'git diff HEAD~1 --name-only';
+const diff = childProcess.execSync(command).toString();
+
+const changed = diff
+  .split('\n')
+  .filter(item => Boolean(item) && item.includes('packages/'))
+  .map(item => path.relative('packages', item).split('/')[0]);
+
+const matches = [];
+
+if (changed.length > 0) {
+  console.log('The following packages have changed:');
+
+  changed.map((item) => {
+    matches.push(item);
+    console.log(item);
+
+    return null;
+  });
+} else {
+  matches.push('now-node');
+  console.log(`No packages changed, defaulting to ${matches[0]}`);
+}
+
+const testMatch = Array.from(new Set(matches)).map(
+  item => `**/${item}/**/?(*.)+(spec|test).[jt]s?(x)`,
+);
+
 module.exports = {
   testEnvironment: 'node',
+  testMatch,
   collectCoverageFrom: [
     'packages/(!test)/**/*.{js,jsx}',
     '!**/node_modules/**',
````
````diff
@@ -1,9 +1,7 @@
 {
   "npmClient": "yarn",
   "useWorkspaces": true,
-  "packages": [
-    "packages/*"
-  ],
+  "packages": ["packages/*"],
   "command": {
     "publish": {
       "npmClient": "npm",
````
package.json

````diff
@@ -12,8 +12,9 @@
   "scripts": {
     "lerna": "lerna",
     "bootstrap": "lerna bootstrap",
-    "publish-stable": "lerna version",
-    "publish-canary": "lerna version prerelease --preid canary",
+    "publish-stable": "git checkout master && git pull && lerna version",
+    "publish-canary": "git checkout canary && git pull && lerna version prerelease --preid canary",
+    "publish-from-github": "./.circleci/publish.sh",
     "build": "./.circleci/build.sh",
     "lint": "eslint .",
     "codecov": "codecov",
@@ -51,6 +52,10 @@
     "lint-staged": "^8.0.4",
     "node-fetch": "^2.3.0",
     "pre-commit": "^1.2.2",
-    "prettier": "^1.15.2"
+    "prettier": "1.17.1"
+  },
+  "prettier": {
+    "singleQuote": true,
+    "trailingComma": "es5"
   }
 }
````
````diff
@@ -1,6 +1,6 @@
 {
   "name": "@now/build-utils",
-  "version": "0.5.4",
+  "version": "0.5.6",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
````
````diff
@@ -31,7 +31,10 @@ class FileFsRef implements File {
     this.fsPath = fsPath;
   }
 
-  static async fromFsPath({ mode, fsPath }: FileFsRefOptions): Promise<FileFsRef> {
+  static async fromFsPath({
+    mode,
+    fsPath,
+  }: FileFsRefOptions): Promise<FileFsRef> {
     let m = mode;
     if (!m) {
       const stat = await fs.lstat(fsPath);
@@ -40,7 +43,11 @@ class FileFsRef implements File {
     return new FileFsRef({ mode: m, fsPath });
   }
 
-  static async fromStream({ mode = 0o100644, stream, fsPath }: FromStreamOptions): Promise<FileFsRef> {
+  static async fromStream({
+    mode = 0o100644,
+    stream,
+    fsPath,
+  }: FromStreamOptions): Promise<FileFsRef> {
     assert(typeof mode === 'number');
     assert(typeof stream.pipe === 'function'); // is-stream
     assert(typeof fsPath === 'string');
@@ -48,7 +55,7 @@ class FileFsRef implements File {
 
     await new Promise<void>((resolve, reject) => {
       const dest = fs.createWriteStream(fsPath, {
-        mode: mode & 0o777
+        mode: mode & 0o777,
       });
       stream.pipe(dest);
       stream.on('error', reject);
@@ -72,15 +79,15 @@ class FileFsRef implements File {
     let flag = false;
 
     // eslint-disable-next-line consistent-return
-    return multiStream((cb) => {
+    return multiStream(cb => {
       if (flag) return cb(null, null);
       flag = true;
 
       this.toStreamAsync()
-        .then((stream) => {
+        .then(stream => {
           cb(null, stream);
         })
-        .catch((error) => {
+        .catch(error => {
           cb(error, null);
         });
     });
````
````diff
@@ -4,11 +4,11 @@ import { File, Files, Meta } from '../types';
 import { remove, mkdirp, readlink, symlink } from 'fs-extra';
 
 export interface DownloadedFiles {
-  [filePath: string]: FileFsRef
+  [filePath: string]: FileFsRef;
 }
 
 const S_IFMT = 61440; /* 0170000 type of file */
 const S_IFLNK = 40960; /* 0120000 symbolic link */
 
 export function isSymbolicLink(mode: number): boolean {
   return (mode & S_IFMT) === S_IFLNK;
@@ -17,9 +17,9 @@ export function isSymbolicLink(mode: number): boolean {
 async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
   const { mode } = file;
   if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
-    const [ target ] = await Promise.all([
+    const [target] = await Promise.all([
       readlink((file as FileFsRef).fsPath),
-      mkdirp(path.dirname(fsPath))
+      mkdirp(path.dirname(fsPath)),
     ]);
     await symlink(target, fsPath);
     return FileFsRef.fromFsPath({ mode, fsPath });
@@ -34,12 +34,25 @@ async function removeFile(basePath: string, fileMatched: string) {
   await remove(file);
 }
 
-export default async function download(files: Files, basePath: string, meta?: Meta): Promise<DownloadedFiles> {
+export default async function download(
+  files: Files,
+  basePath: string,
+  meta?: Meta
+): Promise<DownloadedFiles> {
+  const { isDev = false, filesChanged = null, filesRemoved = null } =
+    meta || {};
+
+  if (isDev) {
+    // In `now dev`, the `download()` function is a no-op because
+    // the `basePath` matches the `cwd` of the dev server, so the
+    // source files are already available.
+    return files as DownloadedFiles;
+  }
+
   const files2: DownloadedFiles = {};
-  const { filesChanged = null, filesRemoved = null } = meta || {};
 
   await Promise.all(
-    Object.keys(files).map(async (name) => {
+    Object.keys(files).map(async name => {
       // If the file does not exist anymore, remove it.
       if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) {
         await removeFile(basePath, name);
@@ -55,7 +68,7 @@ export default async function download(files: Files, basePath: string, meta?: Me
       const fsPath = path.join(basePath, name);
 
       files2[name] = await downloadFile(file, fsPath);
-    }),
+    })
   );
 
   return files2;
````
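As a usage illustration only (not code from this repository), here is a minimal sketch of how a builder might call the reworked `download()` during `build()`. The exports (`download`, `glob`, `createLambda`, `BuildOptions`, `Meta`) and the `download(files, workPath, meta)` call shape come from the diffs in this comparison; the builder module itself and the `runtime` string are assumptions.

````ts
import {
  BuildOptions,
  Meta,
  createLambda,
  download,
  glob,
} from '@now/build-utils';

export const version = 2;

// Hypothetical builder sketch. Passing `meta` through means download()
// becomes a no-op under `now dev`, where `workPath` already holds the
// source files (see the isDev branch added above).
export async function build({
  files,
  entrypoint,
  workPath,
  meta,
}: BuildOptions & { meta?: Meta }) {
  const downloadedFiles = await download(files, workPath, meta);
  console.log(`Entrypoint is at ${downloadedFiles[entrypoint].fsPath}`);

  const lambda = await createLambda({
    files: await glob('**', workPath),
    handler: entrypoint,
    runtime: 'nodejs8.10', // assumed runtime identifier for this sketch
  });

  return { output: { [entrypoint]: lambda } };
}
````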
````diff
@@ -8,12 +8,16 @@ import FileFsRef from '../file-fs-ref';
 type GlobOptions = vanillaGlob_.IOptions;
 
 interface FsFiles {
-  [filePath: string]: FileFsRef
+  [filePath: string]: FileFsRef;
 }
 
 const vanillaGlob = promisify(vanillaGlob_);
 
-export default async function glob(pattern: string, opts: GlobOptions | string, mountpoint?: string): Promise<FsFiles> {
+export default async function glob(
+  pattern: string,
+  opts: GlobOptions | string,
+  mountpoint?: string
+): Promise<FsFiles> {
   let options: GlobOptions;
   if (typeof opts === 'string') {
     options = { cwd: opts };
@@ -23,7 +27,7 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
 
   if (!options.cwd) {
     throw new Error(
-      'Second argument (basePath) must be specified for names of resulting files',
+      'Second argument (basePath) must be specified for names of resulting files'
     );
   }
 
@@ -41,11 +45,11 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
   const files = await vanillaGlob(pattern, options);
 
   for (const relativePath of files) {
-    const fsPath = path.join(options.cwd!, relativePath);
+    const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
     let stat: Stats = options.statCache![fsPath] as Stats;
     assert(
       stat,
-      `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
+      `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
     );
     if (stat.isFile()) {
       const isSymlink = options.symlinks![fsPath];
````
````diff
@@ -7,6 +7,6 @@ export default function rename(files: Files, delegate: Delegate): Files {
       ...newFiles,
       [delegate(name)]: files[name],
     }),
-    {},
+    {}
   );
 }
````
````diff
@@ -109,7 +109,7 @@ export async function installDependencies(
   commandArgs = args.filter(a => a !== '--prefer-offline');
   await spawnAsync(
     'npm',
-    ['install'].concat(commandArgs),
+    ['install', '--unsafe-perm'].concat(commandArgs),
     destPath,
     opts as SpawnOptions
   );
````
````diff
@@ -1,26 +1,28 @@
 import eos from 'end-of-stream';
 
-export default function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
+export default function streamToBuffer(
+  stream: NodeJS.ReadableStream
+): Promise<Buffer> {
   return new Promise<Buffer>((resolve, reject) => {
     const buffers: Buffer[] = [];
 
-    stream.on('data', buffers.push.bind(buffers))
+    stream.on('data', buffers.push.bind(buffers));
 
-    eos(stream, (err) => {
+    eos(stream, err => {
       if (err) {
         reject(err);
         return;
       }
       switch (buffers.length) {
         case 0:
           resolve(Buffer.allocUnsafe(0));
           break;
         case 1:
           resolve(buffers[0]);
           break;
         default:
           resolve(Buffer.concat(buffers));
       }
     });
   });
 }
````
````diff
@@ -1,34 +1,51 @@
 import FileBlob from './file-blob';
 import FileFsRef from './file-fs-ref';
 import FileRef from './file-ref';
-import { File, Files, AnalyzeOptions, BuildOptions, PrepareCacheOptions, ShouldServeOptions, Meta } from './types';
+import {
+  File,
+  Files,
+  AnalyzeOptions,
+  BuildOptions,
+  PrepareCacheOptions,
+  ShouldServeOptions,
+  Meta,
+} from './types';
 import { Lambda, createLambda } from './lambda';
-import download from './fs/download';
-import getWriteableDirectory from './fs/get-writable-directory'
+import download, { DownloadedFiles } from './fs/download';
+import getWriteableDirectory from './fs/get-writable-directory';
 import glob from './fs/glob';
 import rename from './fs/rename';
-import { installDependencies, runPackageJsonScript, runNpmInstall, runShellScript } from './fs/run-user-scripts';
+import {
+  installDependencies,
+  runPackageJsonScript,
+  runNpmInstall,
+  runShellScript,
+} from './fs/run-user-scripts';
 import streamToBuffer from './fs/stream-to-buffer';
 import shouldServe from './should-serve';
 
 export {
   FileBlob,
   FileFsRef,
   FileRef,
   Files,
   File,
   Meta,
   Lambda,
   createLambda,
   download,
-  getWriteableDirectory,
-  glob,
-  rename,
-  installDependencies, runPackageJsonScript, runNpmInstall, runShellScript,
-  streamToBuffer,
-  AnalyzeOptions,
-  BuildOptions,
-  PrepareCacheOptions,
-  ShouldServeOptions,
-  shouldServe,
+  DownloadedFiles,
+  getWriteableDirectory,
+  glob,
+  rename,
+  installDependencies,
+  runPackageJsonScript,
+  runNpmInstall,
+  runShellScript,
+  streamToBuffer,
+  AnalyzeOptions,
+  BuildOptions,
+  PrepareCacheOptions,
+  ShouldServeOptions,
+  shouldServe,
 };
````
````diff
@@ -32,9 +32,7 @@ export class Lambda {
   public runtime: string;
   public environment: Environment;
 
-  constructor({
-    zipBuffer, handler, runtime, environment,
-  }: LambdaOptions) {
+  constructor({ zipBuffer, handler, runtime, environment }: LambdaOptions) {
     this.type = 'Lambda';
     this.zipBuffer = zipBuffer;
     this.handler = handler;
@@ -47,7 +45,10 @@ const sema = new Sema(10);
 const mtime = new Date(1540000000000);
 
 export async function createLambda({
-  files, handler, runtime, environment = {},
+  files,
+  handler,
+  runtime,
+  environment = {},
 }: CreateLambdaOptions): Promise<Lambda> {
   assert(typeof files === 'object', '"files" must be an object');
   assert(typeof handler === 'string', '"handler" is not a string');
@@ -97,7 +98,9 @@ export async function createZip(files: Files): Promise<Buffer> {
     }
 
     zipFile.end();
-    streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
+    streamToBuffer(zipFile.outputStream)
+      .then(resolve)
+      .catch(reject);
   });
 
   return zipBuffer;
````
````diff
@@ -5,7 +5,7 @@ import FileFsRef from './file-fs-ref';
 export default function shouldServe({
   entrypoint,
   files,
-  requestPath
+  requestPath,
 }: ShouldServeOptions): boolean {
   requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/'
   entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility
@@ -23,5 +23,5 @@ export default function shouldServe({
 }
 
 function hasProp(obj: { [path: string]: FileFsRef }, key: string): boolean {
-  return Object.hasOwnProperty.call(obj, key)
+  return Object.hasOwnProperty.call(obj, key);
 }
````
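A small, hypothetical usage sketch of the `shouldServe` helper touched above. The export names and the option fields shown in the destructuring come from the diffs; the file paths here are made up. Builders such as `@now/cgi` simply re-export it, as its `require('@now/build-utils')` line later in this comparison shows.

````ts
import { FileFsRef, shouldServe } from '@now/build-utils';

// Hypothetical check: should `now dev` serve this request straight from
// the files map produced by a build?
async function demo(): Promise<boolean> {
  const files = {
    'api/index.go': await FileFsRef.fromFsPath({
      mode: 0o100644,
      fsPath: '/tmp/work/api/index.go', // made-up path
    }),
  };

  return shouldServe({
    entrypoint: 'api/index.go',
    files,
    requestPath: 'api/index.go/', // trailing '/' is stripped by the helper
  });
}
````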
````diff
@@ -1,9 +1,10 @@
 {
   "version": 2,
-  "builds": [
-    { "src": "api/index.js", "use": "@now/node" }
-  ],
+  "builds": [{ "src": "api/index.js", "use": "@now/node" }],
   "probes": [
-    { "path": "/api/index.js", "mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER" }
+    {
+      "path": "/api/index.js",
+      "mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER"
+    }
   ]
 }
````
````diff
@@ -1,10 +1,11 @@
 {
   "version": 2,
   "builds": [
-    { "src": "index.js", "use": "@now/node", "config": { "maxLambdaSize": "15mb" } }
-  ],
-  "probes": [
-    { "path": "/", "mustContain": "found:RANDOMNESS_PLACEHOLDER" }
-  ]
-}
+    {
+      "src": "index.js",
+      "use": "@now/node",
+      "config": { "maxLambdaSize": "15mb" }
+    }
+  ],
+  "probes": [{ "path": "/", "mustContain": "found:RANDOMNESS_PLACEHOLDER" }]
+}
````
````diff
@@ -15,10 +15,6 @@
     "strict": true,
     "target": "esnext"
   },
-  "include": [
-    "src/**/*"
-  ],
-  "exclude": [
-    "node_modules"
-  ]
+  "include": ["src/**/*"],
+  "exclude": ["node_modules"]
 }
````
````diff
@@ -9,12 +9,13 @@ const { shouldServe } = require('@now/build-utils'); // eslint-disable-line impo
 
 exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
 
-exports.build = async ({ files, entrypoint }) => {
+exports.build = async ({
+  workPath, files, entrypoint, meta,
+}) => {
   console.log('downloading files...');
-  const srcDir = await getWritableDirectory();
   const outDir = await getWritableDirectory();
 
-  await download(files, srcDir);
+  await download(files, workPath, meta);
 
   const handlerPath = path.join(__dirname, 'handler');
   await copyFile(handlerPath, path.join(outDir, 'handler'));
@@ -24,7 +25,7 @@ exports.build = async ({ files, entrypoint }) => {
 
   // For now only the entrypoint file is copied into the lambda
   await copyFile(
-    path.join(srcDir, entrypoint),
+    path.join(workPath, entrypoint),
     path.join(outDir, entrypoint),
   );
 
````
````diff
@@ -1,6 +1,6 @@
 {
   "name": "@now/cgi",
-  "version": "0.1.3",
+  "version": "0.1.4",
   "license": "MIT",
   "repository": {
     "type": "git",
````
````diff
@@ -23,7 +23,7 @@ const getGoUrl = (version: string, platform: string, arch: string) => {
   return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
 };
 
-export async function getAnalyzedEntrypoint(filePath: string) {
+export async function getAnalyzedEntrypoint(filePath: string, modulePath = '') {
   debug('Analyzing entrypoint %o', filePath);
   const bin = join(__dirname, 'analyze');
 
@@ -35,7 +35,8 @@ export async function getAnalyzedEntrypoint(filePath: string) {
   await go.build(src, dest);
 }
 
-  const args = [filePath];
+  const args = [`-modpath=${modulePath}`, filePath];
+
   const analyzed = await execa.stdout(bin, args);
   debug('Analyzed entrypoint %o', analyzed);
   return analyzed;
@@ -125,7 +126,15 @@ export async function downloadGo(
   // If we found GOPATH in ENV, or default `Go` path exists
   // asssume that user have `Go` installed
   if (isUserGo || process.env.GOPATH !== undefined) {
-    return createGo(dir, platform, arch);
+    const { stdout } = await execa('go', ['version']);
+
+    if (parseInt(stdout.split('.')[1]) >= 11) {
+      return createGo(dir, platform, arch);
+    }
+
+    throw new Error(
+      `Your current ${stdout} doesn't support Go Modules. Please update.`
+    );
   } else {
     // Check `Go` bin in builder CWD
     const isGoExist = await pathExists(join(dir, 'bin'));
````
````diff
@@ -1,5 +1,7 @@
-import { join, sep, dirname } from 'path';
+import { join, sep, dirname, basename } from 'path';
 import { readFile, writeFile, pathExists, move } from 'fs-extra';
+import { homedir } from 'os';
+import execa from 'execa';
 
 import {
   glob,
@@ -14,6 +16,7 @@ import {
 import { createGo, getAnalyzedEntrypoint } from './go-helpers';
 
 interface Analyzed {
+  found?: boolean;
   packageName: string;
   functionName: string;
   watch: string[];
@@ -28,6 +31,18 @@ interface BuildParamsType extends BuildOptions {
   meta: BuildParamsMeta;
 }
 
+// Initialize private git repo for Go Modules
+async function initPrivateGit(credentials: string) {
+  await execa('git', [
+    'config',
+    '--global',
+    'credential.helper',
+    `store --file ${join(homedir(), '.git-credentials')}`,
+  ]);
+
+  await writeFile(join(homedir(), '.git-credentials'), credentials);
+}
+
 export const version = 2;
 
 export const config = {
@@ -38,8 +53,14 @@ export async function build({
   files,
   entrypoint,
   config,
+  workPath,
   meta = {} as BuildParamsMeta,
 }: BuildParamsType) {
+  if (process.env.GIT_CREDENTIALS && !meta.isDev) {
+    console.log('Initialize Git credentials...');
+    await initPrivateGit(process.env.GIT_CREDENTIALS);
+  }
+
   console.log('Downloading user files...');
   const entrypointArr = entrypoint.split(sep);
 
@@ -48,17 +69,82 @@ export async function build({
     getWriteableDirectory(),
   ]);
 
+  const srcPath = join(goPath, 'src', 'lambda');
+  let downloadedFiles;
   if (meta.isDev) {
-    const devGoPath = `dev${entrypointArr[entrypointArr.length - 1]}`;
-    const goPathArr = goPath.split(sep);
-    goPathArr.pop();
-    goPathArr.push(devGoPath);
-    goPath = goPathArr.join(sep);
+    downloadedFiles = await download(files, workPath, meta);
+  } else {
+    downloadedFiles = await download(files, srcPath);
   }
 
-  const srcPath = join(goPath, 'src', 'lambda');
-  const downloadedFiles = await download(files, srcPath);
-  const input = dirname(downloadedFiles[entrypoint].fsPath);
+  console.log(`Parsing AST for "${entrypoint}"`);
+  let analyzed: string;
+  try {
+    let goModAbsPathDir = '';
+    for (const file of Object.keys(downloadedFiles)) {
+      if (file === 'go.mod') {
+        goModAbsPathDir = dirname(downloadedFiles[file].fsPath);
+      }
+    }
+    analyzed = await getAnalyzedEntrypoint(
+      downloadedFiles[entrypoint].fsPath,
+      goModAbsPathDir
+    );
+  } catch (err) {
+    console.log(`Failed to parse AST for "${entrypoint}"`);
+    throw err;
+  }
+
+  if (!analyzed) {
+    const err = new Error(
+      `Could not find an exported function in "${entrypoint}"
+Learn more: https://zeit.co/docs/v2/deployments/official-builders/go-now-go/#entrypoint
+`
+    );
+    console.log(err.message);
+    throw err;
+  }
+
+  const parsedAnalyzed = JSON.parse(analyzed) as Analyzed;
+
+  if (meta.isDev) {
+    const base = dirname(downloadedFiles['now.json'].fsPath);
+    const destNow = join(
+      base,
+      '.now',
+      'cache',
+      basename(entrypoint, '.go'),
+      'src',
+      'lambda'
+    );
+    // this will ensure Go rebuilt fast
+    goPath = join(base, '.now', 'cache', basename(entrypoint, '.go'));
+    await download(downloadedFiles, destNow);
+
+    downloadedFiles = await glob('**', destNow);
+  }
+
+  // find `go.mod` in downloadedFiles
+  const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
+  let isGoModExist = false;
+  let goModPath = '';
+  let goModPathArr: string[] = [];
+  for (const file of Object.keys(downloadedFiles)) {
+    const fileDirname = dirname(downloadedFiles[file].fsPath);
+    if (file === 'go.mod') {
+      isGoModExist = true;
+      goModPath = fileDirname;
+      goModPathArr = goModPath.split(sep);
+    } else if (file.includes('go.mod')) {
+      isGoModExist = true;
+      if (entrypointDirname === fileDirname) {
+        goModPath = fileDirname;
+        goModPathArr = goModPath.split(sep);
+      }
+    }
+  }
+
+  const input = entrypointDirname;
   var includedFiles: Files = {};
 
   if (config && config.includeFiles) {
@@ -70,37 +156,19 @@ export async function build({
     }
   }
 
-  console.log(`Parsing AST for "${entrypoint}"`);
-  let analyzed: string;
-  try {
-    analyzed = await getAnalyzedEntrypoint(downloadedFiles[entrypoint].fsPath);
-  } catch (err) {
-    console.log(`Failed to parse AST for "${entrypoint}"`);
-    throw err;
-  }
-
-  if (!analyzed) {
-    const err = new Error(
-      `Could not find an exported function in "${entrypoint}"`
-    );
-    console.log(err.message);
-    throw err;
-  }
-
-  const parsedAnalyzed = JSON.parse(analyzed) as Analyzed;
-
   const handlerFunctionName = parsedAnalyzed.functionName;
   console.log(
     `Found exported function "${handlerFunctionName}" in "${entrypoint}"`
   );
 
-  // we need `main.go` in the same dir as the entrypoint,
-  // otherwise `go build` will refuse to build
-  const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
-
   // check if package name other than main
+  // using `go.mod` way building the handler
   const packageName = parsedAnalyzed.packageName;
-  const isGoModExist = await pathExists(join(entrypointDirname, 'go.mod'));
+  if (isGoModExist && packageName === 'main') {
+    throw new Error('Please change `package main` to `package handler`');
+  }
+
   if (packageName !== 'main') {
     const go = await createGo(
       goPath,
@@ -132,10 +200,7 @@ export async function build({
   const goFuncName = `${packageName}.${handlerFunctionName}`;
 
   if (isGoModExist) {
-    const goModContents = await readFile(
-      join(entrypointDirname, 'go.mod'),
-      'utf8'
-    );
+    const goModContents = await readFile(join(goModPath, 'go.mod'), 'utf8');
     const usrModName = goModContents.split('\n')[0].split(' ')[1];
     goPackageName = `${usrModName}/${packageName}`;
   }
@@ -144,11 +209,16 @@ export async function build({
     .replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
     .replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
 
-  // write main__mod__.go
-  await writeFile(
-    join(entrypointDirname, mainModGoFileName),
-    mainModGoContents
-  );
+  if (goModPathArr.length > 1) {
+    // using `go.mod` path to write main__mod__.go
+    await writeFile(join(goModPath, mainModGoFileName), mainModGoContents);
+  } else {
+    // using `entrypointDirname` to write main__mod__.go
+    await writeFile(
+      join(entrypointDirname, mainModGoFileName),
+      mainModGoContents
+    );
+  }
 
   // move user go file to folder
   try {
@@ -169,25 +239,34 @@ export async function build({
     );
   }
 
-    await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
-      overwrite: forceMove,
-    });
+    if (
+      dirname(downloadedFiles[entrypoint].fsPath) === goModPath ||
+      !isGoModExist
+    ) {
+      await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
+        overwrite: forceMove,
+      });
+    }
   } catch (err) {
     console.log('failed to move entry to package folder');
     throw err;
   }
 
   if (meta.isDev) {
-    const isGoModBk = await pathExists(join(entrypointDirname, 'go.mod.bk'));
+    let entrypointDir = entrypointDirname;
+    if (goModPathArr.length > 1) {
+      entrypointDir = goModPath;
+    }
+    const isGoModBk = await pathExists(join(entrypointDir, 'go.mod.bk'));
     if (isGoModBk) {
       await move(
-        join(entrypointDirname, 'go.mod.bk'),
-        join(entrypointDirname, 'go.mod'),
+        join(entrypointDir, 'go.mod.bk'),
+        join(entrypointDir, 'go.mod'),
         { overwrite: true }
       );
       await move(
-        join(entrypointDirname, 'go.sum.bk'),
-        join(entrypointDirname, 'go.sum'),
+        join(entrypointDir, 'go.sum.bk'),
+        join(entrypointDir, 'go.sum'),
         { overwrite: true }
      );
    }
@@ -204,8 +283,11 @@ export async function build({
 
   console.log('Running `go build`...');
   const destPath = join(outDir, 'handler');
+  const isGoModInRootDir = goModPathArr.length === 1;
+  const baseGoModPath = isGoModInRootDir ? entrypointDirname : goModPath;
   try {
-    const src = [join(entrypointDirname, mainModGoFileName)];
+    let src = [join(baseGoModPath, mainModGoFileName)];
 
     await go.build(src, destPath, config.ldsflags);
   } catch (err) {
     console.log('failed to `go build`');
@@ -214,17 +296,20 @@ export async function build({
   if (meta.isDev) {
     // caching for `now dev`
     await move(
-      join(entrypointDirname, 'go.mod'),
-      join(entrypointDirname, 'go.mod.bk'),
+      join(baseGoModPath, 'go.mod'),
+      join(baseGoModPath, 'go.mod.bk'),
       { overwrite: true }
     );
     await move(
-      join(entrypointDirname, 'go.sum'),
-      join(entrypointDirname, 'go.sum.bk'),
+      join(baseGoModPath, 'go.sum'),
+      join(baseGoModPath, 'go.sum.bk'),
       { overwrite: true }
     );
   }
 } else {
+  // legacy mode
+  // we need `main.go` in the same dir as the entrypoint,
+  // otherwise `go build` will refuse to build
   const go = await createGo(
     goPath,
     process.platform,
@@ -286,16 +371,17 @@ export async function build({
   };
 
   let watch = parsedAnalyzed.watch;
+  let watchSub: string[] = [];
   // if `entrypoint` located in subdirectory
   // we will need to concat it with return watch array
   if (entrypointArr.length > 1) {
     entrypointArr.pop();
-    watch = parsedAnalyzed.watch.map(file => join(...entrypointArr, file));
+    watchSub = parsedAnalyzed.watch.map(file => join(...entrypointArr, file));
   }
 
   return {
     output,
-    watch,
+    watch: watch.concat(watchSub),
   };
 }
````
````diff
@@ -1,6 +1,6 @@
 {
   "name": "@now/go",
-  "version": "0.4.5",
+  "version": "0.5.1",
   "license": "MIT",
   "repository": {
     "type": "git",
````
````diff
@@ -8,4 +8,4 @@
     { "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
     { "path": "/subdirectory", "mustContain": "subcow:RANDOMNESS_PLACEHOLDER" }
   ]
 }
@@ -15,4 +15,4 @@
       "mustContain": "RANDOMNESS_PLACEHOLDER"
     }
   ]
 }
@@ -1,6 +1,4 @@
 {
   "version": 2,
-  "builds": [
-    { "src": "index.go", "use": "@now/go" }
-  ]
-}
+  "builds": [{ "src": "index.go", "use": "@now/go" }]
+}
@@ -1,9 +1,7 @@
 {
   "version": 2,
-  "builds": [
-    { "src": "*.go", "use": "@now/go" }
-  ],
+  "builds": [{ "src": "*.go", "use": "@now/go" }],
   "env": {
     "RANDOMNESS_ENV_VAR": "RANDOMNESS_PLACEHOLDER"
   }
 }
@@ -5,9 +5,7 @@
       "src": "index.go",
       "use": "@now/go",
       "config": {
-        "includeFiles": [
-          "templates/**"
-        ]
+        "includeFiles": ["templates/**"]
       }
     }
   ],
````
packages/now-go/test/fixtures/09-exported-function/index.go (new file, 22 lines)

````diff
@@ -0,0 +1,22 @@
+package function
+
+import (
+	"fmt"
+	"net/http"
+)
+
+// Person struct
+type Person struct {
+	name string
+	age int
+}
+
+// NewPerson struct method
+func NewPerson(name string, age int) *Person {
+	return &Person{name: name, age: age}
+}
+
+// H func
+func H(w http.ResponseWriter, r *http.Request) {
+	fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
+}
````
packages/now-go/test/fixtures/09-exported-function/now.json (new file, 5 lines)

````diff
@@ -0,0 +1,5 @@
+{
+  "version": 2,
+  "builds": [{ "src": "index.go", "use": "@now/go" }],
+  "probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
+}
````
packages/now-go/test/fixtures/10-go-mod/go.mod (new file, 3 lines)

````diff
@@ -0,0 +1,3 @@
+module go-mod
+
+go 1.12
````
packages/now-go/test/fixtures/10-go-mod/index.go (new file, 11 lines)

````diff
@@ -0,0 +1,11 @@
+package handler
+
+import (
+	"fmt"
+	"net/http"
+)
+
+// Handler func
+func Handler(w http.ResponseWriter, r *http.Request) {
+	fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
+}
````
5
packages/now-go/test/fixtures/10-go-mod/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/10-go-mod/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "index.go", "use": "@now/go" }],
|
||||||
|
"probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
12
packages/now-go/test/fixtures/11-go-mod-shared/api/index.go
vendored
Normal file
12
packages/now-go/test/fixtures/11-go-mod-shared/api/index.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package api
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
"with-shared/shared"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, shared.Say("RANDOMNESS_PLACEHOLDER"))
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/11-go-mod-shared/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/11-go-mod-shared/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module with-shared
|
||||||
|
|
||||||
|
go 1.12
|
||||||
5
packages/now-go/test/fixtures/11-go-mod-shared/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/11-go-mod-shared/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "api/*.go", "use": "@now/go" }],
|
||||||
|
"probes": [{ "path": "/api", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||||
|
}
|
||||||
6
packages/now-go/test/fixtures/11-go-mod-shared/shared/shared.go
vendored
Normal file
6
packages/now-go/test/fixtures/11-go-mod-shared/shared/shared.go
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
package shared
|
||||||
|
|
||||||
|
// Say func
|
||||||
|
func Say(text string) string {
|
||||||
|
return text
|
||||||
|
}
|
||||||
8
packages/now-go/test/fixtures/12-go-mod-subs/now.json
vendored
Normal file
8
packages/now-go/test/fixtures/12-go-mod-subs/now.json
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
{
|
||||||
|
"version": 2,
|
||||||
|
"builds": [{ "src": "**/*.go", "use": "@now/go" }],
|
||||||
|
"probes": [
|
||||||
|
{ "path": "/sub-1", "mustContain": "RANDOMNESS_PLACEHOLDER" },
|
||||||
|
{ "path": "/sub-2", "mustContain": "RANDOMNESS_PLACEHOLDER" }
|
||||||
|
]
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module sub-1
|
||||||
|
|
||||||
|
go 1.12
|
||||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package sub1
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
module sub-2
|
||||||
|
|
||||||
|
go 1.12
|
||||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package sub2
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
|
||||||
|
// Handler func
|
||||||
|
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||||
|
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||||
|
}
|
||||||
@@ -26,8 +26,8 @@ for (const fixture of fs.readdirSync(fixturesPath)) {
     await expect(
       testDeployment(
         { builderUrl, buildUtilsUrl },
-        path.join(fixturesPath, fixture),
-      ),
+        path.join(fixturesPath, fixture)
+      )
     ).resolves.toBeDefined();
   });
 }

@@ -2,6 +2,7 @@ package main
 
 import (
     "encoding/json"
+    "flag"
     "fmt"
     "go/ast"
     "go/parser"

@@ -10,9 +11,22 @@
     "log"
     "os"
     "path/filepath"
+    "regexp"
     "strings"
 )
 
+var ignoredFoldersRegex []*regexp.Regexp
+
+func init() {
+    ignoredFolders := []string{"vendor", "testdata", ".now"}
+
+    // Build the regex that matches if a path contains the respective ignored folder
+    // The pattern will look like: (.*/)?vendor/.*, which matches every path that contains a vendor folder
+    for _, folder := range ignoredFolders {
+        ignoredFoldersRegex = append(ignoredFoldersRegex, regexp.MustCompile("(.*/)?"+folder+"/.*"))
+    }
+}
+
 type analyze struct {
     PackageName string `json:"packageName"`
     FuncName    string `json:"functionName"`

@@ -40,8 +54,9 @@ func visit(files *[]string) filepath.WalkFunc {
         }
 
         // we don't need Dirs, or test files
-        // we only want `.go` files
-        if info.IsDir() || itf || filepath.Ext(path) != ".go" {
+        // we only want `.go` files. Further, we ignore
+        // every file that is in one of the ignored folders.
+        if info.IsDir() || itf || filepath.Ext(path) != ".go" || isInIgnoredFolder(path) {
             return nil
         }
 

@@ -50,6 +65,19 @@
     }
 }
 
+// isInIgnoredFolder checks if the given path is in one of the ignored folders.
+func isInIgnoredFolder(path string) bool {
+    // Make sure the regex works for Windows paths
+    path = filepath.ToSlash(path)
+
+    for _, pattern := range ignoredFoldersRegex {
+        if pattern.MatchString(path) {
+            return true
+        }
+    }
+    return false
+}
+
 // return unique file
 func unique(files []string) []string {
     encountered := map[string]bool{}

@@ -65,13 +93,13 @@
 }
 
 func main() {
-    if len(os.Args) != 2 {
+    if len(os.Args) != 3 {
         // Args should have the program name on `0`
         // and the file name on `1`
-        fmt.Println("Wrong number of args; Usage is:\n ./go-analyze file_name.go")
+        fmt.Println("Wrong number of args; Usage is:\n ./go-analyze -modpath=module-path file_name.go")
         os.Exit(1)
     }
-    fileName := os.Args[1]
+    fileName := os.Args[2]
     rf, err := ioutil.ReadFile(fileName)
     if err != nil {
         log.Fatal(err)

@@ -95,6 +123,17 @@
         log.Fatal(err)
     }
 
+    // looking related packages
+    var modPath string
+    flag.StringVar(&modPath, "modpath", "", "module path")
+    flag.Parse()
+    if len(modPath) > 1 {
+        err = filepath.Walk(modPath, visit(&files))
+        if err != nil {
+            log.Fatal(err)
+        }
+    }
+
     for _, file := range files {
         absFileName, _ := filepath.Abs(fileName)
         absFile, _ := filepath.Abs(file)

@@ -127,7 +166,14 @@
             for _, ed := range exportedDecl {
                 if strings.Contains(se, ed) {
                     // find relative path of related file
-                    rel, err := filepath.Rel(filepath.Dir(fileName), file)
+                    var basePath string
+                    if modPath == "" {
+                        basePath = filepath.Dir(fileName)
+                    } else {
+                        basePath = modPath
+                    }
+
+                    rel, err := filepath.Rel(basePath, file)
                     if err != nil {
                         log.Fatal(err)
                     }

@@ -138,24 +184,32 @@
     }
 
     parsed := parse(fileName)
+    offset := parsed.Pos()
+    reqRep := "*http.Request http.ResponseWriter"
+
     for _, decl := range parsed.Decls {
         fn, ok := decl.(*ast.FuncDecl)
         if !ok {
-            // this declaraction is not a function
+            // this declaration is not a function
             // so we're not interested
             continue
         }
         if fn.Name.IsExported() == true {
-            // we found the first exported function
-            // we're done!
-            analyzed := analyze{
-                PackageName: parsed.Name.Name,
-                FuncName:    fn.Name.Name,
-                Watch:       unique(relatedFiles),
+            // find a valid `net/http` handler function
+            for _, param := range fn.Type.Params.List {
+                if strings.Contains(reqRep, string(rf[param.Type.Pos()-offset:param.Type.End()-offset])) {
+                    // we found the first exported function with `net/http`
+                    // we're done!
+                    analyzed := analyze{
+                        PackageName: parsed.Name.Name,
+                        FuncName:    fn.Name.Name,
+                        Watch:       unique(relatedFiles),
+                    }
+                    analyzedJSON, _ := json.Marshal(analyzed)
+                    fmt.Print(string(analyzedJSON))
+                    os.Exit(0)
+                }
             }
-            json, _ := json.Marshal(analyzed)
-            fmt.Print(string(json))
-            os.Exit(0)
         }
     }
 }

@@ -1,6 +1,6 @@
 {
   "name": "@now/lambda",
-  "version": "0.5.3",
+  "version": "0.5.4",
   "license": "MIT",
   "repository": {
     "type": "git",

@@ -1,11 +1,22 @@
 {
   "version": 2,
   "builds": [
-    { "src": "index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } },
-    { "src": "subdirectory/index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } }
+    {
+      "src": "index.zip",
+      "use": "@now/lambda",
+      "config": { "handler": "index.handler", "runtime": "nodejs8.10" }
+    },
+    {
+      "src": "subdirectory/index.zip",
+      "use": "@now/lambda",
+      "config": { "handler": "index.handler", "runtime": "nodejs8.10" }
+    }
   ],
   "probes": [
     { "path": "/", "mustContain": "cow:NO_REPLACE_TO_AVOID_CRC_MISMATCH" },
-    { "path": "/subdirectory/", "mustContain": "yoda:NO_REPLACE_TO_AVOID_CRC_MISMATCH" }
+    {
+      "path": "/subdirectory/",
+      "mustContain": "yoda:NO_REPLACE_TO_AVOID_CRC_MISMATCH"
+    }
   ]
 }

packages/now-layer-node/.gitignore (new file, 1 line)

/dist

packages/now-layer-node/package.json (new file, 32 lines)

{
  "name": "@now/layer-node",
  "version": "0.0.2",
  "main": "./dist/src/index",
  "license": "MIT",
  "files": [
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now-builders.git",
    "directory": "packages/now-layer-node"
  },
  "scripts": {
    "build": "tsc",
    "test": "tsc && jest",
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "fs-extra": "7.0.1",
    "node-fetch": "2.6.0",
    "promisepipe": "3.0.0",
    "stream-to-promise": "2.2.0",
    "tar": "4.4.6",
    "yauzl-promise": "2.1.3"
  },
  "devDependencies": {
    "@types/tar": "4.0.0",
    "@types/yauzl-promise": "2.1.0",
    "typescript": "3.3.3"
  }
}

packages/now-layer-node/src/index.ts (new file, 37 lines)

import { tmpdir } from 'os';
import { join } from 'path';
import { glob, Files } from '@now/build-utils';
import { mkdir, remove, pathExists } from 'fs-extra';
import { install } from './install';

interface BuildLayerConfig {
  runtimeVersion: string;
  platform: string;
  arch: string;
}

interface BuildLayerResult {
  files: Files;
  entrypoint: string;
}

export async function buildLayer({
  runtimeVersion,
  platform,
  arch,
}: BuildLayerConfig): Promise<BuildLayerResult> {
  const dir = join(
    tmpdir(),
    `now-layer-node-${runtimeVersion}-${platform}-${arch}`
  );
  const exists = await pathExists(dir);
  if (exists) {
    await remove(dir);
  }
  await mkdir(dir);
  const { entrypoint } = await install(dir, runtimeVersion, platform, arch);
  const files = await glob('{bin/node,bin/node.exe,include/**}', {
    cwd: dir,
  });
  return { files, entrypoint };
}

packages/now-layer-node/src/install.ts (new file, 68 lines)

import { basename, join } from 'path';
import fetch from 'node-fetch';
import { extract } from 'tar';
import pipe from 'promisepipe';
import { createWriteStream } from 'fs-extra';
import { unzip, zipFromFile } from './unzip';

export async function install(
  dest: string,
  version: string,
  platform: string,
  arch: string
) {
  const tarballUrl = getUrl(version, platform, arch);
  console.log('Downloading from ' + tarballUrl);
  console.log('Downloading to ' + dest);
  const res = await fetch(tarballUrl);
  if (!res.ok) {
    throw new Error(`HTTP request failed: ${res.status}`);
  }
  let entrypoint: string;
  if (platform === 'win32') {
    // Put it in the `bin` dir for consistency with the tarballs
    const finalDest = join(dest, 'bin');
    const zipName = basename(tarballUrl);
    const zipPath = join(dest, zipName);

    await pipe(
      res.body,
      createWriteStream(zipPath)
    );

    const zipFile = await zipFromFile(zipPath);
    await unzip(zipFile, finalDest, { strip: 1 });
    entrypoint = join('bin', 'node.exe');
  } else {
    const extractStream = extract({ strip: 1, C: dest });
    if (!extractStream.destroy) {
      // If there is an error in promisepipe,
      // it expects a destroy method
      extractStream.destroy = () => {};
    }
    await pipe(
      res.body,
      extractStream
    );
    entrypoint = join('bin', 'node');
  }

  return { entrypoint };
}

export function getUrl(
  version: string,
  platform: string = process.platform,
  arch: string = process.arch
): string {
  let ext: string;
  let plat: string;
  if (platform === 'win32') {
    ext = 'zip';
    plat = 'win';
  } else {
    ext = 'tar.gz';
    plat = platform;
  }
  return `https://nodejs.org/dist/v${version}/node-v${version}-${plat}-${arch}.${ext}`;
}

packages/now-layer-node/src/unzip.ts (new file, 96 lines)

import { tmpdir } from 'os';
import pipe from 'promisepipe';
import { dirname, join } from 'path';
import { createWriteStream, mkdirp, symlink, unlink } from 'fs-extra';
import streamToPromise from 'stream-to-promise';
import {
  Entry,
  ZipFile,
  open as zipFromFile,
  fromBuffer as zipFromBuffer,
} from 'yauzl-promise';

export { zipFromFile, zipFromBuffer, ZipFile };

export async function unzipToTemp(
  data: Buffer | string,
  tmpDir: string = tmpdir()
): Promise<string> {
  const dir = join(
    tmpDir,
    `zeit-fun-${Math.random()
      .toString(16)
      .substring(2)}`
  );
  let zip: ZipFile;
  if (Buffer.isBuffer(data)) {
    zip = await zipFromBuffer(data);
  } else {
    zip = await zipFromFile(data);
  }
  await unzip(zip, dir);
  await zip.close();
  return dir;
}

interface UnzipOptions {
  strip?: number;
}

export async function unzip(
  zipFile: ZipFile,
  dir: string,
  opts: UnzipOptions = {}
): Promise<void> {
  let entry: Entry;
  const strip = opts.strip || 0;
  while ((entry = await zipFile.readEntry()) !== null) {
    const fileName =
      strip === 0
        ? entry.fileName
        : entry.fileName
            .split('/')
            .slice(strip)
            .join('/');
    const destPath = join(dir, fileName);
    if (/\/$/.test(entry.fileName)) {
      await mkdirp(destPath);
    } else {
      const [entryStream] = await Promise.all([
        entry.openReadStream(),
        // ensure parent directory exists
        mkdirp(dirname(destPath)),
      ]);
      const mode = entry.externalFileAttributes >>> 16;
      if (isSymbolicLink(mode)) {
        const linkDest = String(await streamToPromise(entryStream));
        await symlink(linkDest, destPath);
      } else {
        const octal = mode & 4095 /* 07777 */;
        const modeOctal = ('0000' + octal.toString(8)).slice(-4);
        const modeVal = parseInt(modeOctal, 8);
        try {
          await unlink(destPath);
        } catch (err) {
          if (err.code !== 'ENOENT') {
            throw err;
          }
        }
        const destStream = createWriteStream(destPath, {
          mode: modeVal,
        });
        await pipe(
          entryStream,
          destStream
        );
      }
    }
  }
}

const S_IFMT = 61440; /* 0170000 type of file */
const S_IFLNK = 40960; /* 0120000 symbolic link */

export function isSymbolicLink(mode: number): boolean {
  return (mode & S_IFMT) === S_IFLNK;
}

packages/now-layer-node/test/test.js (new file, 54 lines)

/* global jest, expect, it */
jest.setTimeout(30 * 1000);
const { buildLayer } = require('../');

describe('buildLayer', () => {
  it('should get node 10 and metadata for windows', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '10.16.0',
      platform: 'win32',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(names.size).toBeGreaterThan(0);
    expect(entrypoint).toBe('bin/node.exe');
    expect(names.has('bin/node.exe')).toBeTruthy();
    expect(names.has('bin/npm.cmd')).toBeFalsy();
    expect(names.has('bin/npx.cmd')).toBeFalsy();
    expect(names.has('bin/node_modules')).toBeFalsy();
  });

  it('should get node 10 and metadata for macos', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '10.16.0',
      platform: 'darwin',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(names.size).toBeGreaterThan(0);
    expect(entrypoint).toBe('bin/node');
    expect(names.has('bin/node')).toBeTruthy();
    expect(names.has('bin/npm')).toBeFalsy();
    expect(names.has('bin/npx')).toBeFalsy();
    expect(names.has('lib/node_modules')).toBeFalsy();
  });

  it('should get node 10 and metadata for linux', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '10.16.0',
      platform: 'linux',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(names.size).toBeGreaterThan(0);
    expect(entrypoint).toBe('bin/node');
    expect(names.has('bin/node')).toBeTruthy();
    expect(names.has('include/node/node.h')).toBeTruthy();
    expect(names.has('bin/npm')).toBeFalsy();
    expect(names.has('bin/npx')).toBeFalsy();
    expect(names.has('lib/node_modules')).toBeFalsy();
  });
});

packages/now-layer-node/tsconfig.json (new file, 18 lines)

{
  "compilerOptions": {
    "declaration": false,
    "esModuleInterop": true,
    "lib": ["esnext"],
    "module": "commonjs",
    "moduleResolution": "node",
    "noEmitOnError": true,
    "noFallthroughCasesInSwitch": true,
    "noImplicitReturns": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "outDir": "dist",
    "types": ["node"],
    "strict": true,
    "target": "esnext"
  }
}

packages/now-layer-node/types/promisepipe.ts (new file, 4 lines)

declare module 'promisepipe' {
  import { Stream } from 'stream';
  export default function pipe(...args: Stream[]): Promise<void>;
}

packages/now-layer-node/types/stream-to-promise.ts (new file, 6 lines)

declare module 'stream-to-promise' {
  import { Stream } from 'stream';
  export default function streamToPromise(
    stream: NodeJS.ReadableStream
  ): Promise<string>;
}

packages/now-layer-npm/.gitignore (new file, 1 line)

/dist

packages/now-layer-npm/package.json (new file, 29 lines)

{
  "name": "@now/layer-npm",
  "version": "0.0.2",
  "main": "./dist/src/index",
  "license": "MIT",
  "files": [
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now-builders.git",
    "directory": "packages/now-layer-npm"
  },
  "scripts": {
    "build": "tsc",
    "test": "tsc && jest",
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "fs-extra": "7.0.1",
    "node-fetch": "2.6.0",
    "promisepipe": "3.0.0",
    "tar": "4.4.6"
  },
  "devDependencies": {
    "@types/tar": "4.0.0",
    "typescript": "3.3.3"
  }
}

packages/now-layer-npm/src/index.ts (new file, 37 lines)

import { tmpdir } from 'os';
import { join } from 'path';
import { glob, Files } from '@now/build-utils';
import { mkdir, remove, pathExists } from 'fs-extra';
import { install } from './install';

interface BuildLayerConfig {
  runtimeVersion: string;
  platform: string;
  arch: string;
}

interface BuildLayerResult {
  files: Files;
  entrypoint: string;
}

export async function buildLayer({
  runtimeVersion,
  platform,
  arch,
}: BuildLayerConfig): Promise<BuildLayerResult> {
  const dir = join(
    tmpdir(),
    `now-layer-npm-${runtimeVersion}-${platform}-${arch}`
  );
  const exists = await pathExists(dir);
  if (exists) {
    await remove(dir);
  }
  await mkdir(dir);
  const { entrypoint } = await install(dir, runtimeVersion);
  const files = await glob('{bin/**,lib/**,node_modules/**}', {
    cwd: dir,
  });
  return { files, entrypoint };
}

packages/now-layer-npm/src/install.ts (new file, 29 lines)

import { join } from 'path';
import fetch from 'node-fetch';
import { extract } from 'tar';
import pipe from 'promisepipe';

export async function install(dest: string, version: string) {
  const tarballUrl = `https://registry.npmjs.org/npm/-/npm-${version}.tgz`;
  console.log('Downloading from ' + tarballUrl);
  console.log('Downloading to ' + dest);
  const res = await fetch(tarballUrl);
  if (!res.ok) {
    throw new Error(`HTTP request failed: ${res.status}`);
  }
  const extractStream = extract({ strip: 1, C: dest });
  if (!extractStream.destroy) {
    // If there is an error in promisepipe,
    // it expects a destroy method
    extractStream.destroy = () => {};
  }
  await pipe(
    res.body,
    extractStream
  );

  const pathToManifest = join(dest, 'package.json');
  const manifest = require(pathToManifest);
  const entrypoint = manifest.bin.npm;
  return { entrypoint };
}

packages/now-layer-npm/test/test.js (new file, 50 lines)

/* global jest, expect, it */
jest.setTimeout(30 * 1000);
const { buildLayer } = require('../');

describe('buildLayer', () => {
  it('should get npm 6 but not npm for windows', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '6.9.0',
      platform: 'win32',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(entrypoint).toBe('./bin/npm-cli.js');
    expect(names.size).toBeGreaterThan(0);
    expect(names.has('bin/npm.cmd')).toBeTruthy();
    expect(names.has('bin/npx.cmd')).toBeTruthy();
    expect(names.has('README.md')).toBeFalsy();
  });

  it('should get npm 6 but not npm for macos', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '6.9.0',
      platform: 'darwin',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(entrypoint).toBe('./bin/npm-cli.js');
    expect(names.size).toBeGreaterThan(0);
    expect(names.has('bin/npm')).toBeTruthy();
    expect(names.has('bin/npx')).toBeTruthy();
    expect(names.has('README.md')).toBeFalsy();
  });

  it('should get npm 6 but not npm for linux', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '6.9.0',
      platform: 'linux',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(entrypoint).toBe('./bin/npm-cli.js');
    expect(names.size).toBeGreaterThan(0);
    expect(names.has('bin/npm')).toBeTruthy();
    expect(names.has('bin/npx')).toBeTruthy();
    expect(names.has('README.md')).toBeFalsy();
  });
});

packages/now-layer-npm/tsconfig.json (new file, 18 lines)

{
  "compilerOptions": {
    "declaration": false,
    "esModuleInterop": true,
    "lib": ["esnext"],
    "module": "commonjs",
    "moduleResolution": "node",
    "noEmitOnError": true,
    "noFallthroughCasesInSwitch": true,
    "noImplicitReturns": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "outDir": "dist",
    "types": ["node"],
    "strict": true,
    "target": "esnext"
  }
}

packages/now-layer-npm/types/promisepipe.ts (new file, 4 lines)

declare module 'promisepipe' {
  import { Stream } from 'stream';
  export default function pipe(...args: Stream[]): Promise<void>;
}

packages/now-layer-yarn/.gitignore (new file, 1 line)

/dist

packages/now-layer-yarn/package.json (new file, 29 lines)

{
  "name": "@now/layer-yarn",
  "version": "0.0.2",
  "main": "./dist/src/index",
  "license": "MIT",
  "files": [
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now-builders.git",
    "directory": "packages/now-layer-yarn"
  },
  "scripts": {
    "build": "tsc",
    "test": "tsc && jest",
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "fs-extra": "7.0.1",
    "node-fetch": "2.6.0",
    "promisepipe": "3.0.0",
    "tar": "4.4.6"
  },
  "devDependencies": {
    "@types/tar": "4.0.0",
    "typescript": "3.3.3"
  }
}

packages/now-layer-yarn/src/index.ts (new file, 37 lines)

import { tmpdir } from 'os';
import { join } from 'path';
import { glob, Files } from '@now/build-utils';
import { mkdir, remove, pathExists } from 'fs-extra';
import { install } from './install';

interface BuildLayerConfig {
  runtimeVersion: string;
  platform: string;
  arch: string;
}

interface BuildLayerResult {
  files: Files;
  entrypoint: string;
}

export async function buildLayer({
  runtimeVersion,
  platform,
  arch,
}: BuildLayerConfig): Promise<BuildLayerResult> {
  const dir = join(
    tmpdir(),
    `now-layer-yarn-${runtimeVersion}-${platform}-${arch}`
  );
  const exists = await pathExists(dir);
  if (exists) {
    await remove(dir);
  }
  await mkdir(dir);
  const { entrypoint } = await install(dir, runtimeVersion);
  const files = await glob('{bin/**,lib/**}', {
    cwd: dir,
  });
  return { files, entrypoint };
}

packages/now-layer-yarn/src/install.ts (new file, 29 lines)

import { join } from 'path';
import fetch from 'node-fetch';
import { extract } from 'tar';
import pipe from 'promisepipe';

export async function install(dest: string, version: string) {
  const tarballUrl = `https://registry.npmjs.org/yarn/-/yarn-${version}.tgz`;
  console.log('Downloading from ' + tarballUrl);
  console.log('Downloading to ' + dest);
  const res = await fetch(tarballUrl);
  if (!res.ok) {
    throw new Error(`HTTP request failed: ${res.status}`);
  }
  const extractStream = extract({ strip: 1, C: dest });
  if (!extractStream.destroy) {
    // If there is an error in promisepipe,
    // it expects a destroy method
    extractStream.destroy = () => {};
  }
  await pipe(
    res.body,
    extractStream
  );

  const pathToManifest = join(dest, 'package.json');
  const manifest = require(pathToManifest);
  const entrypoint = manifest.bin.yarn;
  return { entrypoint };
}

packages/now-layer-yarn/test/test.js (new file, 49 lines)

/* global jest, expect, it */
jest.setTimeout(30 * 1000);
const { buildLayer } = require('../');

describe('buildLayer', () => {
  it('should get yarn for windows', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '1.16.0',
      platform: 'win32',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(entrypoint).toBe('./bin/yarn.js');
    expect(names.size).toBeGreaterThan(0);
    expect(names.has('bin/yarn.cmd')).toBeTruthy();
    expect(names.has('lib/cli.js')).toBeTruthy();
  });

  it('should get yarn for macos', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '1.16.0',
      platform: 'darwin',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(entrypoint).toBe('./bin/yarn.js');
    expect(names.size).toBeGreaterThan(0);
    expect(names.has('bin/yarn')).toBeTruthy();
    expect(names.has('lib/cli.js')).toBeTruthy();
    expect(names.has('README.md')).toBeFalsy();
  });

  it('should get yarn for linux', async () => {
    const { files, entrypoint } = await buildLayer({
      runtimeVersion: '1.16.0',
      platform: 'linux',
      arch: 'x64',
    });
    const names = new Set(Object.keys(files));
    expect(names).toBeTruthy();
    expect(entrypoint).toBe('./bin/yarn.js');
    expect(names.size).toBeGreaterThan(0);
    expect(names.has('bin/yarn')).toBeTruthy();
    expect(names.has('lib/cli.js')).toBeTruthy();
    expect(names.has('README.md')).toBeFalsy();
  });
});

packages/now-layer-yarn/tsconfig.json (new file, 18 lines)

{
  "compilerOptions": {
    "declaration": false,
    "esModuleInterop": true,
    "lib": ["esnext"],
    "module": "commonjs",
    "moduleResolution": "node",
    "noEmitOnError": true,
    "noFallthroughCasesInSwitch": true,
    "noImplicitReturns": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "outDir": "dist",
    "types": ["node"],
    "strict": true,
    "target": "esnext"
  }
}

packages/now-layer-yarn/types/promisepipe.ts (new file, 4 lines)

declare module 'promisepipe' {
  import { Stream } from 'stream';
  export default function pipe(...args: Stream[]): Promise<void>;
}

@@ -8,9 +8,11 @@ const { runNpmInstall } = require('@now/build-utils/fs/run-user-scripts.js'); //
 
 const writeFile = promisify(fs.writeFile);
 
-exports.build = async ({ files, entrypoint, workPath }) => {
+exports.build = async ({
+  files, entrypoint, workPath, meta,
+}) => {
   console.log('downloading user files...');
-  const downloadedFiles = await download(files, workPath);
+  const downloadedFiles = await download(files, workPath, meta);
   console.log('writing package.json...');
   const packageJson = { dependencies: { 'mdx-deck': '1.7.15' } };
   const packageJsonPath = path.join(workPath, 'package.json');

@@ -1,6 +1,6 @@
 {
   "name": "@now/mdx-deck",
-  "version": "0.5.3",
+  "version": "0.5.4",
   "license": "MIT",
   "repository": {
     "type": "git",

@@ -1,6 +1,6 @@
 {
   "name": "@now/next",
-  "version": "0.3.1",
+  "version": "0.4.1",
   "license": "MIT",
   "main": "./dist/index",
   "scripts": {

@@ -14,7 +14,7 @@
     "directory": "packages/now-next"
   },
   "dependencies": {
-    "@now/node-bridge": "^1.1.2",
+    "@now/node-bridge": "^1.1.4",
     "fs-extra": "^7.0.0",
     "get-port": "^5.0.0",
     "resolve-from": "^5.0.0",

@@ -32,10 +32,12 @@
   getRoutes,
   includeOnlyEntryDirectory,
   normalizePackageJson,
-  onlyStaticDirectory,
+  filesFromDirectory,
   stringMap,
   syncEnvVars,
   validateEntrypoint,
+  normalizePage,
+  getDynamicRoutes,
 } from './utils';
 
 interface BuildParamsMeta {

@@ -155,14 +157,13 @@
   entrypoint,
   meta = {} as BuildParamsMeta,
 }: BuildParamsType): Promise<{
-  routes?: any[];
+  routes?: ({ src?: string; dest?: string } | { handle: string })[];
   output: Files;
   watch?: string[];
   childProcesses: ChildProcess[];
 }> => {
   validateEntrypoint(entrypoint);
 
-  const routes: any[] = [];
   const entryDirectory = path.dirname(entrypoint);
   const entryPath = path.join(workPath, entryDirectory);
   const dotNext = path.join(entryPath, '.next');

@@ -191,6 +192,10 @@
     console.log(`${name} Installing dependencies...`);
     await runNpmInstall(entryPath, ['--prefer-offline']);
 
+    if (!process.env.NODE_ENV) {
+      process.env.NODE_ENV = 'development';
+    }
+
     // The runtime env vars consist of the base `process.env` vars, but with the
     // build env vars removed, and the runtime env vars mixed in afterwards
     const runtimeEnv: EnvConfig = Object.assign({}, process.env);

@@ -210,7 +215,13 @@
 
     return {
       output: {},
-      routes: getRoutes(entryDirectory, pathsInside, files, urls[entrypoint]),
+      routes: getRoutes(
+        entryPath,
+        entryDirectory,
+        pathsInside,
+        files,
+        urls[entrypoint]
+      ),
       watch: pathsInside,
       childProcesses: childProcess ? [childProcess] : [],
     };

@@ -285,7 +296,10 @@
     await unlinkFile(path.join(entryPath, '.npmrc'));
   }
 
+  const exportedPageRoutes: { src: string; dest: string }[] = [];
   const lambdas: { [key: string]: Lambda } = {};
+  const staticPages: { [key: string]: FileFsRef } = {};
+  const dynamicPages: string[] = [];
 
   if (isLegacy) {
     const filesAfterBuild = await glob('**', entryPath);

@@ -310,7 +324,9 @@
       file => file.startsWith('node_modules/.cache')
     );
     const launcherFiles = {
-      'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
+      'now__bridge.js': new FileFsRef({
+        fsPath: require('@now/node-bridge'),
+      }),
     };
     const nextFiles: { [key: string]: FileFsRef } = {
       ...nodeModules,

@@ -372,15 +388,33 @@
   } else {
     console.log('preparing lambda files...');
     const launcherFiles = {
-      'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
+      'now__bridge.js': new FileFsRef({
+        fsPath: require('@now/node-bridge'),
+      }),
       'now__launcher.js': new FileFsRef({
         fsPath: path.join(__dirname, 'launcher.js'),
       }),
     };
-    const pages = await glob(
-      '**/*.js',
-      path.join(entryPath, '.next', 'serverless', 'pages')
-    );
+    const pagesDir = path.join(entryPath, '.next', 'serverless', 'pages');
+
+    const pages = await glob('**/*.js', pagesDir);
+    const staticPageFiles = await glob('**/*.html', pagesDir);
+
+    Object.keys(staticPageFiles).forEach((page: string) => {
+      const staticRoute = path.join(entryDirectory, page);
+      staticPages[staticRoute] = staticPageFiles[page];
+
+      const pathname = page.replace(/\.html$/, '');
+
+      if (pathname.startsWith('$') || pathname.includes('/$')) {
+        dynamicPages.push(pathname);
+      }
+
+      exportedPageRoutes.push({
+        src: `^${path.join('/', entryDirectory, pathname)}$`,
+        dest: path.join('/', staticRoute),
+      });
+    });
 
     const pageKeys = Object.keys(pages);
 

@@ -413,12 +447,16 @@
     await Promise.all(
       pageKeys.map(async page => {
         // These default pages don't have to be handled as they'd always 404
-        if (['_app.js', '_error.js', '_document.js'].includes(page)) {
+        if (['_app.js', '_document.js'].includes(page)) {
           return;
         }
 
         const pathname = page.replace(/\.js$/, '');
 
+        if (pathname.startsWith('$') || pathname.includes('/$')) {
+          dynamicPages.push(normalizePage(pathname));
+        }
+
         console.log(`Creating lambda for page: "${page}"...`);
         lambdas[path.join(entryDirectory, pathname)] = await createLambda({
           files: {

@@ -448,14 +486,61 @@
     {}
   );
 
-  const staticDirectoryFiles = onlyStaticDirectory(
-    includeOnlyEntryDirectory(files, entryDirectory),
-    entryDirectory
+  const entryDirectoryFiles = includeOnlyEntryDirectory(files, entryDirectory);
+  const staticDirectoryFiles = filesFromDirectory(
+    entryDirectoryFiles,
+    path.join(entryDirectory, 'static')
+  );
+  const publicDirectoryFiles = filesFromDirectory(
+    entryDirectoryFiles,
+    path.join(entryDirectory, 'public')
+  );
+  const publicFiles = Object.keys(publicDirectoryFiles).reduce(
+    (mappedFiles, file) => ({
+      ...mappedFiles,
+      [file.replace(/public[/\\]+/, '')]: publicDirectoryFiles[file],
+    }),
+    {}
   );
 
+  let dynamicRoutes = getDynamicRoutes(
+    entryPath,
+    entryDirectory,
+    dynamicPages
+  ).map(route => {
+    // make sure .html is added to dest for now until
+    // outputting static files to clean routes is available
+    if (staticPages[`${route.dest}.html`]) {
+      route.dest = `${route.dest}.html`;
+    }
+    return route;
+  });
+
   return {
-    output: { ...lambdas, ...staticFiles, ...staticDirectoryFiles },
-    routes: [],
+    output: {
+      ...publicFiles,
+      ...lambdas,
+      ...staticPages,
+      ...staticFiles,
+      ...staticDirectoryFiles,
+    },
+    routes: [
+      // Static exported pages (.html rewrites)
+      ...exportedPageRoutes,
+      // Next.js page lambdas, `static/` folder, reserved assets, and `public/`
+      // folder
+      { handle: 'filesystem' },
+      // Dynamic routes
+      ...dynamicRoutes,
+      ...(isLegacy
+        ? []
+        : [
+            {
+              src: path.join('/', entryDirectory, '.*'),
+              dest: path.join('/', entryDirectory, '_error'),
+            },
+          ]),
+    ],
     watch: [],
     childProcesses: [],
   };

@@ -1,12 +1,16 @@
 if (!process.env.NODE_ENV) {
-  process.env.NODE_ENV = process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
+  process.env.NODE_ENV =
+    process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
 }
 
 const { Server } = require('http');
 const { Bridge } = require('./now__bridge');
 const page = require('./page');
 
-const server = new Server(page.render);
+// page.render is for React rendering
+// page.default is for /api rendering
+// page is for module.exports in /api
+const server = new Server(page.render || page.default || page);
 const bridge = new Bridge(server);
 bridge.listen();
 

@@ -4,7 +4,8 @@ import url from 'url';
 import { Bridge } from './now__bridge';
 
 if (!process.env.NODE_ENV) {
-  process.env.NODE_ENV = process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
+  process.env.NODE_ENV =
+    process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
 }
 
 const app = next({});

@@ -1,5 +1,6 @@
 import fs from 'fs-extra';
 import path from 'path';
+import resolveFrom from 'resolve-from';
 import { Files } from '@now/build-utils';
 
 type stringMap = { [key: string]: string };

@@ -73,11 +74,11 @@ function excludeLockFiles(files: Files): Files {
 }
 
 /**
- * Include the static directory from files
+ * Include only the files from a selected directory
  */
-function onlyStaticDirectory(files: Files, entryDir: string): Files {
+function filesFromDirectory(files: Files, dir: string): Files {
   function matcher(filePath: string) {
-    return !filePath.startsWith(path.join(entryDir, 'static'));
+    return !filePath.startsWith(dir.replace(/\\/g, '/'));
   }
 
   return excludeFiles(files, matcher);

@@ -168,7 +169,18 @@ function getPathsInside(entryDirectory: string, files: Files) {
   return watch;
 }
 
+function normalizePage(page: string): string {
+  // remove '/index' from the end
+  page = page.replace(/\/index$/, '/');
+  // Resolve on anything that doesn't start with `/`
+  if (!page.startsWith('/')) {
+    page = `/${page}`;
+  }
+  return page;
+}
+
 function getRoutes(
+  entryPath: string,
   entryDirectory: string,
   pathsInside: string[],
   files: Files,

@@ -195,8 +207,10 @@
       dest: `${url}/static/$1`,
     },
   ];
+  const filePaths = Object.keys(filesInside);
+  const dynamicPages = [];
 
-  for (const file of Object.keys(filesInside)) {
+  for (const file of filePaths) {
     const relativePath = path.relative(entryDirectory, file);
     const isPage = pathIsInside('pages', relativePath);
 

@@ -212,6 +226,10 @@
       continue;
     }
 
+    if (pageName.startsWith('$') || pageName.includes('/$')) {
+      dynamicPages.push(normalizePage(pageName));
+    }
+
     routes.push({
       src: `${prefix}${pageName}`,
       dest: `${url}/${pageName}`,

@@ -227,6 +245,82 @@
     }
   }
 
+  routes.push(
+    ...getDynamicRoutes(entryPath, entryDirectory, dynamicPages).map(
+      (route: { src: string; dest: string }) => {
+        // convert to make entire RegExp match as one group
+        route.src = route.src.replace('^', '^(').replace('$', ')$');
+        route.dest = `${url}/$1`;
+        return route;
+      }
+    )
+  );
+
+  // Add public folder routes
+  for (const file of filePaths) {
+    const relativePath = path.relative(entryDirectory, file);
+    const isPublic = pathIsInside('public', relativePath);
+
+    if (!isPublic) continue;
+
+    const fileName = path.relative('public', relativePath);
+    const route = {
+      src: `${prefix}${fileName}`,
+      dest: `${url}/${fileName}`,
+    };
+
+    // Only add the route if a page is not already using it
+    if (!routes.some(r => r.src === route.src)) {
+      routes.push(route);
+    }
+  }
+
+  return routes;
+}
+
+export function getDynamicRoutes(
+  entryPath: string,
+  entryDirectory: string,
+  dynamicPages: string[]
+): { src: string; dest: string }[] {
+  if (!dynamicPages.length) {
+    return [];
+  }
+
+  let getRouteRegex:
+    | ((pageName: string) => { re: RegExp })
+    | undefined = undefined;
+
+  let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;
+
+  try {
+    ({ getRouteRegex, getSortedRoutes } = require(resolveFrom(
+      entryPath,
+      'next-server/dist/lib/router/utils'
+    )));
+    if (typeof getRouteRegex !== 'function') {
+      getRouteRegex = undefined;
+    }
+  } catch (_) {}
+
+  if (!getRouteRegex || !getSortedRoutes) {
+    throw new Error(
+      'Found usage of dynamic routes but not on a new enough version of Next.js.'
+    );
+  }
+
+  const pageMatchers = getSortedRoutes(dynamicPages).map(pageName => ({
+    pageName,
+    matcher: getRouteRegex!(pageName).re,
+  }));
+
+  const routes: { src: string; dest: string }[] = [];
+  pageMatchers.forEach(pageMatcher => {
+    routes.push({
+      src: pageMatcher.matcher.source,
+      dest: path.join('/', entryDirectory, pageMatcher.pageName),
+    });
+  });
   return routes;
 }
 

@@ -250,10 +344,11 @@ export {
   includeOnlyEntryDirectory,
   excludeLockFiles,
   normalizePackageJson,
-  onlyStaticDirectory,
+  filesFromDirectory,
   getNextConfig,
   getPathsInside,
   getRoutes,
   stringMap,
   syncEnvVars,
+  normalizePage,
 };

@@ -12,10 +12,12 @@ it(
     } = await runBuildLambda(path.join(__dirname, 'standard'));
     expect(output.index).toBeDefined();
     const filePaths = Object.keys(output);
+    const serverlessError = filePaths.some(filePath => filePath.match(/_error/));
     const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
     const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
     expect(hasUnderScoreAppStaticFile).toBeTruthy();
     expect(hasUnderScoreErrorStaticFile).toBeTruthy();
+    expect(serverlessError).toBeTruthy();
   },
   FOUR_MINUTES,
 );
@@ -28,6 +30,7 @@ it(
     } = await runBuildLambda(path.join(__dirname, 'monorepo'));
     expect(output['www/index']).toBeDefined();
     expect(output['www/static/test.txt']).toBeDefined();
+    expect(output['www/data.txt']).toBeDefined();
     const filePaths = Object.keys(output);
     const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
     const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
@@ -96,3 +99,14 @@ it(
   },
   FOUR_MINUTES,
 );
+
+it(
+  'Should build the public-files test',
+  async () => {
+    const {
+      buildResult: { output },
+    } = await runBuildLambda(path.join(__dirname, 'public-files'));
+    expect(output['robots.txt']).toBeDefined();
+  },
+  FOUR_MINUTES,
+);
@@ -1,6 +1,4 @@
 {
   "version": 2,
-  "builds": [
-    {"src": "package.json", "use": "@now/next"}
-  ]
+  "builds": [{ "src": "package.json", "use": "@now/next" }]
 }
@@ -3,4 +3,4 @@
     "isomorphic-unfetch": "latest",
     "next": "7.0.0"
   }
 }
@@ -1,6 +1,4 @@
 {
   "version": 2,
-  "builds": [
-    {"src": "next.config.js", "use": "@now/next"}
-  ]
+  "builds": [{ "src": "next.config.js", "use": "@now/next" }]
 }
@@ -2,4 +2,4 @@
   "dependencies": {
     "next": "^7.0.2"
   }
 }
@@ -1,6 +1,4 @@
 {
   "version": 2,
-  "builds": [
-    {"src": "next.config.js", "use": "@now/next"}
-  ]
+  "builds": [{ "src": "next.config.js", "use": "@now/next" }]
 }
@@ -2,4 +2,4 @@
   "dependencies": {
     "next": "7.0.2"
   }
 }
@@ -1,6 +1,4 @@
 {
   "version": 2,
-  "builds": [
-    { "src": "www/package.json", "use": "@now/next" }
-  ]
+  "builds": [{ "src": "www/package.json", "use": "@now/next" }]
 }
@@ -0,0 +1 @@
+data
@@ -1,6 +1,4 @@
 {
   "version": 2,
-  "builds": [
-    {"src": "pages/index.js", "use": "@now/next"}
-  ]
+  "builds": [{ "src": "pages/index.js", "use": "@now/next" }]
 }
@@ -0,0 +1,3 @@
+module.exports = {
+  target: 'serverless',
+};
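For context (not part of the diff): target: 'serverless' is what makes next build emit a standalone bundle per page, which @now/next then packages as individual lambdas. A hypothetical minimal page for a fixture using this config (not taken from the repo) could be:

// pages/index.js (hypothetical fixture page, shown only for illustration)
import React from 'react';

export default function Index() {
  return React.createElement('div', null, 'hello from a serverless build');
}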
4  packages/now-next/test/integration/public-files/now.json  Normal file
@@ -0,0 +1,4 @@
+{
+  "version": 2,
+  "builds": [{ "src": "next.config.js", "use": "@now/next" }]
+}
10  packages/now-next/test/integration/public-files/package.json  Normal file
@@ -0,0 +1,10 @@
+{
+  "scripts": {
+    "now-build": "next build"
+  },
+  "dependencies": {
+    "next": "8",
+    "react": "16",
+    "react-dom": "16"
+  }
+}
Some files were not shown because too many files have changed in this diff.