Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 12:57:46 +00:00)

Compare commits: 136 commits, @now/php@0 ... @now/pytho
The comparison lists 136 commits; their SHA1 prefixes, in the order shown:

e186f89cfd, 50cade8bba, 13866e61f6, b72f902271, 159cfe99dd, 1d9a96d104, 245f846d3e, c5ef7f3f35, ccba15a5aa, f49aefa8e4,
d6b36df4ce, 3e4dd10a79, 73956706bd, bd8da5360d, 6d5a2a4438, c88dc78e33, 63ac11e9f7, 1840632729, 00d8eb0f65, 3db58ac373,
92a1720eea, 9abbfbe3f3, 11ef8aa816, 3a122ea950, 737e50630a, fb27b7b9be, d1a4aecd2f, 5ef7014ed8, 0ff2c9950e, ddcdcdf3e2,
bfc99f19d2, de2c08cfe8, 9679f07124, 6ce24d6a4e, e3e029f5f6, 89172a6e89, e8f1dbaa46, 16b5b6fdf3, 3bab29ff76, d675d2e668,
2dda88e676, 5a0090eb1f, d438b4ec4e, f8810fd7e6, a642cfea96, 2daa20a9f2, 4d5c0c40f0, 29051681df, 96d5e81538, 9ba9dd6949,
b362d57270, 4ff95e1718, ef02bedd4d, ed68a09c3e, ac7ae5fc5d, 9727b1f020, 2dc454f15f, 4463af5c7a, c00fb37cf6, 4deb426f9c,
008b04413a, f177ba46e9, c030fce589, 50a5150bb5, 0578ccf47e, e32cd36ded, 6ac0ab121c, 05db2e6a73, 0b89d30d6c, 8a021c9417,
f218771382, 17309291ed, 86300577ae, f9594e0d61, 20fd4b2e12, 718e4d0e0c, dc3584cd08, b41788b241, af9a2f9792, f8b8e760de,
93d6ec8024, 7ed6b84056, 31da488365, 8eaf05f782, 9311e90f27, c0de970de2, 465ac2093d, 19ab0e8698, 02fa98e5e3, 4aef9d48b0,
bd2d05344e, edc7696623, e2f91094bc, 38dba57378, be6a6ba1d7, 31fb5d9ec8, 6c8f946a48, d59e1b9789, 2852d3fbc3, d0292eb751,
17bbf69346, 4fb4229c90, 03b7586b50, a1427866ca, 5f787b8146, b03405a665, 4393dad15a, b4d604b2e9, 5fb6e5c0ba, 9d7dd3a713,
4f867b320d, c153690104, 8c1b96edf7, 15c83a69f7, 0986de85ee, 94c5d83ccc, ff49b9d32d, ec5290dab1, 4f758ec84e, 7951be156a,
1bafc1d7b7, 1493101325, 824b044a96, 0978be4c3d, dc832aa6c3, 8df77fe4fa, ff413b45fa, e7befb5dc1, b898f82771, e6b22cb0df,
cbfe4a133d, 823b78c626, 03e1255043, 3373cbca4e, 4fba4b5f67, 9fcf6da3c1
@@ -29,14 +29,8 @@ jobs:
      - run:
          name: Tests and Coverage
          command: yarn test-coverage
      - run:
          name: Potentially save npm token
          command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
      - run:
          name: Potentially publish releases to npm
          command: ./.circleci/publish.sh
workflows:
  version: 2
  build-and-deploy:
  build-and-test:
    jobs:
      - build
@@ -1,6 +1,13 @@
#!/bin/bash
set -euo pipefail

if [ -z "$NPM_TOKEN" ]; then
  echo "NPM_TOKEN not found. Did you forget to assign the GitHub Action secret?"
  exit 1
fi

echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc

if [ ! -e ~/.npmrc ]; then
  echo "~/.npmrc file does not exist, skipping publish"
  exit 0
.editorconfig (new file, 43 lines)
@@ -0,0 +1,43 @@
root = true

[*]
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true

[{*.json,*.json.example,*.gyp,*.yml,*.yaml,*.workflow}]
indent_style = space
indent_size = 2

[{*.py,*.asm}]
indent_style = space

[*.py]
indent_size = 4

[*.asm]
indent_size = 8

[*.md]
trim_trailing_whitespace = false
indent_style = space
indent_size = 2

# Ideal settings - some plugins might support these
[*.js,*.jsx,*.ts,*.tsx]
quote_type = single
indent_style = space
indent_size = 2

[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.java,*.go,*.rs,*.php,*.ng,*.d,*.cs,*.swift}]
indent_style = tab
indent_size = 4
tab_width = 4

[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.tsx,*.d,*.cs,*.swift}]
curly_bracket_next_line = false
spaces_around_operators = true
spaces_around_brackets = outside
# close enough to 1TB
indent_brace_style = K&R
@@ -3,8 +3,15 @@
/**/node_modules/*
/packages/now-go/go/*
/packages/now-build-utils/dist/*
/packages/now-build-utils/src/*.js
/packages/now-build-utils/src/fs/*.js
/packages/now-node/dist/*
/packages/now-layer-node/dist/*
/packages/now-layer-npm/dist/*
/packages/now-layer-yarn/dist/*
/packages/now-next/dist/*
/packages/now-node-bridge/*
/packages/now-python/*
/packages/now-python/dist/*
/packages/now-optipng/dist/*
/packages/now-go/*
/packages/now-rust/dist/*
.github/CODEOWNERS (new file, 9 lines)
@@ -0,0 +1,9 @@
# Documentation
# https://help.github.com/en/articles/about-code-owners

* @styfle
/packages/now-node @styfle @tootallnate
/packages/now-next @timer @dav-is
/packages/now-go @styfle @sophearak
/packages/now-python @styfle @sophearak
/packages/now-rust @styfle @mike-engel @anmonteiro
.github/main.workflow (new file, 76 lines)
@@ -0,0 +1,76 @@
workflow "Canary publish" {
  on = "push"
  resolves = ["3. Canary yarn run publish"]
}

action "0. Canary filter" {
  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
  args = "branch canary"
}

action "0. Canary PR not deleted" {
  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
  needs = ["0. Canary filter"]
  args = "not deleted"
}

action "1. Canary yarn install" {
  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
  needs = ["0. Canary PR not deleted"]
  runs = "yarn"
  args = "install"
}

action "2. Canary yarn run build" {
  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
  needs = ["1. Canary yarn install"]
  runs = "yarn"
  args = "run build"
}

action "3. Canary yarn run publish" {
  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
  needs = ["2. Canary yarn run build"]
  runs = "yarn"
  args = "run publish-from-github"
  secrets = ["NPM_TOKEN"]
}


workflow "Master publish" {
  on = "push"
  resolves = ["3. Master yarn run publish"]
}

action "0. Master filter" {
  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
  args = "branch master"
}

action "0. Master PR not deleted" {
  uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
  needs = ["0. Master filter"]
  args = "not deleted"
}

action "1. Master yarn install" {
  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
  needs = ["0. Master PR not deleted"]
  runs = "yarn"
  args = "install"
}

action "2. Master yarn run build" {
  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
  needs = ["1. Master yarn install"]
  runs = "yarn"
  args = "run build"
}

action "3. Master yarn run publish" {
  uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
  needs = ["2. Master yarn run build"]
  runs = "yarn"
  args = "run publish-from-github"
  secrets = ["NPM_TOKEN"]
}
.gitignore (4 lines changed)
@@ -1,4 +1,6 @@
node_modules
tmp
target/
.next
.next
coverage
*.tgz
@@ -1,4 +0,0 @@
{
  "singleQuote": true,
  "trailingComma": "es5"
}
CODE_OF_CONDUCT.md (new file, 74 lines)
@@ -0,0 +1,74 @@
## Code of Conduct

### Our Pledge

In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.

### Our Standards

Examples of behavior that contributes to creating a positive environment
include:

- Using welcoming and inclusive language
- Being respectful of differing viewpoints and experiences
- Gracefully accepting constructive criticism
- Focusing on what is best for the community
- Showing empathy towards other community members

Examples of unacceptable behavior by participants include:

- The use of sexualized language or imagery and unwelcome sexual attention or
advances
- Trolling, insulting/derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or electronic
address, without explicit permission
- Other conduct which could reasonably be considered inappropriate in a
professional setting

### Our Responsibilities

Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.

Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.

### Scope

This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.

### Enforcement

Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at [abuse@zeit.co](mailto:abuse@zeit.co). All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.

Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.

### Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]

[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
README.md (13 lines changed)
@@ -1,8 +1,11 @@
# now-builders

This is the full list of official Builders provided by the ZEIT team.
This is a monorepo containing the [Official Builders](https://zeit.co/docs/v2/deployments/builders/overview) provided by the ZEIT team.

More details here: https://zeit.co/docs/v2/deployments/builders/overview/
There are two branches:

- canary - published to npm as `canary` dist-tag, eg `@now/node@canary`
- master - published to npm as `latest` dist-tag, eg `@now/node@latest`

### Publishing to npm

@@ -20,8 +23,8 @@ For the canary channel use:
yarn publish-canary
```

CircleCI will take care of publishing the updated packages to npm from there.
GitHub Actions will take care of publishing the updated packages to npm from there.

If for some reason CircleCI fails to publish the npm package, you may do so
If for some reason GitHub Actions fails to publish the npm package, you may do so
manually by running `npm publish` from the package directory. Make sure to
include the `--tag canary` parameter if you are publishing a canary release!
use `npm publish --tag canary` if you are publishing a canary release!
@@ -29,12 +29,11 @@ Serverless:
- No runtime dependencies, meaning smaller lambda functions
- Optimized for fast [cold start](https://zeit.co/blog/serverless-ssr#cold-start)


#### Possible Ways to Fix It

In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.

1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:

```
npm install next --save
@@ -46,7 +45,7 @@ npm install next --save
{
  "scripts": {
    "now-build": "next build"
  },
  }
}
```

@@ -54,9 +53,9 @@ npm install next --save

```js
module.exports = {
  target: 'serverless'
  target: 'serverless',
  // Other options are still valid
}
};
```

4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
@@ -70,4 +69,4 @@ module.exports = {

### Useful Links

- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
@@ -20,7 +20,7 @@ npm install next --save
{
  "scripts": {
    "now-build": "next build"
  },
  }
}
```

@@ -28,9 +28,9 @@ npm install next --save

```js
module.exports = {
  target: 'serverless'
  target: 'serverless',
  // Other options
}
};
```

4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`
errors/now-static-build-failed-to-detect-a-server.md (new file, 38 lines)
@@ -0,0 +1,38 @@
# `@now/static-build` Failed to detect a server running

#### Why This Warning Occurred

When running `now dev`, the `@now/static-build` builder proxies relevant HTTP
requests to the server that is created by the `now-dev` script in the
`package.json` file.

In order for `now dev` to know which port the server is running on, the builder
is provided a `$PORT` environment variable that the server _must_ bind to. The
error "Failed to detect a server running on port" is printed if the builder fails
to detect a server listening on that specific port within five minutes.

#### Possible Ways to Fix It

Please ensure that your `now-dev` script binds the spawned development server on
the provided `$PORT` that the builder expects the server to bind to.

For example, if you are using Gatsby, your `now-dev` script must use the `-p`
(port) option to bind to the `$PORT` specified from the builder:

```
{
  ...
  "scripts": {
    ...
    "now-dev": "gatsby develop -p $PORT"
  }
}
```

Consult your static builder program's `--help` or documentation to figure out what
the command line flag to bind to a specific port is (in many cases, it is one of:
`-p` / `-P` / `--port`).

### Useful Links

- [`@now/static-build` Local Development Documentation](https://zeit.co/docs/v2/deployments/official-builders/static-build-now-static-build#local-development)
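If the project does not use an off-the-shelf framework CLI, the same rule applies to a hand-rolled server. A minimal sketch (everything here is illustrative; the only hard requirement is listening on the `$PORT` the builder provides):

```ts
// Illustrative `now-dev` server; it simply binds to the builder-provided $PORT.
import { createServer } from 'http';

const port = Number(process.env.PORT) || 3000;

createServer((req, res) => {
  res.end(`dev response for ${req.url}`);
}).listen(port, () => {
  console.log(`dev server listening on port ${port}`);
});
```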
@@ -1,5 +1,37 @@
const childProcess = require('child_process');
const path = require('path');

const command = 'git diff HEAD~1 --name-only';
const diff = childProcess.execSync(command).toString();

const changed = diff
  .split('\n')
  .filter(item => Boolean(item) && item.includes('packages/'))
  .map(item => path.relative('packages', item).split('/')[0]);

const matches = [];

if (changed.length > 0) {
  console.log('The following packages have changed:');

  changed.map((item) => {
    matches.push(item);
    console.log(item);

    return null;
  });
} else {
  matches.push('now-node');
  console.log(`No packages changed, defaulting to ${matches[0]}`);
}

const testMatch = Array.from(new Set(matches)).map(
  item => `**/${item}/**/?(*.)+(spec|test).[jt]s?(x)`,
);

module.exports = {
  testEnvironment: 'node',
  testMatch,
  collectCoverageFrom: [
    'packages/(!test)/**/*.{js,jsx}',
    '!**/node_modules/**',
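In other words, the Jest config scopes `testMatch` to the packages touched by the last commit. For example (illustrative path), a change under `packages/now-node/` would make the config above compute:

```js
// Value computed by the config above when only `packages/now-node` changed:
const testMatch = ['**/now-node/**/?(*.)+(spec|test).[jt]s?(x)'];
```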
@@ -1,9 +1,7 @@
{
  "npmClient": "yarn",
  "useWorkspaces": true,
  "packages": [
    "packages/*"
  ],
  "packages": ["packages/*"],
  "command": {
    "publish": {
      "npmClient": "npm",
package.json (11 lines changed)
@@ -12,8 +12,9 @@
"scripts": {
  "lerna": "lerna",
  "bootstrap": "lerna bootstrap",
  "publish-stable": "lerna version",
  "publish-canary": "lerna version prerelease --preid canary",
  "publish-stable": "git checkout master && git pull && lerna version",
  "publish-canary": "git checkout canary && git pull && lerna version prerelease --preid canary",
  "publish-from-github": "./.circleci/publish.sh",
  "build": "./.circleci/build.sh",
  "lint": "eslint .",
  "codecov": "codecov",
@@ -51,6 +52,10 @@
  "lint-staged": "^8.0.4",
  "node-fetch": "^2.3.0",
  "pre-commit": "^1.2.2",
  "prettier": "^1.15.2"
  "prettier": "1.17.1"
},
"prettier": {
  "singleQuote": true,
  "trailingComma": "es5"
}
}
@@ -1,10 +1,13 @@
const execa = require('execa');
const { join } = require('path');
const snakeCase = require('snake-case');
const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/no-extraneous-dependencies
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
const {
  glob,
  download,
  createLambda,
  getWriteableDirectory,
  shouldServe,
} = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies

exports.config = {
  maxLambdaSize: '10mb',
@@ -15,7 +18,7 @@ exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
exports.build = async ({
  workPath, files, entrypoint, config,
}) => {
  const srcDir = await getWritableDirectory();
  const srcDir = await getWriteableDirectory();

  console.log('downloading files...');
  await download(files, srcDir);
@@ -55,3 +58,5 @@ exports.build = async ({
  [entrypoint]: lambda,
};
};

exports.shouldServe = shouldServe;
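A compressed sketch of the builder shape this diff moves toward, using only the consolidated `@now/build-utils` entry point (the handler name and runtime string are illustrative, not taken from the diff):

```js
const {
  download,
  createLambda,
  getWriteableDirectory,
  shouldServe,
} = require('@now/build-utils');

exports.build = async ({ files, entrypoint }) => {
  const srcDir = await getWriteableDirectory(); // scratch dir for the sources
  await download(files, srcDir);                // materialize the user files on disk

  const lambda = await createLambda({
    files,                  // files to package into the Lambda zip
    handler: 'handler',     // illustrative handler name
    runtime: 'provided',    // illustrative runtime identifier
    environment: {},
  });

  return { [entrypoint]: lambda };
};

exports.shouldServe = shouldServe;
```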
@@ -1,6 +1,6 @@
{
  "name": "@now/bash",
  "version": "0.2.0",
  "version": "0.2.3",
  "description": "Now 2.0 builder for HTTP endpoints written in Bash",
  "main": "index.js",
  "author": "Nathan Rajlich <nate@zeit.co>",
@@ -1,6 +1,6 @@
{
  "name": "@now/build-utils",
  "version": "0.5.0",
  "version": "0.5.6",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.js",
@@ -10,8 +10,10 @@
  "directory": "packages/now-build-utils"
},
"dependencies": {
  "@types/cross-spawn": "6.0.0",
  "async-retry": "1.2.3",
  "async-sema": "2.1.4",
  "cross-spawn": "6.0.5",
  "end-of-stream": "1.4.1",
  "fs-extra": "7.0.0",
  "glob": "7.1.3",
@@ -31,7 +31,10 @@ class FileFsRef implements File {
|
||||
this.fsPath = fsPath;
|
||||
}
|
||||
|
||||
static async fromFsPath({ mode, fsPath }: FileFsRefOptions): Promise<FileFsRef> {
|
||||
static async fromFsPath({
|
||||
mode,
|
||||
fsPath,
|
||||
}: FileFsRefOptions): Promise<FileFsRef> {
|
||||
let m = mode;
|
||||
if (!m) {
|
||||
const stat = await fs.lstat(fsPath);
|
||||
@@ -40,7 +43,11 @@ class FileFsRef implements File {
|
||||
return new FileFsRef({ mode: m, fsPath });
|
||||
}
|
||||
|
||||
static async fromStream({ mode = 0o100644, stream, fsPath }: FromStreamOptions): Promise<FileFsRef> {
|
||||
static async fromStream({
|
||||
mode = 0o100644,
|
||||
stream,
|
||||
fsPath,
|
||||
}: FromStreamOptions): Promise<FileFsRef> {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof stream.pipe === 'function'); // is-stream
|
||||
assert(typeof fsPath === 'string');
|
||||
@@ -48,7 +55,7 @@ class FileFsRef implements File {
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const dest = fs.createWriteStream(fsPath, {
|
||||
mode: mode & 0o777
|
||||
mode: mode & 0o777,
|
||||
});
|
||||
stream.pipe(dest);
|
||||
stream.on('error', reject);
|
||||
@@ -72,15 +79,15 @@ class FileFsRef implements File {
|
||||
let flag = false;
|
||||
|
||||
// eslint-disable-next-line consistent-return
|
||||
return multiStream((cb) => {
|
||||
return multiStream(cb => {
|
||||
if (flag) return cb(null, null);
|
||||
flag = true;
|
||||
|
||||
this.toStreamAsync()
|
||||
.then((stream) => {
|
||||
.then(stream => {
|
||||
cb(null, stream);
|
||||
})
|
||||
.catch((error) => {
|
||||
.catch(error => {
|
||||
cb(error, null);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -8,7 +8,6 @@ import { File } from './types';
interface FileRefOptions {
  mode?: number;
  digest: string;
  mutable?: boolean;
}

const semaToDownloadFromS3 = new Sema(5);
@@ -26,26 +25,29 @@ export default class FileRef implements File {
  public type: 'FileRef';
  public mode: number;
  public digest: string;
  public mutable: boolean;

  constructor({ mode = 0o100644, digest, mutable = false }: FileRefOptions) {
  constructor({ mode = 0o100644, digest }: FileRefOptions) {
    assert(typeof mode === 'number');
    assert(typeof digest === 'string');
    assert(typeof mutable === 'boolean');
    this.type = 'FileRef';
    this.mode = mode;
    this.digest = digest;
    this.mutable = mutable;
  }

  async toStreamAsync(): Promise<NodeJS.ReadableStream> {
    let url = '';
    // sha:24be087eef9fac01d61b30a725c1a10d7b45a256
    const digestParts = this.digest.split(':');
    if (digestParts[0] === 'sha') {
      url = this.mutable
        ? `https://s3.amazonaws.com/now-files/${digestParts[1]}`
        : `https://dmmcy0pwk6bqi.cloudfront.net/${digestParts[1]}`;
    const [digestType, digestHash] = this.digest.split(':');
    if (digestType === 'sha') {
      // This CloudFront URL edge caches the `now-files` S3 bucket to prevent
      // overloading it
      // `https://now-files.s3.amazonaws.com/${digestHash}`
      url = `https://dmmcy0pwk6bqi.cloudfront.net/${digestHash}`;
    } else if (digestType === 'sha+ephemeral') {
      // This URL is currently only used for cache files that constantly
      // change. We shouldn't cache it on CloudFront because it'd always be a
      // MISS.
      url = `https://now-ephemeral-files.s3.amazonaws.com/${digestHash}`;
    } else {
      throw new Error('Expected digest to be sha');
    }
@@ -58,14 +60,14 @@ export default class FileRef implements File {
        const resp = await fetch(url);
        if (!resp.ok) {
          const error = new BailableError(
            `download: ${resp.status} ${resp.statusText} for ${url}`,
            `download: ${resp.status} ${resp.statusText} for ${url}`
          );
          if (resp.status === 403) error.bail = true;
          throw error;
        }
        return resp.body;
      },
      { factor: 1, retries: 3 },
      { factor: 1, retries: 3 }
    );
  } finally {
    // console.timeEnd(`downloading ${url}`);
@@ -77,15 +79,15 @@ export default class FileRef implements File {
    let flag = false;

    // eslint-disable-next-line consistent-return
    return multiStream((cb) => {
    return multiStream(cb => {
      if (flag) return cb(null, null);
      flag = true;

      this.toStreamAsync()
        .then((stream) => {
        .then(stream => {
          cb(null, stream);
        })
        .catch((error) => {
        .catch(error => {
          cb(error, null);
        });
    });
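A minimal usage sketch of the class above (the digest value is the example given in the code comment; the surrounding script is illustrative):

```ts
import FileRef from './file-ref';

async function example() {
  // `sha:` digests are served from the CloudFront cache in front of the
  // `now-files` bucket; `sha+ephemeral:` digests go straight to S3.
  const ref = new FileRef({ digest: 'sha:24be087eef9fac01d61b30a725c1a10d7b45a256' });
  const stream = await ref.toStreamAsync();
  stream.pipe(process.stdout);
}

example();
```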
@@ -4,11 +4,11 @@ import { File, Files, Meta } from '../types';
import { remove, mkdirp, readlink, symlink } from 'fs-extra';

export interface DownloadedFiles {
  [filePath: string]: FileFsRef
  [filePath: string]: FileFsRef;
}

const S_IFMT = 61440; /* 0170000 type of file */
const S_IFLNK = 40960; /* 0120000 symbolic link */
const S_IFMT = 61440; /* 0170000 type of file */
const S_IFLNK = 40960; /* 0120000 symbolic link */

export function isSymbolicLink(mode: number): boolean {
  return (mode & S_IFMT) === S_IFLNK;
@@ -17,9 +17,9 @@ export function isSymbolicLink(mode: number): boolean {
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
  const { mode } = file;
  if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
    const [ target ] = await Promise.all([
    const [target] = await Promise.all([
      readlink((file as FileFsRef).fsPath),
      mkdirp(path.dirname(fsPath))
      mkdirp(path.dirname(fsPath)),
    ]);
    await symlink(target, fsPath);
    return FileFsRef.fromFsPath({ mode, fsPath });
@@ -34,12 +34,25 @@ async function removeFile(basePath: string, fileMatched: string) {
  await remove(file);
}

export default async function download(files: Files, basePath: string, meta?: Meta): Promise<DownloadedFiles> {
export default async function download(
  files: Files,
  basePath: string,
  meta?: Meta
): Promise<DownloadedFiles> {
  const { isDev = false, filesChanged = null, filesRemoved = null } =
    meta || {};

  if (isDev) {
    // In `now dev`, the `download()` function is a no-op because
    // the `basePath` matches the `cwd` of the dev server, so the
    // source files are already available.
    return files as DownloadedFiles;
  }

  const files2: DownloadedFiles = {};
  const { filesChanged = null, filesRemoved = null } = meta || {};

  await Promise.all(
    Object.keys(files).map(async (name) => {
    Object.keys(files).map(async name => {
      // If the file does not exist anymore, remove it.
      if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) {
        await removeFile(basePath, name);
@@ -55,7 +68,7 @@ export default async function download(files: Files, basePath: string, meta?: Me
      const fsPath = path.join(basePath, name);

      files2[name] = await downloadFile(file, fsPath);
    }),
    })
  );

  return files2;
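A sketch of how a builder might call the extended signature above during `now dev` (the paths and file names are illustrative):

```ts
import download from './fs/download';
import { Files, Meta } from './types';

async function devRefresh(files: Files, workPath: string) {
  const meta: Meta = {
    isDev: true,                     // `now dev`: sources are already on disk
    filesChanged: ['api/index.js'],  // illustrative changed file
    filesRemoved: [],
  };
  // With `isDev` set, download() returns the given files unchanged instead of
  // copying them, because workPath already contains the live sources.
  return download(files, workPath, meta);
}
```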
@@ -8,12 +8,16 @@ import FileFsRef from '../file-fs-ref';
|
||||
type GlobOptions = vanillaGlob_.IOptions;
|
||||
|
||||
interface FsFiles {
|
||||
[filePath: string]: FileFsRef
|
||||
[filePath: string]: FileFsRef;
|
||||
}
|
||||
|
||||
const vanillaGlob = promisify(vanillaGlob_);
|
||||
|
||||
export default async function glob(pattern: string, opts: GlobOptions | string, mountpoint?: string): Promise<FsFiles> {
|
||||
export default async function glob(
|
||||
pattern: string,
|
||||
opts: GlobOptions | string,
|
||||
mountpoint?: string
|
||||
): Promise<FsFiles> {
|
||||
let options: GlobOptions;
|
||||
if (typeof opts === 'string') {
|
||||
options = { cwd: opts };
|
||||
@@ -23,7 +27,7 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
|
||||
|
||||
if (!options.cwd) {
|
||||
throw new Error(
|
||||
'Second argument (basePath) must be specified for names of resulting files',
|
||||
'Second argument (basePath) must be specified for names of resulting files'
|
||||
);
|
||||
}
|
||||
|
||||
@@ -41,11 +45,11 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
|
||||
const files = await vanillaGlob(pattern, options);
|
||||
|
||||
for (const relativePath of files) {
|
||||
const fsPath = path.join(options.cwd!, relativePath);
|
||||
const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
|
||||
let stat: Stats = options.statCache![fsPath] as Stats;
|
||||
assert(
|
||||
stat,
|
||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
|
||||
);
|
||||
if (stat.isFile()) {
|
||||
const isSymlink = options.symlinks![fsPath];
|
||||
|
||||
@@ -7,6 +7,6 @@ export default function rename(files: Files, delegate: Delegate): Files {
|
||||
...newFiles,
|
||||
[delegate(name)]: files[name],
|
||||
}),
|
||||
{},
|
||||
{}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,22 +1,28 @@
|
||||
import assert from 'assert';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import { spawn, SpawnOptions } from 'child_process';
|
||||
import spawn from 'cross-spawn';
|
||||
import { SpawnOptions } from 'child_process';
|
||||
|
||||
function spawnAsync(command: string, args: string[], cwd: string, opts: SpawnOptions = {}) {
|
||||
function spawnAsync(
|
||||
command: string,
|
||||
args: string[],
|
||||
cwd: string,
|
||||
opts: SpawnOptions = {}
|
||||
) {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const stderrLogs: Buffer[] = []
|
||||
const stderrLogs: Buffer[] = [];
|
||||
opts = { stdio: 'inherit', cwd, ...opts };
|
||||
const child = spawn(command, args, opts);
|
||||
|
||||
if (opts.stdio === 'pipe'){
|
||||
if (opts.stdio === 'pipe') {
|
||||
child.stderr.on('data', data => stderrLogs.push(data));
|
||||
}
|
||||
|
||||
child.on('error', reject);
|
||||
child.on('close', (code, signal) => {
|
||||
if (code === 0) {
|
||||
return resolve()
|
||||
return resolve();
|
||||
}
|
||||
|
||||
const errorLogs = stderrLogs.map(line => line.toString()).join('');
|
||||
@@ -58,13 +64,15 @@ async function scanParentDirs(destPath: string, scriptName?: string) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
if (await fs.pathExists(packageJsonPath)) {
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
|
||||
const packageJson = JSON.parse(
|
||||
await fs.readFile(packageJsonPath, 'utf8')
|
||||
);
|
||||
hasScript = Boolean(
|
||||
packageJson.scripts && scriptName && packageJson.scripts[scriptName],
|
||||
packageJson.scripts && scriptName && packageJson.scripts[scriptName]
|
||||
);
|
||||
// eslint-disable-next-line no-await-in-loop
|
||||
hasPackageLockJson = await fs.pathExists(
|
||||
path.join(currentDestPath, 'package-lock.json'),
|
||||
path.join(currentDestPath, 'package-lock.json')
|
||||
);
|
||||
break;
|
||||
}
|
||||
@@ -77,7 +85,10 @@ async function scanParentDirs(destPath: string, scriptName?: string) {
|
||||
return { hasScript, hasPackageLockJson };
|
||||
}
|
||||
|
||||
export async function installDependencies(destPath: string, args: string[] = []) {
|
||||
export async function installDependencies(
|
||||
destPath: string,
|
||||
args: string[] = []
|
||||
) {
|
||||
assert(path.isAbsolute(destPath));
|
||||
|
||||
let commandArgs = args;
|
||||
@@ -91,23 +102,23 @@ export async function installDependencies(destPath: string, args: string[] = [])
|
||||
// Node.js version that `@now/node` and `@now/node-server` use
|
||||
npm_config_target: '8.10.0',
|
||||
},
|
||||
stdio: 'pipe'
|
||||
stdio: 'pipe',
|
||||
};
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||
await spawnAsync(
|
||||
'npm',
|
||||
['install'].concat(commandArgs),
|
||||
['install', '--unsafe-perm'].concat(commandArgs),
|
||||
destPath,
|
||||
opts as SpawnOptions
|
||||
);
|
||||
} else {
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
['--cwd', destPath].concat(commandArgs),
|
||||
['--ignore-engines', '--cwd', destPath].concat(commandArgs),
|
||||
destPath,
|
||||
opts as SpawnOptions,
|
||||
opts as SpawnOptions
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -120,7 +131,7 @@ export async function runPackageJsonScript(
|
||||
assert(path.isAbsolute(destPath));
|
||||
const { hasScript, hasPackageLockJson } = await scanParentDirs(
|
||||
destPath,
|
||||
scriptName,
|
||||
scriptName
|
||||
);
|
||||
if (!hasScript) return false;
|
||||
|
||||
@@ -129,7 +140,12 @@ export async function runPackageJsonScript(
|
||||
await spawnAsync('npm', ['run', scriptName], destPath, opts);
|
||||
} else {
|
||||
console.log(`running "yarn run ${scriptName}"`);
|
||||
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath, opts);
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
['--cwd', destPath, 'run', scriptName],
|
||||
destPath,
|
||||
opts
|
||||
);
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
@@ -1,26 +1,28 @@
|
||||
import eos from 'end-of-stream';
|
||||
|
||||
export default function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
|
||||
export default function streamToBuffer(
|
||||
stream: NodeJS.ReadableStream
|
||||
): Promise<Buffer> {
|
||||
return new Promise<Buffer>((resolve, reject) => {
|
||||
const buffers: Buffer[] = [];
|
||||
|
||||
stream.on('data', buffers.push.bind(buffers))
|
||||
stream.on('data', buffers.push.bind(buffers));
|
||||
|
||||
eos(stream, (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
switch (buffers.length) {
|
||||
case 0:
|
||||
resolve(Buffer.allocUnsafe(0));
|
||||
break;
|
||||
case 1:
|
||||
resolve(buffers[0]);
|
||||
break;
|
||||
default:
|
||||
resolve(Buffer.concat(buffers));
|
||||
}
|
||||
eos(stream, err => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
switch (buffers.length) {
|
||||
case 0:
|
||||
resolve(Buffer.allocUnsafe(0));
|
||||
break;
|
||||
case 1:
|
||||
resolve(buffers[0]);
|
||||
break;
|
||||
default:
|
||||
resolve(Buffer.concat(buffers));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,34 +1,51 @@
|
||||
import FileBlob from './file-blob';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
import FileRef from './file-ref';
|
||||
import { File, Files, AnalyzeOptions, BuildOptions, PrepareCacheOptions, ShouldServeOptions, Meta } from './types';
|
||||
import {
|
||||
File,
|
||||
Files,
|
||||
AnalyzeOptions,
|
||||
BuildOptions,
|
||||
PrepareCacheOptions,
|
||||
ShouldServeOptions,
|
||||
Meta,
|
||||
} from './types';
|
||||
import { Lambda, createLambda } from './lambda';
|
||||
import download from './fs/download';
|
||||
import getWriteableDirectory from './fs/get-writable-directory'
|
||||
import download, { DownloadedFiles } from './fs/download';
|
||||
import getWriteableDirectory from './fs/get-writable-directory';
|
||||
import glob from './fs/glob';
|
||||
import rename from './fs/rename';
|
||||
import { installDependencies, runPackageJsonScript, runNpmInstall, runShellScript } from './fs/run-user-scripts';
|
||||
import {
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runShellScript,
|
||||
} from './fs/run-user-scripts';
|
||||
import streamToBuffer from './fs/stream-to-buffer';
|
||||
import shouldServe from './should-serve';
|
||||
|
||||
export {
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
FileRef,
|
||||
Files,
|
||||
File,
|
||||
Meta,
|
||||
Lambda,
|
||||
createLambda,
|
||||
download,
|
||||
getWriteableDirectory,
|
||||
glob,
|
||||
rename,
|
||||
installDependencies, runPackageJsonScript, runNpmInstall, runShellScript,
|
||||
streamToBuffer,
|
||||
AnalyzeOptions,
|
||||
BuildOptions,
|
||||
PrepareCacheOptions,
|
||||
ShouldServeOptions,
|
||||
shouldServe,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
FileRef,
|
||||
Files,
|
||||
File,
|
||||
Meta,
|
||||
Lambda,
|
||||
createLambda,
|
||||
download,
|
||||
DownloadedFiles,
|
||||
getWriteableDirectory,
|
||||
glob,
|
||||
rename,
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runShellScript,
|
||||
streamToBuffer,
|
||||
AnalyzeOptions,
|
||||
BuildOptions,
|
||||
PrepareCacheOptions,
|
||||
ShouldServeOptions,
|
||||
shouldServe,
|
||||
};
|
||||
|
||||
@@ -32,9 +32,7 @@ export class Lambda {
|
||||
public runtime: string;
|
||||
public environment: Environment;
|
||||
|
||||
constructor({
|
||||
zipBuffer, handler, runtime, environment,
|
||||
}: LambdaOptions) {
|
||||
constructor({ zipBuffer, handler, runtime, environment }: LambdaOptions) {
|
||||
this.type = 'Lambda';
|
||||
this.zipBuffer = zipBuffer;
|
||||
this.handler = handler;
|
||||
@@ -47,7 +45,10 @@ const sema = new Sema(10);
|
||||
const mtime = new Date(1540000000000);
|
||||
|
||||
export async function createLambda({
|
||||
files, handler, runtime, environment = {},
|
||||
files,
|
||||
handler,
|
||||
runtime,
|
||||
environment = {},
|
||||
}: CreateLambdaOptions): Promise<Lambda> {
|
||||
assert(typeof files === 'object', '"files" must be an object');
|
||||
assert(typeof handler === 'string', '"handler" is not a string');
|
||||
@@ -97,7 +98,9 @@ export async function createZip(files: Files): Promise<Buffer> {
|
||||
}
|
||||
|
||||
zipFile.end();
|
||||
streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
|
||||
streamToBuffer(zipFile.outputStream)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
});
|
||||
|
||||
return zipBuffer;
|
||||
|
||||
@@ -5,7 +5,7 @@ import FileFsRef from './file-fs-ref';
|
||||
export default function shouldServe({
|
||||
entrypoint,
|
||||
files,
|
||||
requestPath
|
||||
requestPath,
|
||||
}: ShouldServeOptions): boolean {
|
||||
requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/'
|
||||
entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility
|
||||
@@ -23,5 +23,5 @@ export default function shouldServe({
|
||||
}
|
||||
|
||||
function hasProp(obj: { [path: string]: FileFsRef }, key: string): boolean {
|
||||
return Object.hasOwnProperty.call(obj, key)
|
||||
return Object.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
|
||||
@@ -5,6 +5,9 @@ export interface File {
|
||||
type: string;
|
||||
mode: number;
|
||||
toStream: () => NodeJS.ReadableStream;
|
||||
/**
|
||||
* The absolute path to the file in the filesystem
|
||||
*/
|
||||
fsPath?: string;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "api/index.js", "use": "@now/node" }
|
||||
],
|
||||
"builds": [{ "src": "api/index.js", "use": "@now/node" }],
|
||||
"probes": [
|
||||
{ "path": "/api/index.js", "mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER" }
|
||||
{
|
||||
"path": "/api/index.js",
|
||||
"mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/index.js (new file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
const scheduler = require('@google-cloud/scheduler');
|
||||
|
||||
module.exports = (_, res) => {
|
||||
if (scheduler) {
|
||||
res.end('found:RANDOMNESS_PLACEHOLDER');
|
||||
} else {
|
||||
res.end('nope:RANDOMNESS_PLACEHOLDER');
|
||||
}
|
||||
};
|
||||
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/now.json (new file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{
|
||||
"src": "index.js",
|
||||
"use": "@now/node",
|
||||
"config": { "maxLambdaSize": "15mb" }
|
||||
}
|
||||
],
|
||||
"probes": [{ "path": "/", "mustContain": "found:RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
packages/now-build-utils/test/fixtures/15-yarn-ignore-engines/package.json (new file, 8 lines)
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"name": "15-yarn-ignore-engines",
|
||||
"version": "1.0.0",
|
||||
"main": "index.js",
|
||||
"dependencies": {
|
||||
"@google-cloud/scheduler": "0.3.0"
|
||||
}
|
||||
}
|
||||
@@ -15,10 +15,6 @@
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
|
||||
@@ -5,15 +5,17 @@ const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/
|
||||
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { shouldServe } = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
exports.build = async ({
|
||||
workPath, files, entrypoint, meta,
|
||||
}) => {
|
||||
console.log('downloading files...');
|
||||
const srcDir = await getWritableDirectory();
|
||||
const outDir = await getWritableDirectory();
|
||||
|
||||
await download(files, srcDir);
|
||||
await download(files, workPath, meta);
|
||||
|
||||
const handlerPath = path.join(__dirname, 'handler');
|
||||
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
||||
@@ -23,7 +25,7 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
|
||||
// For now only the entrypoint file is copied into the lambda
|
||||
await copyFile(
|
||||
path.join(srcDir, entrypoint),
|
||||
path.join(workPath, entrypoint),
|
||||
path.join(outDir, entrypoint),
|
||||
);
|
||||
|
||||
@@ -40,3 +42,5 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
[entrypoint]: lambda,
|
||||
};
|
||||
};
|
||||
|
||||
exports.shouldServe = shouldServe;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/cgi",
|
||||
"version": "0.1.0",
|
||||
"version": "0.1.4",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
packages/now-go/.gitignore (5 lines changed)
@@ -1,5 +1,6 @@
|
||||
node_modules
|
||||
*.log
|
||||
/?.js
|
||||
/go
|
||||
/get-exported-function-name
|
||||
/analyze
|
||||
*.js
|
||||
!util/install.js
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
test
|
||||
tsconfig.json
|
||||
package-lock.json
|
||||
yarn.lock
|
||||
yarn.lock
|
||||
@@ -1,129 +0,0 @@
|
||||
const tar = require('tar');
|
||||
const execa = require('execa');
|
||||
const fetch = require('node-fetch');
|
||||
const { mkdirp } = require('fs-extra');
|
||||
const { dirname, join } = require('path');
|
||||
const debug = require('debug')('@now/go:go-helpers');
|
||||
|
||||
const archMap = new Map([['x64', 'amd64'], ['x86', '386']]);
|
||||
const platformMap = new Map([['win32', 'windows']]);
|
||||
|
||||
// Location where the `go` binary will be installed after `postinstall`
|
||||
const GO_DIR = join(__dirname, 'go');
|
||||
const GO_BIN = join(GO_DIR, 'bin/go');
|
||||
|
||||
const getPlatform = p => platformMap.get(p) || p;
|
||||
const getArch = a => archMap.get(a) || a;
|
||||
const getGoUrl = (version, platform, arch) => {
|
||||
const goArch = getArch(arch);
|
||||
const goPlatform = getPlatform(platform);
|
||||
const ext = platform === 'win32' ? 'zip' : 'tar.gz';
|
||||
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
||||
};
|
||||
|
||||
async function getExportedFunctionName(filePath) {
|
||||
debug('Detecting handler name for %o', filePath);
|
||||
const bin = join(__dirname, 'get-exported-function-name');
|
||||
const args = [filePath];
|
||||
const name = await execa.stdout(bin, args);
|
||||
debug('Detected exported name %o', name);
|
||||
return name;
|
||||
}
|
||||
|
||||
// Creates a `$GOPATH` directory tree, as per `go help gopath` instructions.
|
||||
// Without this, `go` won't recognize the `$GOPATH`.
|
||||
function createGoPathTree(goPath, platform, arch) {
|
||||
const tuple = `${getPlatform(platform)}_${getArch(arch)}`;
|
||||
debug('Creating GOPATH directory structure for %o (%s)', goPath, tuple);
|
||||
return Promise.all([
|
||||
mkdirp(join(goPath, 'bin')),
|
||||
mkdirp(join(goPath, 'pkg', tuple)),
|
||||
]);
|
||||
}
|
||||
|
||||
async function get({ src } = {}) {
|
||||
const args = ['get'];
|
||||
if (src) {
|
||||
debug('Fetching `go` dependencies for file %o', src);
|
||||
args.push(src);
|
||||
} else {
|
||||
debug('Fetching `go` dependencies for cwd %o', this.cwd);
|
||||
}
|
||||
await this(...args);
|
||||
}
|
||||
|
||||
async function build({ src, dest }) {
|
||||
debug('Building `go` binary %o -> %o', src, dest);
|
||||
let sources;
|
||||
if (Array.isArray(src)) {
|
||||
sources = src;
|
||||
} else {
|
||||
sources = [src];
|
||||
}
|
||||
await this('build', '-o', dest, ...sources);
|
||||
}
|
||||
|
||||
async function createGo(
|
||||
goPath,
|
||||
platform = process.platform,
|
||||
arch = process.arch,
|
||||
opts = {},
|
||||
goMod = false,
|
||||
) {
|
||||
const env = {
|
||||
...process.env,
|
||||
PATH: `${dirname(GO_BIN)}:${process.env.PATH}`,
|
||||
GOPATH: goPath,
|
||||
...opts.env,
|
||||
};
|
||||
|
||||
if (goMod) {
|
||||
env.GO111MODULE = 'on';
|
||||
}
|
||||
|
||||
function go(...args) {
|
||||
debug('Exec %o', `go ${args.join(' ')}`);
|
||||
return execa('go', args, { stdio: 'inherit', ...opts, env });
|
||||
}
|
||||
go.cwd = opts.cwd || process.cwd();
|
||||
go.get = get;
|
||||
go.build = build;
|
||||
go.goPath = goPath;
|
||||
await createGoPathTree(goPath, platform, arch);
|
||||
return go;
|
||||
}
|
||||
|
||||
async function downloadGo(
|
||||
dir = GO_DIR,
|
||||
version = '1.12',
|
||||
platform = process.platform,
|
||||
arch = process.arch,
|
||||
) {
|
||||
debug('Installing `go` v%s to %o for %s %s', version, dir, platform, arch);
|
||||
|
||||
const url = getGoUrl(version, platform, arch);
|
||||
debug('Downloading `go` URL: %o', url);
|
||||
const res = await fetch(url);
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to download: ${url} (${res.status})`);
|
||||
}
|
||||
|
||||
// TODO: use a zip extractor when `ext === "zip"`
|
||||
await mkdirp(dir);
|
||||
await new Promise((resolve, reject) => {
|
||||
res.body
|
||||
.on('error', reject)
|
||||
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
||||
.on('error', reject)
|
||||
.on('finish', resolve);
|
||||
});
|
||||
|
||||
return createGo(dir, platform, arch);
|
||||
}
|
||||
|
||||
module.exports = {
|
||||
createGo,
|
||||
downloadGo,
|
||||
getExportedFunctionName,
|
||||
};
|
||||
packages/now-go/go-helpers.ts (new file, 170 lines)
@@ -0,0 +1,170 @@
|
||||
import tar from 'tar';
|
||||
import execa from 'execa';
|
||||
import fetch from 'node-fetch';
|
||||
import { mkdirp, pathExists } from 'fs-extra';
|
||||
import { dirname, join } from 'path';
|
||||
import { homedir } from 'os';
|
||||
import Debug from 'debug';
|
||||
|
||||
const debug = Debug('@now/go:go-helpers');
|
||||
const archMap = new Map([['x64', 'amd64'], ['x86', '386']]);
|
||||
const platformMap = new Map([['win32', 'windows']]);
|
||||
|
||||
// Location where the `go` binary will be installed after `postinstall`
|
||||
const GO_DIR = join(__dirname, 'go');
|
||||
const GO_BIN = join(GO_DIR, 'bin/go');
|
||||
|
||||
const getPlatform = (p: string) => platformMap.get(p) || p;
|
||||
const getArch = (a: string) => archMap.get(a) || a;
|
||||
const getGoUrl = (version: string, platform: string, arch: string) => {
|
||||
const goArch = getArch(arch);
|
||||
const goPlatform = getPlatform(platform);
|
||||
const ext = platform === 'win32' ? 'zip' : 'tar.gz';
|
||||
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
||||
};
|
||||
|
||||
export async function getAnalyzedEntrypoint(filePath: string, modulePath = '') {
|
||||
debug('Analyzing entrypoint %o', filePath);
|
||||
const bin = join(__dirname, 'analyze');
|
||||
|
||||
const isAnalyzeExist = await pathExists(bin);
|
||||
if (!isAnalyzeExist) {
|
||||
const src = join(__dirname, 'util', 'analyze.go');
|
||||
const dest = join(__dirname, 'analyze');
|
||||
const go = await downloadGo();
|
||||
await go.build(src, dest);
|
||||
}
|
||||
|
||||
const args = [`-modpath=${modulePath}`, filePath];
|
||||
|
||||
const analyzed = await execa.stdout(bin, args);
|
||||
debug('Analyzed entrypoint %o', analyzed);
|
||||
return analyzed;
|
||||
}
|
||||
|
||||
// Creates a `$GOPATH` directory tree, as per `go help gopath` instructions.
|
||||
// Without this, `go` won't recognize the `$GOPATH`.
|
||||
function createGoPathTree(goPath: string, platform: string, arch: string) {
|
||||
const tuple = `${getPlatform(platform)}_${getArch(arch)}`;
|
||||
debug('Creating GOPATH directory structure for %o (%s)', goPath, tuple);
|
||||
return Promise.all([
|
||||
mkdirp(join(goPath, 'bin')),
|
||||
mkdirp(join(goPath, 'pkg', tuple)),
|
||||
]);
|
||||
}
|
||||
|
||||
class GoWrapper {
|
||||
private env: { [key: string]: string };
|
||||
private opts: execa.Options;
|
||||
|
||||
constructor(env: { [key: string]: string }, opts: execa.Options = {}) {
|
||||
if (!opts.cwd) {
|
||||
opts.cwd = process.cwd();
|
||||
}
|
||||
this.env = env;
|
||||
this.opts = opts;
|
||||
}
|
||||
|
||||
private execute(...args: string[]) {
|
||||
const { opts, env } = this;
|
||||
debug('Exec %o', `go ${args.join(' ')}`);
|
||||
return execa('go', args, { stdio: 'inherit', ...opts, env });
|
||||
}
|
||||
|
||||
mod() {
|
||||
return this.execute('mod', 'tidy');
|
||||
}
|
||||
|
||||
get(src?: string) {
|
||||
const args = ['get'];
|
||||
if (src) {
|
||||
debug('Fetching `go` dependencies for file %o', src);
|
||||
args.push(src);
|
||||
} else {
|
||||
debug('Fetching `go` dependencies for cwd %o', this.opts.cwd);
|
||||
}
|
||||
return this.execute(...args);
|
||||
}
|
||||
|
||||
build(src: string | string[], dest: string, ldsflags = '-s -w') {
|
||||
debug('Building optimized `go` binary %o -> %o', src, dest);
|
||||
const sources = Array.isArray(src) ? src : [src];
|
||||
return this.execute('build', '-ldflags', ldsflags, '-o', dest, ...sources);
|
||||
}
|
||||
}
|
||||
|
||||
export async function createGo(
|
||||
goPath: string,
|
||||
platform = process.platform,
|
||||
arch = process.arch,
|
||||
opts: execa.Options = {},
|
||||
goMod = false
|
||||
) {
|
||||
const path = `${dirname(GO_BIN)}:${process.env.PATH}`;
|
||||
const env: { [key: string]: string } = {
|
||||
...process.env,
|
||||
PATH: path,
|
||||
GOPATH: goPath,
|
||||
...opts.env,
|
||||
};
|
||||
if (goMod) {
|
||||
env.GO111MODULE = 'on';
|
||||
}
|
||||
await createGoPathTree(goPath, platform, arch);
|
||||
return new GoWrapper(env, opts);
|
||||
}
|
||||
|
||||
export async function downloadGo(
|
||||
dir = GO_DIR,
|
||||
version = '1.12',
|
||||
platform = process.platform,
|
||||
arch = process.arch
|
||||
) {
|
||||
// Check default `Go` in user machine
|
||||
const isUserGo = await pathExists(join(homedir(), 'go'));
|
||||
|
||||
// If we found GOPATH in ENV, or default `Go` path exists
|
||||
// asssume that user have `Go` installed
|
||||
if (isUserGo || process.env.GOPATH !== undefined) {
|
||||
const { stdout } = await execa('go', ['version']);
|
||||
|
||||
if (parseInt(stdout.split('.')[1]) >= 11) {
|
||||
return createGo(dir, platform, arch);
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Your current ${stdout} doesn't support Go Modules. Please update.`
|
||||
);
|
||||
} else {
|
||||
// Check `Go` bin in builder CWD
|
||||
const isGoExist = await pathExists(join(dir, 'bin'));
|
||||
if (!isGoExist) {
|
||||
debug(
|
||||
'Installing `go` v%s to %o for %s %s',
|
||||
version,
|
||||
dir,
|
||||
platform,
|
||||
arch
|
||||
);
|
||||
const url = getGoUrl(version, platform, arch);
|
||||
debug('Downloading `go` URL: %o', url);
|
||||
console.log('Downloading Go ...');
|
||||
const res = await fetch(url);
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to download: ${url} (${res.status})`);
|
||||
}
|
||||
|
||||
// TODO: use a zip extractor when `ext === "zip"`
|
||||
await mkdirp(dir);
|
||||
await new Promise((resolve, reject) => {
|
||||
res.body
|
||||
.on('error', reject)
|
||||
.pipe(tar.extract({ cwd: dir, strip: 1 }))
|
||||
.on('error', reject)
|
||||
.on('finish', resolve);
|
||||
});
|
||||
}
|
||||
return createGo(dir, platform, arch);
|
||||
}
|
||||
}
|
||||
@@ -1,210 +0,0 @@
|
||||
const { join, sep, dirname } = require('path');
|
||||
const {
|
||||
readFile, writeFile, pathExists, move,
|
||||
} = require('fs-extra');
|
||||
|
||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { createGo, getExportedFunctionName } = require('./go-helpers');
|
||||
|
||||
const config = {
|
||||
maxLambdaSize: '10mb',
|
||||
};
|
||||
|
||||
async function build({ files, entrypoint }) {
|
||||
console.log('Downloading user files...');
|
||||
|
||||
const [goPath, outDir] = await Promise.all([
|
||||
getWritableDirectory(),
|
||||
getWritableDirectory(),
|
||||
]);
|
||||
|
||||
const srcPath = join(goPath, 'src', 'lambda');
|
||||
const downloadedFiles = await download(files, srcPath);
|
||||
|
||||
console.log(`Parsing AST for "${entrypoint}"`);
|
||||
let parseFunctionName;
|
||||
try {
|
||||
parseFunctionName = await getExportedFunctionName(
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
);
|
||||
} catch (err) {
|
||||
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!parseFunctionName) {
|
||||
const err = new Error(
|
||||
`Could not find an exported function in "${entrypoint}"`,
|
||||
);
|
||||
console.log(err.message);
|
||||
throw err;
|
||||
}
|
||||
|
||||
const handlerFunctionName = parseFunctionName.split(',')[0];
|
||||
|
||||
console.log(
|
||||
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`,
|
||||
);
|
||||
|
||||
// we need `main.go` in the same dir as the entrypoint,
|
||||
// otherwise `go build` will refuse to build
|
||||
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||
|
||||
// check if package name other than main
|
||||
const packageName = parseFunctionName.split(',')[1];
|
||||
const isGoModExist = await pathExists(join(entrypointDirname, 'go.mod'));
|
||||
if (packageName !== 'main') {
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
process.platform,
|
||||
process.arch,
|
||||
{
|
||||
cwd: entrypointDirname,
|
||||
},
|
||||
true,
|
||||
);
|
||||
if (!isGoModExist) {
|
||||
try {
|
||||
const defaultGoModContent = `module ${packageName}`;
|
||||
|
||||
await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
|
||||
} catch (err) {
|
||||
console.log(`failed to create default go.mod for ${packageName}`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const mainModGoFileName = 'main__mod__.go';
|
||||
const modMainGoContents = await readFile(
|
||||
join(__dirname, mainModGoFileName),
|
||||
'utf8',
|
||||
);
|
||||
|
||||
let goPackageName = `${packageName}/${packageName}`;
|
||||
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
||||
|
||||
if (isGoModExist) {
|
||||
const goModContents = await readFile(
|
||||
join(entrypointDirname, 'go.mod'),
|
||||
'utf8',
|
||||
);
|
||||
goPackageName = `${
|
||||
goModContents.split('\n')[0].split(' ')[1]
|
||||
}/${packageName}`;
|
||||
}
|
||||
|
||||
const mainModGoContents = modMainGoContents
|
||||
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
||||
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
||||
|
||||
// write main__mod__.go
|
||||
await writeFile(
|
||||
join(entrypointDirname, mainModGoFileName),
|
||||
mainModGoContents,
|
||||
);
|
||||
|
||||
// move user go file to folder
|
||||
try {
|
||||
// default path
|
||||
let finalDestination = join(entrypointDirname, packageName, entrypoint);
|
||||
const entrypointArr = entrypoint.split(sep);
|
||||
|
||||
// if `entrypoint` include folder, only use filename
|
||||
if (entrypointArr.length > 1) {
|
||||
finalDestination = join(
|
||||
entrypointDirname,
|
||||
packageName,
|
||||
entrypointArr.pop(),
|
||||
);
|
||||
}
|
||||
|
||||
await move(downloadedFiles[entrypoint].fsPath, finalDestination);
|
||||
} catch (err) {
|
||||
console.log('failed to move entry to package folder');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('tidy go.mod file');
|
||||
try {
|
||||
// ensure go.mod up-to-date
|
||||
await go('mod', 'tidy');
|
||||
} catch (err) {
|
||||
console.log('failed to `go mod tidy`');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('Running `go build`...');
|
||||
const destPath = join(outDir, 'handler');
|
||||
try {
|
||||
const src = [join(entrypointDirname, mainModGoFileName)];
|
||||
await go.build({ src, dest: destPath });
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
}
|
||||
} else {
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
process.platform,
|
||||
process.arch,
|
||||
{
|
||||
cwd: entrypointDirname,
|
||||
},
|
||||
false,
|
||||
);
|
||||
const origianlMainGoContents = await readFile(
|
||||
join(__dirname, 'main.go'),
|
||||
'utf8',
|
||||
);
|
||||
const mainGoContents = origianlMainGoContents.replace(
|
||||
'__NOW_HANDLER_FUNC_NAME',
|
||||
handlerFunctionName,
|
||||
);
|
||||
|
||||
// in order to allow the user to have `main.go`,
|
||||
// we need our `main.go` to be called something else
|
||||
const mainGoFileName = 'main__now__go__.go';
|
||||
|
||||
// Go doesn't like to build files in different directories,
|
||||
// so now we place `main.go` together with the user code
|
||||
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||
|
||||
// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
|
||||
// and download any packages that aren't part of the stdlib
|
||||
try {
|
||||
await go.get();
|
||||
} catch (err) {
|
||||
console.log('failed to `go get`');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('Running `go build`...');
|
||||
const destPath = join(outDir, 'handler');
|
||||
try {
|
||||
const src = [
|
||||
join(entrypointDirname, mainGoFileName),
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
];
|
||||
await go.build({ src, dest: destPath });
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const lambda = await createLambda({
|
||||
files: await glob('**', outDir),
|
||||
handler: 'handler',
|
||||
runtime: 'go1.x',
|
||||
environment: {},
|
||||
});
|
||||
|
||||
return {
|
||||
[entrypoint]: lambda,
|
||||
};
|
||||
}
|
||||
|
||||
module.exports = { config, build };
|
||||
388
packages/now-go/index.ts
Normal file
@@ -0,0 +1,388 @@
|
||||
import { join, sep, dirname, basename } from 'path';
|
||||
import { readFile, writeFile, pathExists, move } from 'fs-extra';
|
||||
import { homedir } from 'os';
|
||||
import execa from 'execa';
|
||||
|
||||
import {
|
||||
glob,
|
||||
download,
|
||||
createLambda,
|
||||
getWriteableDirectory,
|
||||
BuildOptions,
|
||||
shouldServe,
|
||||
Files,
|
||||
} from '@now/build-utils';
|
||||
|
||||
import { createGo, getAnalyzedEntrypoint } from './go-helpers';
|
||||
|
||||
interface Analyzed {
|
||||
found?: boolean;
|
||||
packageName: string;
|
||||
functionName: string;
|
||||
watch: string[];
|
||||
}
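// Mirrors the JSON emitted by util/analyze.go below (packageName, functionName, watch)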
|
||||
interface BuildParamsMeta {
|
||||
isDev: boolean | undefined;
|
||||
}
|
||||
interface BuildParamsType extends BuildOptions {
|
||||
files: Files;
|
||||
entrypoint: string;
|
||||
workPath: string;
|
||||
meta: BuildParamsMeta;
|
||||
}
|
||||
|
||||
// Configure Git credentials so Go Modules can fetch private repos
|
||||
async function initPrivateGit(credentials: string) {
|
||||
await execa('git', [
|
||||
'config',
|
||||
'--global',
|
||||
'credential.helper',
|
||||
`store --file ${join(homedir(), '.git-credentials')}`,
|
||||
]);
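// `credentials` is written in git's credential-store format,
// e.g. https://<user>:<token>@github.com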
|
||||
|
||||
await writeFile(join(homedir(), '.git-credentials'), credentials);
|
||||
}
|
||||
|
||||
export const version = 2;
|
||||
|
||||
export const config = {
|
||||
maxLambdaSize: '10mb',
|
||||
};
|
||||
|
||||
export async function build({
|
||||
files,
|
||||
entrypoint,
|
||||
config,
|
||||
workPath,
|
||||
meta = {} as BuildParamsMeta,
|
||||
}: BuildParamsType) {
|
||||
if (process.env.GIT_CREDENTIALS && !meta.isDev) {
|
||||
console.log('Initializing Git credentials...');
|
||||
await initPrivateGit(process.env.GIT_CREDENTIALS);
|
||||
}
|
||||
|
||||
console.log('Downloading user files...');
|
||||
const entrypointArr = entrypoint.split(sep);
|
||||
|
||||
let [goPath, outDir] = await Promise.all([
|
||||
getWriteableDirectory(),
|
||||
getWriteableDirectory(),
|
||||
]);
|
||||
|
||||
const srcPath = join(goPath, 'src', 'lambda');
|
||||
let downloadedFiles;
|
||||
if (meta.isDev) {
|
||||
downloadedFiles = await download(files, workPath, meta);
|
||||
} else {
|
||||
downloadedFiles = await download(files, srcPath);
|
||||
}
|
||||
|
||||
console.log(`Parsing AST for "${entrypoint}"`);
|
||||
let analyzed: string;
|
||||
try {
|
||||
let goModAbsPathDir = '';
|
||||
for (const file of Object.keys(downloadedFiles)) {
|
||||
if (file === 'go.mod') {
|
||||
goModAbsPathDir = dirname(downloadedFiles[file].fsPath);
|
||||
}
|
||||
}
|
||||
analyzed = await getAnalyzedEntrypoint(
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
goModAbsPathDir
|
||||
);
|
||||
} catch (err) {
|
||||
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!analyzed) {
|
||||
const err = new Error(
|
||||
`Could not find an exported function in "${entrypoint}"
|
||||
Learn more: https://zeit.co/docs/v2/deployments/official-builders/go-now-go/#entrypoint
|
||||
`
|
||||
);
|
||||
console.log(err.message);
|
||||
throw err;
|
||||
}
|
||||
|
||||
const parsedAnalyzed = JSON.parse(analyzed) as Analyzed;
|
||||
|
||||
if (meta.isDev) {
|
||||
const base = dirname(downloadedFiles['now.json'].fsPath);
|
||||
const destNow = join(
|
||||
base,
|
||||
'.now',
|
||||
'cache',
|
||||
basename(entrypoint, '.go'),
|
||||
'src',
|
||||
'lambda'
|
||||
);
|
||||
// this ensures Go rebuilds quickly
|
||||
goPath = join(base, '.now', 'cache', basename(entrypoint, '.go'));
|
||||
await download(downloadedFiles, destNow);
|
||||
|
||||
downloadedFiles = await glob('**', destNow);
|
||||
}
|
||||
|
||||
// find `go.mod` in downloadedFiles
|
||||
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||
let isGoModExist = false;
|
||||
let goModPath = '';
|
||||
let goModPathArr: string[] = [];
|
||||
for (const file of Object.keys(downloadedFiles)) {
|
||||
const fileDirname = dirname(downloadedFiles[file].fsPath);
|
||||
if (file === 'go.mod') {
|
||||
isGoModExist = true;
|
||||
goModPath = fileDirname;
|
||||
goModPathArr = goModPath.split(sep);
|
||||
} else if (file.includes('go.mod')) {
|
||||
isGoModExist = true;
|
||||
if (entrypointDirname === fileDirname) {
|
||||
goModPath = fileDirname;
|
||||
goModPathArr = goModPath.split(sep);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const input = entrypointDirname;
|
||||
var includedFiles: Files = {};
|
||||
|
||||
if (config && config.includeFiles) {
|
||||
for (const pattern of config.includeFiles) {
|
||||
const files = await glob(pattern, input);
|
||||
for (const assetName of Object.keys(files)) {
|
||||
includedFiles[assetName] = files[assetName];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const handlerFunctionName = parsedAnalyzed.functionName;
|
||||
console.log(
|
||||
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`
|
||||
);
|
||||
|
||||
// check if the package name is something other than main
|
||||
// if so, build the handler the `go.mod` way
|
||||
const packageName = parsedAnalyzed.packageName;
|
||||
|
||||
if (isGoModExist && packageName === 'main') {
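// main__mod__.go imports the user's file as a package, so with go.mod it cannot itself be `package main`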
|
||||
throw new Error('Please change `package main` to `package handler`');
|
||||
}
|
||||
|
||||
if (packageName !== 'main') {
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
process.platform,
|
||||
process.arch,
|
||||
{
|
||||
cwd: entrypointDirname,
|
||||
},
|
||||
true
|
||||
);
|
||||
if (!isGoModExist) {
|
||||
try {
|
||||
const defaultGoModContent = `module ${packageName}`;
|
||||
|
||||
await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
|
||||
} catch (err) {
|
||||
console.log(`failed to create default go.mod for ${packageName}`);
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const mainModGoFileName = 'main__mod__.go';
|
||||
const modMainGoContents = await readFile(
|
||||
join(__dirname, mainModGoFileName),
|
||||
'utf8'
|
||||
);
|
||||
|
||||
let goPackageName = `${packageName}/${packageName}`;
|
||||
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
||||
|
||||
if (isGoModExist) {
|
||||
const goModContents = await readFile(join(goModPath, 'go.mod'), 'utf8');
|
||||
const usrModName = goModContents.split('\n')[0].split(' ')[1];
|
||||
goPackageName = `${usrModName}/${packageName}`;
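// e.g. a go.mod starting with "module with-shared" plus a `package api` entrypoint yields "with-shared/api"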
|
||||
}
|
||||
|
||||
const mainModGoContents = modMainGoContents
|
||||
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
||||
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
||||
|
||||
if (goModPathArr.length > 1) {
|
||||
// using `go.mod` path to write main__mod__.go
|
||||
await writeFile(join(goModPath, mainModGoFileName), mainModGoContents);
|
||||
} else {
|
||||
// using `entrypointDirname` to write main__mod__.go
|
||||
await writeFile(
|
||||
join(entrypointDirname, mainModGoFileName),
|
||||
mainModGoContents
|
||||
);
|
||||
}
|
||||
|
||||
// move user go file to folder
|
||||
try {
|
||||
// default path
|
||||
let finalDestination = join(entrypointDirname, packageName, entrypoint);
|
||||
let forceMove = false;
|
||||
|
||||
if (meta.isDev) {
|
||||
forceMove = true;
|
||||
}
|
||||
|
||||
// if `entrypoint` includes a folder, only use the filename
|
||||
if (entrypointArr.length > 1) {
|
||||
finalDestination = join(
|
||||
entrypointDirname,
|
||||
packageName,
|
||||
entrypointArr[entrypointArr.length - 1]
|
||||
);
|
||||
}
|
||||
|
||||
if (
|
||||
dirname(downloadedFiles[entrypoint].fsPath) === goModPath ||
|
||||
!isGoModExist
|
||||
) {
|
||||
await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
|
||||
overwrite: forceMove,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('failed to move entry to package folder');
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (meta.isDev) {
|
||||
let entrypointDir = entrypointDirname;
|
||||
if (goModPathArr.length > 1) {
|
||||
entrypointDir = goModPath;
|
||||
}
|
||||
const isGoModBk = await pathExists(join(entrypointDir, 'go.mod.bk'));
|
||||
if (isGoModBk) {
|
||||
await move(
|
||||
join(entrypointDir, 'go.mod.bk'),
|
||||
join(entrypointDir, 'go.mod'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
await move(
|
||||
join(entrypointDir, 'go.sum.bk'),
|
||||
join(entrypointDir, 'go.sum'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('Tidy `go.mod` file...');
|
||||
try {
|
||||
// ensure go.mod up-to-date
|
||||
await go.mod();
|
||||
} catch (err) {
|
||||
console.log('failed to `go mod tidy`');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('Running `go build`...');
|
||||
const destPath = join(outDir, 'handler');
|
||||
const isGoModInRootDir = goModPathArr.length === 1;
|
||||
const baseGoModPath = isGoModInRootDir ? entrypointDirname : goModPath;
|
||||
try {
|
||||
let src = [join(baseGoModPath, mainModGoFileName)];
|
||||
|
||||
await go.build(src, destPath, config.ldsflags);
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
}
|
||||
if (meta.isDev) {
|
||||
// caching for `now dev`
|
||||
await move(
|
||||
join(baseGoModPath, 'go.mod'),
|
||||
join(baseGoModPath, 'go.mod.bk'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
await move(
|
||||
join(baseGoModPath, 'go.sum'),
|
||||
join(baseGoModPath, 'go.sum.bk'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// legacy mode
|
||||
// we need `main.go` in the same dir as the entrypoint,
|
||||
// otherwise `go build` will refuse to build
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
process.platform,
|
||||
process.arch,
|
||||
{
|
||||
cwd: entrypointDirname,
|
||||
},
|
||||
false
|
||||
);
|
||||
const originalMainGoContents = await readFile(
|
||||
join(__dirname, 'main.go'),
|
||||
'utf8'
|
||||
);
|
||||
const mainGoContents = originalMainGoContents.replace(
|
||||
'__NOW_HANDLER_FUNC_NAME',
|
||||
handlerFunctionName
|
||||
);
|
||||
|
||||
// in order to allow the user to have `main.go`,
|
||||
// we need our `main.go` to be called something else
|
||||
const mainGoFileName = 'main__now__go__.go';
|
||||
|
||||
// Go doesn't like to build files in different directories,
|
||||
// so now we place `main.go` together with the user code
|
||||
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||
|
||||
// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
|
||||
// and download any packages that aren't part of the stdlib
|
||||
console.log('Running `go get`...');
|
||||
try {
|
||||
await go.get();
|
||||
} catch (err) {
|
||||
console.log('failed to `go get`');
|
||||
throw err;
|
||||
}
|
||||
|
||||
console.log('Running `go build`...');
|
||||
const destPath = join(outDir, 'handler');
|
||||
try {
|
||||
const src = [
|
||||
join(entrypointDirname, mainGoFileName),
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
];
|
||||
await go.build(src, destPath);
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
const lambda = await createLambda({
|
||||
files: { ...(await glob('**', outDir)), ...includedFiles },
|
||||
handler: 'handler',
|
||||
runtime: 'go1.x',
|
||||
environment: {},
|
||||
});
|
||||
const output = {
|
||||
[entrypoint]: lambda,
|
||||
};
|
||||
|
||||
let watch = parsedAnalyzed.watch;
|
||||
let watchSub: string[] = [];
|
||||
// if `entrypoint` is located in a subdirectory,
|
||||
// we need to join it onto the returned watch array
|
||||
if (entrypointArr.length > 1) {
|
||||
entrypointArr.pop();
|
||||
watchSub = parsedAnalyzed.watch.map(file => join(...entrypointArr, file));
|
||||
}
|
||||
|
||||
return {
|
||||
output,
|
||||
watch: watch.concat(watchSub),
|
||||
};
|
||||
}
|
||||
|
||||
export { shouldServe };
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/go",
|
||||
"version": "0.4.0",
|
||||
"version": "0.5.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -8,7 +8,9 @@
|
||||
"directory": "packages/now-go"
|
||||
},
|
||||
"scripts": {
|
||||
"postinstall": "node ./util/install"
|
||||
"build": "tsc",
|
||||
"test": "tsc && jest",
|
||||
"prepublish": "tsc"
|
||||
},
|
||||
"files": [
|
||||
"*.js",
|
||||
@@ -20,8 +22,15 @@
|
||||
"debug": "^4.1.1",
|
||||
"execa": "^1.0.0",
|
||||
"fs-extra": "^7.0.0",
|
||||
"mkdirp-promise": "5.0.1",
|
||||
"node-fetch": "^2.2.1",
|
||||
"tar": "4.4.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/debug": "^4.1.3",
|
||||
"@types/execa": "^0.9.0",
|
||||
"@types/fs-extra": "^5.0.5",
|
||||
"@types/node-fetch": "^2.3.0",
|
||||
"@types/tar": "^4.0.0",
|
||||
"typescript": "^3.4.2"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,4 +8,4 @@
|
||||
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/subdirectory", "mustContain": "subcow:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
1
packages/now-go/test/fixtures/03-env-vars/build-env/go.mod
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module build-env
|
||||
17
packages/now-go/test/fixtures/03-env-vars/build-env/index.go
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
package buildenv
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
)
|
||||
|
||||
// Handler function
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
rdm := os.Getenv("RANDOMNESS_BUILD_ENV")
|
||||
if rdm == "" {
|
||||
fmt.Println("No build env received")
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, rdm+":build-env")
|
||||
}
|
||||
1
packages/now-go/test/fixtures/03-env-vars/env/go.mod
vendored
Normal file
@@ -0,0 +1 @@
|
||||
module env
|
||||
17
packages/now-go/test/fixtures/03-env-vars/env/index.go
vendored
Normal file
@@ -0,0 +1,17 @@
|
||||
package env
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
)
|
||||
|
||||
// Handler function
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
rdm := os.Getenv("RANDOMNESS_ENV")
|
||||
if rdm == "" {
|
||||
fmt.Println("No env received")
|
||||
}
|
||||
|
||||
fmt.Fprintf(w, rdm)
|
||||
}
|
||||
18
packages/now-go/test/fixtures/03-env-vars/now.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{
|
||||
"src": "env/index.go",
|
||||
"use": "@now/go"
|
||||
}
|
||||
],
|
||||
"env": {
|
||||
"RANDOMNESS_ENV": "RANDOMNESS_PLACEHOLDER"
|
||||
},
|
||||
"probes": [
|
||||
{
|
||||
"path": "/env",
|
||||
"mustContain": "RANDOMNESS_PLACEHOLDER"
|
||||
}
|
||||
]
|
||||
}
|
||||
11
packages/now-go/test/fixtures/06-content-type/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
package function
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler function
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
4
packages/now-go/test/fixtures/06-content-type/now.json
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "index.go", "use": "@now/go" }]
|
||||
}
|
||||
7
packages/now-go/test/fixtures/07-content-length/now.json
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "*.go", "use": "@now/go" }],
|
||||
"env": {
|
||||
"RANDOMNESS_ENV_VAR": "RANDOMNESS_PLACEHOLDER"
|
||||
}
|
||||
}
|
||||
16
packages/now-go/test/fixtures/07-content-length/test1.go
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
package function
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"os"
|
||||
"strconv"
|
||||
)
|
||||
|
||||
// HandlerTest1 function
|
||||
func HandlerTest1(w http.ResponseWriter, r *http.Request) {
|
||||
rdm := os.Getenv("RANDOMNESS_ENV_VAR")
|
||||
|
||||
w.WriteHeader(401)
|
||||
w.Header().Set("content-length", strconv.Itoa(len(rdm+":content-length")))
|
||||
w.Write([]byte(rdm + ":content-length"))
|
||||
}
|
||||
12
packages/now-go/test/fixtures/07-content-length/test2.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
package function
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// HandlerTest2 function
|
||||
func HandlerTest2(w http.ResponseWriter, r *http.Request) {
|
||||
w.Header().Set("Content-Length", "2")
|
||||
w.WriteHeader(401)
|
||||
w.Write([]byte(""))
|
||||
}
|
||||
13
packages/now-go/test/fixtures/07-content-length/test3.go
vendored
Normal file
@@ -0,0 +1,13 @@
|
||||
package function
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"os"
|
||||
)
|
||||
|
||||
// HandlerTest3 function
|
||||
func HandlerTest3(w http.ResponseWriter, r *http.Request) {
|
||||
rev := os.Getenv("RANDOMNESS_ENV_VAR")
|
||||
w.WriteHeader(401)
|
||||
w.Write([]byte(rev + ":content-length"))
|
||||
}
|
||||
16
packages/now-go/test/fixtures/08-include-files/index.go
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
package cowsay
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler function
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
bts, err := ioutil.ReadFile("templates/foo.txt")
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
}
|
||||
fmt.Fprintf(w, string(bts))
|
||||
}
|
||||
18
packages/now-go/test/fixtures/08-include-files/now.json
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{
|
||||
"src": "index.go",
|
||||
"use": "@now/go",
|
||||
"config": {
|
||||
"includeFiles": ["templates/**"]
|
||||
}
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{
|
||||
"path": "/",
|
||||
"mustContain": "foobar from file"
|
||||
}
|
||||
]
|
||||
}
|
||||
1
packages/now-go/test/fixtures/08-include-files/templates/foo.txt
vendored
Normal file
@@ -0,0 +1 @@
|
||||
foobar from file
|
||||
22
packages/now-go/test/fixtures/09-exported-function/index.go
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
package function
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Person struct
|
||||
type Person struct {
|
||||
name string
|
||||
age int
|
||||
}
|
||||
|
||||
// NewPerson struct method
|
||||
func NewPerson(name string, age int) *Person {
|
||||
return &Person{name: name, age: age}
|
||||
}
|
||||
|
||||
// H func
|
||||
func H(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
5
packages/now-go/test/fixtures/09-exported-function/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "index.go", "use": "@now/go" }],
|
||||
"probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
3
packages/now-go/test/fixtures/10-go-mod/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module go-mod
|
||||
|
||||
go 1.12
|
||||
11
packages/now-go/test/fixtures/10-go-mod/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
5
packages/now-go/test/fixtures/10-go-mod/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "index.go", "use": "@now/go" }],
|
||||
"probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
12
packages/now-go/test/fixtures/11-go-mod-shared/api/index.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"with-shared/shared"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, shared.Say("RANDOMNESS_PLACEHOLDER"))
|
||||
}
|
||||
3
packages/now-go/test/fixtures/11-go-mod-shared/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module with-shared
|
||||
|
||||
go 1.12
|
||||
5
packages/now-go/test/fixtures/11-go-mod-shared/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "api/*.go", "use": "@now/go" }],
|
||||
"probes": [{ "path": "/api", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
6
packages/now-go/test/fixtures/11-go-mod-shared/shared/shared.go
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
package shared
|
||||
|
||||
// Say func
|
||||
func Say(text string) string {
|
||||
return text
|
||||
}
|
||||
8
packages/now-go/test/fixtures/12-go-mod-subs/now.json
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "**/*.go", "use": "@now/go" }],
|
||||
"probes": [
|
||||
{ "path": "/sub-1", "mustContain": "RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/sub-2", "mustContain": "RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module sub-1
|
||||
|
||||
go 1.12
|
||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
package sub1
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module sub-2
|
||||
|
||||
go 1.12
|
||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
package sub2
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
@@ -26,8 +26,8 @@ for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture),
|
||||
),
|
||||
path.join(fixturesPath, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
|
||||
18
packages/now-go/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"noImplicitThis": false,
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
215
packages/now-go/util/analyze.go
Normal file
@@ -0,0 +1,215 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var ignoredFoldersRegex []*regexp.Regexp
|
||||
|
||||
func init() {
|
||||
ignoredFolders := []string{"vendor", "testdata", ".now"}
|
||||
|
||||
// Build the regex that matches if a path contains the respective ignored folder
|
||||
// The pattern will look like: (.*/)?vendor/.*, which matches every path that contains a vendor folder
|
||||
for _, folder := range ignoredFolders {
|
||||
ignoredFoldersRegex = append(ignoredFoldersRegex, regexp.MustCompile("(.*/)?"+folder+"/.*"))
|
||||
}
|
||||
}
|
||||
|
||||
type analyze struct {
|
||||
PackageName string `json:"packageName"`
|
||||
FuncName string `json:"functionName"`
|
||||
Watch []string `json:"watch"`
|
||||
}
|
||||
|
||||
// parse go file
|
||||
func parse(fileName string) *ast.File {
|
||||
fset := token.NewFileSet()
|
||||
parsed, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
|
||||
if err != nil {
|
||||
log.Fatalf("Could not parse Go file \"%s\"\n", fileName)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
return parsed
|
||||
}
|
||||
|
||||
// ensure we only work with the Go files of interest
|
||||
func visit(files *[]string) filepath.WalkFunc {
|
||||
return func(path string, info os.FileInfo, err error) error {
|
||||
itf, err := filepath.Match("*test.go", path)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// we don't need Dirs, or test files
|
||||
// we only want `.go` files. Further, we ignore
|
||||
// every file that is in one of the ignored folders.
|
||||
if info.IsDir() || itf || filepath.Ext(path) != ".go" || isInIgnoredFolder(path) {
|
||||
return nil
|
||||
}
|
||||
|
||||
*files = append(*files, path)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
// isInIgnoredFolder checks if the given path is in one of the ignored folders.
|
||||
func isInIgnoredFolder(path string) bool {
|
||||
// Make sure the regex works for Windows paths
|
||||
path = filepath.ToSlash(path)
|
||||
|
||||
for _, pattern := range ignoredFoldersRegex {
|
||||
if pattern.MatchString(path) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// return unique files
|
||||
func unique(files []string) []string {
|
||||
encountered := map[string]bool{}
|
||||
for v := range files {
|
||||
encountered[files[v]] = true
|
||||
}
|
||||
|
||||
result := []string{}
|
||||
for key := range encountered {
|
||||
result = append(result, key)
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func main() {
|
||||
if len(os.Args) != 3 {
|
||||
// Args should have the program name on `0`
|
||||
// the -modpath flag on `1`, and the file name on `2`
|
||||
fmt.Println("Wrong number of args; Usage is:\n ./go-analyze -modpath=module-path file_name.go")
|
||||
os.Exit(1)
|
||||
}
|
||||
fileName := os.Args[2]
|
||||
rf, err := ioutil.ReadFile(fileName)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
se := string(rf)
|
||||
|
||||
var files []string
|
||||
var relatedFiles []string
|
||||
|
||||
// Add entrypoint to watchlist
|
||||
relFileName, err := filepath.Rel(filepath.Dir(fileName), fileName)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
relatedFiles = append(relatedFiles, relFileName)
|
||||
|
||||
// look for all Go files with exported declarations
|
||||
// that are used in the entrypoint
|
||||
err = filepath.Walk(filepath.Dir(fileName), visit(&files))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// look for related packages under the module path
|
||||
var modPath string
|
||||
flag.StringVar(&modPath, "modpath", "", "module path")
|
||||
flag.Parse()
|
||||
if len(modPath) > 1 {
|
||||
err = filepath.Walk(modPath, visit(&files))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
absFileName, _ := filepath.Abs(fileName)
|
||||
absFile, _ := filepath.Abs(file)
|
||||
// if it isn't the entrypoint
|
||||
if absFileName != absFile {
|
||||
// find all exported structs and functions
|
||||
pf := parse(file)
|
||||
var exportedDecl []string
|
||||
|
||||
ast.Inspect(pf, func(n ast.Node) bool {
|
||||
switch t := n.(type) {
|
||||
case *ast.FuncDecl:
|
||||
if t.Name.IsExported() {
|
||||
exportedDecl = append(exportedDecl, t.Name.Name)
|
||||
}
|
||||
// find type declarations
|
||||
case *ast.TypeSpec:
|
||||
// which are public
|
||||
if t.Name.IsExported() {
|
||||
switch t.Type.(type) {
|
||||
// and are structs
|
||||
case *ast.StructType:
|
||||
exportedDecl = append(exportedDecl, t.Name.Name)
|
||||
}
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
for _, ed := range exportedDecl {
|
||||
if strings.Contains(se, ed) {
|
||||
// find relative path of related file
|
||||
var basePath string
|
||||
if modPath == "" {
|
||||
basePath = filepath.Dir(fileName)
|
||||
} else {
|
||||
basePath = modPath
|
||||
}
|
||||
|
||||
rel, err := filepath.Rel(basePath, file)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
relatedFiles = append(relatedFiles, rel)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
parsed := parse(fileName)
|
||||
offset := parsed.Pos()
|
||||
reqRep := "*http.Request http.ResponseWriter"
|
||||
|
||||
for _, decl := range parsed.Decls {
|
||||
fn, ok := decl.(*ast.FuncDecl)
|
||||
if !ok {
|
||||
// this declaration is not a function
|
||||
// so we're not interested
|
||||
continue
|
||||
}
|
||||
if fn.Name.IsExported() {
|
||||
// find a valid `net/http` handler function
|
||||
for _, param := range fn.Type.Params.List {
|
||||
if strings.Contains(reqRep, string(rf[param.Type.Pos()-offset:param.Type.End()-offset])) {
|
||||
// we found the first exported function with `net/http`
|
||||
// we're done!
|
||||
analyzed := analyze{
|
||||
PackageName: parsed.Name.Name,
|
||||
FuncName: fn.Name.Name,
|
||||
Watch: unique(relatedFiles),
|
||||
}
|
||||
analyzedJSON, _ := json.Marshal(analyzed)
|
||||
fmt.Print(string(analyzedJSON))
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
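For reference, a sketch of the JSON this analyzer prints for a simple fixture such as 10-go-mod (entrypoint index.go, `package handler`, exported `Handler`):

{"packageName":"handler","functionName":"Handler","watch":["index.go"]}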
|
||||
@@ -1,41 +0,0 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
"go/token"
|
||||
"os"
|
||||
)
|
||||
|
||||
func main() {
|
||||
if len(os.Args) != 2 {
|
||||
// Args should have the program name on `0`
|
||||
// and the file name on `1`
|
||||
fmt.Println("Wrong number of args; Usage is:\n ./get-exported-function-name file_name.go")
|
||||
os.Exit(1)
|
||||
}
|
||||
fileName := os.Args[1]
|
||||
fset := token.NewFileSet()
|
||||
|
||||
parsed, err := parser.ParseFile(fset, fileName, nil, parser.ParseComments)
|
||||
if err != nil {
|
||||
fmt.Printf("Could not parse Go file \"%s\"\n", fileName)
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
for _, decl := range parsed.Decls {
|
||||
fn, ok := decl.(*ast.FuncDecl)
|
||||
if !ok {
|
||||
// this declaration is not a function
|
||||
// so we're not interested
|
||||
continue
|
||||
}
|
||||
if fn.Name.IsExported() == true {
|
||||
// we found the first exported function
|
||||
// we're done!
|
||||
fmt.Print(fn.Name.Name, ",", parsed.Name.Name)
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,18 +0,0 @@
|
||||
const { join } = require('path');
|
||||
const { downloadGo } = require('../go-helpers');
|
||||
|
||||
async function main() {
|
||||
// First download the `go` binary for this platform/arch.
|
||||
const go = await downloadGo();
|
||||
|
||||
// Build the `get-exported-function-name` helper program.
|
||||
// `go get` is not necessary because the program has no external deps.
|
||||
const src = join(__dirname, 'get-exported-function-name.go');
|
||||
const dest = join(__dirname, '../get-exported-function-name');
|
||||
await go.build({ src, dest });
|
||||
}
|
||||
|
||||
main().catch((err) => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,4 +1,4 @@
|
||||
const FileBlob = require('@now/build-utils/file-blob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { FileBlob, shouldServe } = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { minify } = require('html-minifier');
|
||||
|
||||
const defaultOptions = {
|
||||
@@ -28,3 +28,5 @@ exports.build = async ({ files, entrypoint, config }) => {
|
||||
|
||||
return { [entrypoint]: result };
|
||||
};
|
||||
|
||||
exports.shouldServe = shouldServe;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/html-minifier",
|
||||
"version": "1.1.0",
|
||||
"version": "1.1.3",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
const { Lambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const streamToBuffer = require('@now/build-utils/fs/stream-to-buffer.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { shouldServe } = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
exports.build = async ({ files, entrypoint, config }) => {
|
||||
if (!files[entrypoint]) throw new Error('Entrypoint not found in files');
|
||||
@@ -10,3 +11,5 @@ exports.build = async ({ files, entrypoint, config }) => {
|
||||
const lambda = new Lambda({ zipBuffer, handler, runtime });
|
||||
return { [entrypoint]: lambda };
|
||||
};
|
||||
|
||||
exports.shouldServe = shouldServe;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/lambda",
|
||||
"version": "0.5.0",
|
||||
"version": "0.5.4",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -1,11 +1,22 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } },
|
||||
{ "src": "subdirectory/index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } }
|
||||
{
|
||||
"src": "index.zip",
|
||||
"use": "@now/lambda",
|
||||
"config": { "handler": "index.handler", "runtime": "nodejs8.10" }
|
||||
},
|
||||
{
|
||||
"src": "subdirectory/index.zip",
|
||||
"use": "@now/lambda",
|
||||
"config": { "handler": "index.handler", "runtime": "nodejs8.10" }
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/", "mustContain": "cow:NO_REPLACE_TO_AVOID_CRC_MISMATCH" },
|
||||
{ "path": "/subdirectory/", "mustContain": "yoda:NO_REPLACE_TO_AVOID_CRC_MISMATCH" }
|
||||
{
|
||||
"path": "/subdirectory/",
|
||||
"mustContain": "yoda:NO_REPLACE_TO_AVOID_CRC_MISMATCH"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
1
packages/now-layer-node/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/dist
|
||||
32
packages/now-layer-node/package.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "@now/layer-node",
|
||||
"version": "0.0.2",
|
||||
"main": "./dist/src/index",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-layer-node"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "tsc && jest",
|
||||
"prepublishOnly": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"fs-extra": "7.0.1",
|
||||
"node-fetch": "2.6.0",
|
||||
"promisepipe": "3.0.0",
|
||||
"stream-to-promise": "2.2.0",
|
||||
"tar": "4.4.6",
|
||||
"yauzl-promise": "2.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tar": "4.0.0",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"typescript": "3.3.3"
|
||||
}
|
||||
}
|
||||
37
packages/now-layer-node/src/index.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
import { glob, Files } from '@now/build-utils';
|
||||
import { mkdir, remove, pathExists } from 'fs-extra';
|
||||
import { install } from './install';
|
||||
|
||||
interface BuildLayerConfig {
|
||||
runtimeVersion: string;
|
||||
platform: string;
|
||||
arch: string;
|
||||
}
|
||||
|
||||
interface BuildLayerResult {
|
||||
files: Files;
|
||||
entrypoint: string;
|
||||
}
|
||||
|
||||
export async function buildLayer({
|
||||
runtimeVersion,
|
||||
platform,
|
||||
arch,
|
||||
}: BuildLayerConfig): Promise<BuildLayerResult> {
|
||||
const dir = join(
|
||||
tmpdir(),
|
||||
`now-layer-node-${runtimeVersion}-${platform}-${arch}`
|
||||
);
|
||||
const exists = await pathExists(dir);
|
||||
if (exists) {
|
||||
await remove(dir);
|
||||
}
|
||||
await mkdir(dir);
|
||||
const { entrypoint } = await install(dir, runtimeVersion, platform, arch);
|
||||
const files = await glob('{bin/node,bin/node.exe,include/**}', {
|
||||
cwd: dir,
|
||||
});
|
||||
return { files, entrypoint };
|
||||
}
|
||||
68
packages/now-layer-node/src/install.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import { basename, join } from 'path';
|
||||
import fetch from 'node-fetch';
|
||||
import { extract } from 'tar';
|
||||
import pipe from 'promisepipe';
|
||||
import { createWriteStream } from 'fs-extra';
|
||||
import { unzip, zipFromFile } from './unzip';
|
||||
|
||||
export async function install(
|
||||
dest: string,
|
||||
version: string,
|
||||
platform: string,
|
||||
arch: string
|
||||
) {
|
||||
const tarballUrl = getUrl(version, platform, arch);
|
||||
console.log('Downloading from ' + tarballUrl);
|
||||
console.log('Downloading to ' + dest);
|
||||
const res = await fetch(tarballUrl);
|
||||
if (!res.ok) {
|
||||
throw new Error(`HTTP request failed: ${res.status}`);
|
||||
}
|
||||
let entrypoint: string;
|
||||
if (platform === 'win32') {
|
||||
// Put it in the `bin` dir for consistency with the tarballs
|
||||
const finalDest = join(dest, 'bin');
|
||||
const zipName = basename(tarballUrl);
|
||||
const zipPath = join(dest, zipName);
|
||||
|
||||
await pipe(
|
||||
res.body,
|
||||
createWriteStream(zipPath)
|
||||
);
|
||||
|
||||
const zipFile = await zipFromFile(zipPath);
|
||||
await unzip(zipFile, finalDest, { strip: 1 });
|
||||
entrypoint = join('bin', 'node.exe');
|
||||
} else {
|
||||
const extractStream = extract({ strip: 1, C: dest });
|
||||
if (!extractStream.destroy) {
|
||||
// If there is an error in promisepipe,
|
||||
// it expects a destroy method
|
||||
extractStream.destroy = () => {};
|
||||
}
|
||||
await pipe(
|
||||
res.body,
|
||||
extractStream
|
||||
);
|
||||
entrypoint = join('bin', 'node');
|
||||
}
|
||||
|
||||
return { entrypoint };
|
||||
}
|
||||
|
||||
export function getUrl(
|
||||
version: string,
|
||||
platform: string = process.platform,
|
||||
arch: string = process.arch
|
||||
): string {
|
||||
let ext: string;
|
||||
let plat: string;
|
||||
if (platform === 'win32') {
|
||||
ext = 'zip';
|
||||
plat = 'win';
|
||||
} else {
|
||||
ext = 'tar.gz';
|
||||
plat = platform;
|
||||
}
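// e.g. v10.16.0 on linux/x64 -> https://nodejs.org/dist/v10.16.0/node-v10.16.0-linux-x64.tar.gz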
|
||||
return `https://nodejs.org/dist/v${version}/node-v${version}-${plat}-${arch}.${ext}`;
|
||||
}
|
||||
96
packages/now-layer-node/src/unzip.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { tmpdir } from 'os';
|
||||
import pipe from 'promisepipe';
|
||||
import { dirname, join } from 'path';
|
||||
import { createWriteStream, mkdirp, symlink, unlink } from 'fs-extra';
|
||||
import streamToPromise from 'stream-to-promise';
|
||||
import {
|
||||
Entry,
|
||||
ZipFile,
|
||||
open as zipFromFile,
|
||||
fromBuffer as zipFromBuffer,
|
||||
} from 'yauzl-promise';
|
||||
|
||||
export { zipFromFile, zipFromBuffer, ZipFile };
|
||||
|
||||
export async function unzipToTemp(
|
||||
data: Buffer | string,
|
||||
tmpDir: string = tmpdir()
|
||||
): Promise<string> {
|
||||
const dir = join(
|
||||
tmpDir,
|
||||
`zeit-fun-${Math.random()
|
||||
.toString(16)
|
||||
.substring(2)}`
|
||||
);
|
||||
let zip: ZipFile;
|
||||
if (Buffer.isBuffer(data)) {
|
||||
zip = await zipFromBuffer(data);
|
||||
} else {
|
||||
zip = await zipFromFile(data);
|
||||
}
|
||||
await unzip(zip, dir);
|
||||
await zip.close();
|
||||
return dir;
|
||||
}
|
||||
|
||||
interface UnzipOptions {
|
||||
strip?: number;
|
||||
}
|
||||
|
||||
export async function unzip(
|
||||
zipFile: ZipFile,
|
||||
dir: string,
|
||||
opts: UnzipOptions = {}
|
||||
): Promise<void> {
|
||||
let entry: Entry;
|
||||
const strip = opts.strip || 0;
|
||||
while ((entry = await zipFile.readEntry()) !== null) {
|
||||
const fileName =
|
||||
strip === 0
|
||||
? entry.fileName
|
||||
: entry.fileName
|
||||
.split('/')
|
||||
.slice(strip)
|
||||
.join('/');
|
||||
const destPath = join(dir, fileName);
|
||||
if (/\/$/.test(entry.fileName)) {
|
||||
await mkdirp(destPath);
|
||||
} else {
|
||||
const [entryStream] = await Promise.all([
|
||||
entry.openReadStream(),
|
||||
// ensure parent directory exists
|
||||
mkdirp(dirname(destPath)),
|
||||
]);
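// Unix file type and permission bits live in the high 16 bits of a zip entry's external attributes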
|
||||
const mode = entry.externalFileAttributes >>> 16;
|
||||
if (isSymbolicLink(mode)) {
|
||||
const linkDest = String(await streamToPromise(entryStream));
|
||||
await symlink(linkDest, destPath);
|
||||
} else {
|
||||
const octal = mode & 4095 /* 07777 */;
|
||||
const modeOctal = ('0000' + octal.toString(8)).slice(-4);
|
||||
const modeVal = parseInt(modeOctal, 8);
|
||||
try {
|
||||
await unlink(destPath);
|
||||
} catch (err) {
|
||||
if (err.code !== 'ENOENT') {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const destStream = createWriteStream(destPath, {
|
||||
mode: modeVal,
|
||||
});
|
||||
await pipe(
|
||||
entryStream,
|
||||
destStream
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const S_IFMT = 61440; /* 0170000 type of file */
|
||||
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
||||
|
||||
export function isSymbolicLink(mode: number): boolean {
|
||||
return (mode & S_IFMT) === S_IFLNK;
|
||||
}
|
||||
54
packages/now-layer-node/test/test.js
Normal file
@@ -0,0 +1,54 @@
|
||||
/* global jest, expect, it */
|
||||
jest.setTimeout(30 * 1000);
|
||||
const { buildLayer } = require('../');
|
||||
|
||||
describe('buildLayer', () => {
|
||||
it('should get node 10 and metadata for windows', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '10.16.0',
|
||||
platform: 'win32',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(entrypoint).toBe('bin/node.exe');
|
||||
expect(names.has('bin/node.exe')).toBeTruthy();
|
||||
expect(names.has('bin/npm.cmd')).toBeFalsy();
|
||||
expect(names.has('bin/npx.cmd')).toBeFalsy();
|
||||
expect(names.has('bin/node_modules')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get node 10 and metadata for macos', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '10.16.0',
|
||||
platform: 'darwin',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(entrypoint).toBe('bin/node');
|
||||
expect(names.has('bin/node')).toBeTruthy();
|
||||
expect(names.has('bin/npm')).toBeFalsy();
|
||||
expect(names.has('bin/npx')).toBeFalsy();
|
||||
expect(names.has('lib/node_modules')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get node 10 and metadata for linux', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '10.16.0',
|
||||
platform: 'linux',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(entrypoint).toBe('bin/node');
|
||||
expect(names.has('bin/node')).toBeTruthy();
|
||||
expect(names.has('include/node/node.h')).toBeTruthy();
|
||||
expect(names.has('bin/npm')).toBeFalsy();
|
||||
expect(names.has('bin/npx')).toBeFalsy();
|
||||
expect(names.has('lib/node_modules')).toBeFalsy();
|
||||
});
|
||||
});
|
||||
18
packages/now-layer-node/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"outDir": "dist",
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
4
packages/now-layer-node/types/promisepipe.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
declare module 'promisepipe' {
|
||||
import { Stream } from 'stream';
|
||||
export default function pipe(...args: Stream[]): Promise<void>;
|
||||
}
|
||||
6
packages/now-layer-node/types/stream-to-promise.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
declare module 'stream-to-promise' {
|
||||
import { Stream } from 'stream';
|
||||
export default function streamToPromise(
|
||||
stream: NodeJS.ReadableStream
|
||||
): Promise<string>;
|
||||
}
|
||||
1
packages/now-layer-npm/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/dist
|
||||
29
packages/now-layer-npm/package.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "@now/layer-npm",
|
||||
"version": "0.0.2",
|
||||
"main": "./dist/src/index",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-layer-npm"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "tsc && jest",
|
||||
"prepublishOnly": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"fs-extra": "7.0.1",
|
||||
"node-fetch": "2.6.0",
|
||||
"promisepipe": "3.0.0",
|
||||
"tar": "4.4.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tar": "4.0.0",
|
||||
"typescript": "3.3.3"
|
||||
}
|
||||
}
|
||||
37
packages/now-layer-npm/src/index.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
import { glob, Files } from '@now/build-utils';
|
||||
import { mkdir, remove, pathExists } from 'fs-extra';
|
||||
import { install } from './install';
|
||||
|
||||
interface BuildLayerConfig {
|
||||
runtimeVersion: string;
|
||||
platform: string;
|
||||
arch: string;
|
||||
}
|
||||
|
||||
interface BuildLayerResult {
|
||||
files: Files;
|
||||
entrypoint: string;
|
||||
}
|
||||
|
||||
export async function buildLayer({
|
||||
runtimeVersion,
|
||||
platform,
|
||||
arch,
|
||||
}: BuildLayerConfig): Promise<BuildLayerResult> {
|
||||
const dir = join(
|
||||
tmpdir(),
|
||||
`now-layer-npm-${runtimeVersion}-${platform}-${arch}`
|
||||
);
|
||||
const exists = await pathExists(dir);
|
||||
if (exists) {
|
||||
await remove(dir);
|
||||
}
|
||||
await mkdir(dir);
|
||||
const { entrypoint } = await install(dir, runtimeVersion);
|
||||
const files = await glob('{bin/**,lib/**,node_modules/**}', {
|
||||
cwd: dir,
|
||||
});
|
||||
return { files, entrypoint };
|
||||
}
|
||||
29
packages/now-layer-npm/src/install.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { join } from 'path';
|
||||
import fetch from 'node-fetch';
|
||||
import { extract } from 'tar';
|
||||
import pipe from 'promisepipe';
|
||||
|
||||
export async function install(dest: string, version: string) {
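// npm itself is published to the registry as a plain tarball, e.g. npm-6.9.0.tgz for version 6.9.0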
|
||||
const tarballUrl = `https://registry.npmjs.org/npm/-/npm-${version}.tgz`;
|
||||
console.log('Downloading from ' + tarballUrl);
|
||||
console.log('Downloading to ' + dest);
|
||||
const res = await fetch(tarballUrl);
|
||||
if (!res.ok) {
|
||||
throw new Error(`HTTP request failed: ${res.status}`);
|
||||
}
|
||||
const extractStream = extract({ strip: 1, C: dest });
|
||||
if (!extractStream.destroy) {
|
||||
// If there is an error in promisepipe,
|
||||
// it expects a destroy method
|
||||
extractStream.destroy = () => {};
|
||||
}
|
||||
await pipe(
|
||||
res.body,
|
||||
extractStream
|
||||
);
|
||||
|
||||
const pathToManifest = join(dest, 'package.json');
|
||||
const manifest = require(pathToManifest);
|
||||
const entrypoint = manifest.bin.npm;
|
||||
return { entrypoint };
|
||||
}
|
||||
50
packages/now-layer-npm/test/test.js
Normal file
@@ -0,0 +1,50 @@
|
||||
/* global jest, expect, it */
|
||||
jest.setTimeout(30 * 1000);
|
||||
const { buildLayer } = require('../');
|
||||
|
||||
describe('buildLayer', () => {
|
||||
it('should get npm 6 but not the readme for windows', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '6.9.0',
|
||||
platform: 'win32',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/npm-cli.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/npm.cmd')).toBeTruthy();
|
||||
expect(names.has('bin/npx.cmd')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get npm 6 but not the readme for macos', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '6.9.0',
|
||||
platform: 'darwin',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/npm-cli.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/npm')).toBeTruthy();
|
||||
expect(names.has('bin/npx')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get npm 6 but not the readme for linux', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '6.9.0',
|
||||
platform: 'linux',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/npm-cli.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/npm')).toBeTruthy();
|
||||
expect(names.has('bin/npx')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
});
|
||||
18
packages/now-layer-npm/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"outDir": "dist",
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
4
packages/now-layer-npm/types/promisepipe.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
declare module 'promisepipe' {
|
||||
import { Stream } from 'stream';
|
||||
export default function pipe(...args: Stream[]): Promise<void>;
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff