mirror of
https://github.com/LukeHagar/vercel.git
synced 2025-12-11 12:57:46 +00:00
Compare commits
95 Commits
@now/build
...
@now/pytho
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
35d56a34cb | ||
|
|
9dfd37e135 | ||
|
|
6f815f2645 | ||
|
|
e186f89cfd | ||
|
|
50cade8bba | ||
|
|
13866e61f6 | ||
|
|
b72f902271 | ||
|
|
159cfe99dd | ||
|
|
1d9a96d104 | ||
|
|
245f846d3e | ||
|
|
c5ef7f3f35 | ||
|
|
ccba15a5aa | ||
|
|
f49aefa8e4 | ||
|
|
d6b36df4ce | ||
|
|
3e4dd10a79 | ||
|
|
73956706bd | ||
|
|
bd8da5360d | ||
|
|
6d5a2a4438 | ||
|
|
c88dc78e33 | ||
|
|
63ac11e9f7 | ||
|
|
1840632729 | ||
|
|
00d8eb0f65 | ||
|
|
3db58ac373 | ||
|
|
92a1720eea | ||
|
|
9abbfbe3f3 | ||
|
|
11ef8aa816 | ||
|
|
3a122ea950 | ||
|
|
737e50630a | ||
|
|
fb27b7b9be | ||
|
|
d1a4aecd2f | ||
|
|
5ef7014ed8 | ||
|
|
0ff2c9950e | ||
|
|
ddcdcdf3e2 | ||
|
|
bfc99f19d2 | ||
|
|
de2c08cfe8 | ||
|
|
9679f07124 | ||
|
|
6ce24d6a4e | ||
|
|
e3e029f5f6 | ||
|
|
89172a6e89 | ||
|
|
e8f1dbaa46 | ||
|
|
16b5b6fdf3 | ||
|
|
3bab29ff76 | ||
|
|
d675d2e668 | ||
|
|
2dda88e676 | ||
|
|
5a0090eb1f | ||
|
|
d438b4ec4e | ||
|
|
f8810fd7e6 | ||
|
|
a642cfea96 | ||
|
|
2daa20a9f2 | ||
|
|
4d5c0c40f0 | ||
|
|
29051681df | ||
|
|
96d5e81538 | ||
|
|
9ba9dd6949 | ||
|
|
b362d57270 | ||
|
|
4ff95e1718 | ||
|
|
ef02bedd4d | ||
|
|
ed68a09c3e | ||
|
|
ac7ae5fc5d | ||
|
|
9727b1f020 | ||
|
|
2dc454f15f | ||
|
|
4463af5c7a | ||
|
|
c00fb37cf6 | ||
|
|
4deb426f9c | ||
|
|
008b04413a | ||
|
|
f177ba46e9 | ||
|
|
c030fce589 | ||
|
|
50a5150bb5 | ||
|
|
0578ccf47e | ||
|
|
e32cd36ded | ||
|
|
6ac0ab121c | ||
|
|
05db2e6a73 | ||
|
|
0b89d30d6c | ||
|
|
8a021c9417 | ||
|
|
f218771382 | ||
|
|
17309291ed | ||
|
|
86300577ae | ||
|
|
f9594e0d61 | ||
|
|
20fd4b2e12 | ||
|
|
718e4d0e0c | ||
|
|
dc3584cd08 | ||
|
|
b41788b241 | ||
|
|
af9a2f9792 | ||
|
|
f8b8e760de | ||
|
|
93d6ec8024 | ||
|
|
7ed6b84056 | ||
|
|
31da488365 | ||
|
|
8eaf05f782 | ||
|
|
9311e90f27 | ||
|
|
c0de970de2 | ||
|
|
465ac2093d | ||
|
|
19ab0e8698 | ||
|
|
02fa98e5e3 | ||
|
|
4aef9d48b0 | ||
|
|
bd2d05344e | ||
|
|
edc7696623 |
@@ -29,14 +29,8 @@ jobs:
|
||||
- run:
|
||||
name: Tests and Coverage
|
||||
command: yarn test-coverage
|
||||
- run:
|
||||
name: Potentially save npm token
|
||||
command: "([[ ! -z $NPM_TOKEN ]] && echo \"//registry.npmjs.org/:_authToken=$NPM_TOKEN\" >> ~/.npmrc) || echo \"Did not write npm token\""
|
||||
- run:
|
||||
name: Potentially publish releases to npm
|
||||
command: ./.circleci/publish.sh
|
||||
workflows:
|
||||
version: 2
|
||||
build-and-deploy:
|
||||
build-and-test:
|
||||
jobs:
|
||||
- build
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
if [ -z "$NPM_TOKEN" ]; then
|
||||
echo "NPM_TOKEN not found. Did you forget to assign the GitHub Action secret?"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||
|
||||
if [ ! -e ~/.npmrc ]; then
|
||||
echo "~/.npmrc file does not exist, skipping publish"
|
||||
exit 0
|
||||
|
||||
43
.editorconfig
Normal file
43
.editorconfig
Normal file
@@ -0,0 +1,43 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
end_of_line = lf
|
||||
charset = utf-8
|
||||
trim_trailing_whitespace = true
|
||||
insert_final_newline = true
|
||||
|
||||
[{*.json,*.json.example,*.gyp,*.yml,*.yaml,*.workflow}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[{*.py,*.asm}]
|
||||
indent_style = space
|
||||
|
||||
[*.py]
|
||||
indent_size = 4
|
||||
|
||||
[*.asm]
|
||||
indent_size = 8
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
# Ideal settings - some plugins might support these
|
||||
[*.js,*.jsx,*.ts,*.tsx]
|
||||
quote_type = single
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.java,*.go,*.rs,*.php,*.ng,*.d,*.cs,*.swift}]
|
||||
indent_style = tab
|
||||
indent_size = 4
|
||||
tab_width = 4
|
||||
|
||||
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.tsx,*.d,*.cs,*.swift}]
|
||||
curly_bracket_next_line = false
|
||||
spaces_around_operators = true
|
||||
spaces_around_brackets = outside
|
||||
# close enough to 1TB
|
||||
indent_brace_style = K&R
|
||||
@@ -3,9 +3,15 @@
|
||||
/**/node_modules/*
|
||||
/packages/now-go/go/*
|
||||
/packages/now-build-utils/dist/*
|
||||
/packages/now-build-utils/src/*.js
|
||||
/packages/now-build-utils/src/fs/*.js
|
||||
/packages/now-node/dist/*
|
||||
/packages/now-layer-node/dist/*
|
||||
/packages/now-layer-npm/dist/*
|
||||
/packages/now-layer-yarn/dist/*
|
||||
/packages/now-next/dist/*
|
||||
/packages/now-node-bridge/*
|
||||
/packages/now-python/*
|
||||
/packages/now-python/dist/*
|
||||
/packages/now-optipng/dist/*
|
||||
/packages/now-go/*
|
||||
/packages/now-rust/dist/*
|
||||
|
||||
9
.github/CODEOWNERS
vendored
Normal file
9
.github/CODEOWNERS
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
# Documentation
|
||||
# https://help.github.com/en/articles/about-code-owners
|
||||
|
||||
* @styfle
|
||||
/packages/now-node @styfle @tootallnate
|
||||
/packages/now-next @timer @dav-is
|
||||
/packages/now-go @styfle @sophearak
|
||||
/packages/now-python @styfle @sophearak
|
||||
/packages/now-rust @styfle @mike-engel @anmonteiro
|
||||
76
.github/main.workflow
vendored
Normal file
76
.github/main.workflow
vendored
Normal file
@@ -0,0 +1,76 @@
|
||||
workflow "Canary publish" {
|
||||
on = "push"
|
||||
resolves = ["3. Canary yarn run publish"]
|
||||
}
|
||||
|
||||
action "0. Canary filter" {
|
||||
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||
args = "branch canary"
|
||||
}
|
||||
|
||||
action "0. Canary PR not deleted" {
|
||||
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||
needs = ["0. Canary filter"]
|
||||
args = "not deleted"
|
||||
}
|
||||
|
||||
action "1. Canary yarn install" {
|
||||
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||
needs = ["0. Canary PR not deleted"]
|
||||
runs = "yarn"
|
||||
args = "install"
|
||||
}
|
||||
|
||||
action "2. Canary yarn run build" {
|
||||
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||
needs = ["1. Canary yarn install"]
|
||||
runs = "yarn"
|
||||
args = "run build"
|
||||
}
|
||||
|
||||
action "3. Canary yarn run publish" {
|
||||
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||
needs = ["2. Canary yarn run build"]
|
||||
runs = "yarn"
|
||||
args = "run publish-from-github"
|
||||
secrets = ["NPM_TOKEN"]
|
||||
}
|
||||
|
||||
|
||||
workflow "Master publish" {
|
||||
on = "push"
|
||||
resolves = ["3. Master yarn run publish"]
|
||||
}
|
||||
|
||||
action "0. Master filter" {
|
||||
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||
args = "branch master"
|
||||
}
|
||||
|
||||
action "0. Master PR not deleted" {
|
||||
uses = "actions/bin/filter@3c0b4f0e63ea54ea5df2914b4fabf383368cd0da"
|
||||
needs = ["0. Master filter"]
|
||||
args = "not deleted"
|
||||
}
|
||||
|
||||
action "1. Master yarn install" {
|
||||
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||
needs = ["0. Master PR not deleted"]
|
||||
runs = "yarn"
|
||||
args = "install"
|
||||
}
|
||||
|
||||
action "2. Master yarn run build" {
|
||||
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||
needs = ["1. Master yarn install"]
|
||||
runs = "yarn"
|
||||
args = "run build"
|
||||
}
|
||||
|
||||
action "3. Master yarn run publish" {
|
||||
uses = "actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680"
|
||||
needs = ["2. Master yarn run build"]
|
||||
runs = "yarn"
|
||||
args = "run publish-from-github"
|
||||
secrets = ["NPM_TOKEN"]
|
||||
}
|
||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -1,4 +1,6 @@
|
||||
node_modules
|
||||
tmp
|
||||
target/
|
||||
.next
|
||||
.next
|
||||
coverage
|
||||
*.tgz
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
{
|
||||
"singleQuote": true,
|
||||
"trailingComma": "es5"
|
||||
}
|
||||
44
README.md
44
README.md
@@ -1,27 +1,43 @@
|
||||
# now-builders
|
||||
|
||||
This is the full list of official Builders provided by the ZEIT team.
|
||||
This is a monorepo containing the [Official Builders](https://zeit.co/docs/v2/deployments/builders/overview) provided by the ZEIT team.
|
||||
|
||||
More details here: https://zeit.co/docs/v2/deployments/builders/overview/
|
||||
## Channels
|
||||
|
||||
There are two Channels:
|
||||
|
||||
| Channel | Git Branch | npm dist-tag | use example |
|
||||
| ------- | ---------- | ------------ | ------------------ |
|
||||
| Canary | `canary` | `@canary` | `@now/node@canary` |
|
||||
| Stable | `master` | `@latest` | `@now/node@latest` |
|
||||
|
||||
All PRs should be submitted to the `canary` branch.
|
||||
|
||||
Once a PR is merged into the `canary` branch, it should be published to npm immediately using the Canary Channel.
|
||||
|
||||
### Publishing to npm
|
||||
|
||||
Run the following command to publish modified builders to npm:
|
||||
|
||||
For the stable channel use:
|
||||
|
||||
```
|
||||
yarn publish-stable
|
||||
```
|
||||
|
||||
For the canary channel use:
|
||||
For the Canary Channel, publish the modified Builders to npm with the following:
|
||||
|
||||
```
|
||||
yarn publish-canary
|
||||
```
|
||||
|
||||
CircleCI will take care of publishing the updated packages to npm from there.
|
||||
For the Stable Channel, you must cherry pick each commit from canary to master and then deploy the modified Builders:
|
||||
|
||||
If for some reason CircleCI fails to publish the npm package, you may do so
|
||||
```
|
||||
git checkout master
|
||||
git pull # make sure you're up to date
|
||||
git cherry-pick <PR501_COMMIT_SHA>
|
||||
git cherry-pick <PR502_COMMIT_SHA>
|
||||
git cherry-pick <PR503_COMMIT_SHA>
|
||||
git cherry-pick <PR504_COMMIT_SHA>
|
||||
# ... etc ...
|
||||
yarn publish-stable
|
||||
```
|
||||
|
||||
After running this publish step, GitHub Actions will take care of publishing the modified Builder packages to npm.
|
||||
|
||||
If for some reason GitHub Actions fails to publish the npm package, you may do so
|
||||
manually by running `npm publish` from the package directory. Make sure to
|
||||
include the `--tag canary` parameter if you are publishing a canary release!
|
||||
use `npm publish --tag canary` if you are publishing a canary release!
|
||||
|
||||
@@ -29,12 +29,11 @@ Serverless:
|
||||
- No runtime dependencies, meaning smaller lambda functions
|
||||
- Optimized for fast [cold start](https://zeit.co/blog/serverless-ssr#cold-start)
|
||||
|
||||
|
||||
#### Possible Ways to Fix It
|
||||
|
||||
In order to create the smallest possible lambdas Next.js has to be configured to build for the `serverless` target.
|
||||
|
||||
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
||||
1. Serverless Next.js requires Next.js 8 or later, to upgrade you can install the `latest` version:
|
||||
|
||||
```
|
||||
npm install next --save
|
||||
@@ -46,7 +45,7 @@ npm install next --save
|
||||
{
|
||||
"scripts": {
|
||||
"now-build": "next build"
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -54,9 +53,9 @@ npm install next --save
|
||||
|
||||
```js
|
||||
module.exports = {
|
||||
target: 'serverless'
|
||||
target: 'serverless',
|
||||
// Other options are still valid
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
|
||||
@@ -70,4 +69,4 @@ module.exports = {
|
||||
|
||||
### Useful Links
|
||||
|
||||
- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
|
||||
- [Serverless target implementation](https://github.com/zeit/now-builders/pull/150)
|
||||
|
||||
@@ -20,7 +20,7 @@ npm install next --save
|
||||
{
|
||||
"scripts": {
|
||||
"now-build": "next build"
|
||||
},
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
@@ -28,9 +28,9 @@ npm install next --save
|
||||
|
||||
```js
|
||||
module.exports = {
|
||||
target: 'serverless'
|
||||
target: 'serverless',
|
||||
// Other options
|
||||
}
|
||||
};
|
||||
```
|
||||
|
||||
4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`
|
||||
|
||||
38
errors/now-static-build-failed-to-detect-a-server.md
Normal file
38
errors/now-static-build-failed-to-detect-a-server.md
Normal file
@@ -0,0 +1,38 @@
|
||||
# `@now/static-build` Failed to detect a server running
|
||||
|
||||
#### Why This Warning Occurred
|
||||
|
||||
When running `now dev`, the `@now/static-build` builder proxies relevant HTTP
|
||||
requests to the server that is created by the `now-dev` script in the
|
||||
`package.json` file.
|
||||
|
||||
In order for `now dev` to know which port the server is running on, the builder
|
||||
is provided a `$PORT` environment variable that the server _must_ bind to. The
|
||||
error "Failed to detect a server running on port" is printed if the builder fails
|
||||
to detect a server listening on that specific port within five minutes.
|
||||
|
||||
#### Possible Ways to Fix It
|
||||
|
||||
Please ensure that your `now-dev` script binds the spawned development server on
|
||||
the provided `$PORT` that the builder expects the server to bind to.
|
||||
|
||||
For example, if you are using Gatsby, your `now-dev` script must use the `-p`
|
||||
(port) option to bind to the `$PORT` specified from the builder:
|
||||
|
||||
```
|
||||
{
|
||||
...
|
||||
"scripts": {
|
||||
...
|
||||
"now-dev": "gatsby develop -p $PORT"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Consult your static builder program's `--help` or documentation to figure out what
|
||||
the command line flag to bind to a specific port is (in many cases, it is one of:
|
||||
`-p` / `-P` / `--port`).
|
||||
|
||||
### Useful Links
|
||||
|
||||
- [`@now/static-build` Local Development Documentation](https://zeit.co/docs/v2/deployments/official-builders/static-build-now-static-build#local-development)
|
||||
@@ -1,5 +1,37 @@
|
||||
const childProcess = require('child_process');
|
||||
const path = require('path');
|
||||
|
||||
const command = 'git diff HEAD~1 --name-only';
|
||||
const diff = childProcess.execSync(command).toString();
|
||||
|
||||
const changed = diff
|
||||
.split('\n')
|
||||
.filter(item => Boolean(item) && item.includes('packages/'))
|
||||
.map(item => path.relative('packages', item).split('/')[0]);
|
||||
|
||||
const matches = [];
|
||||
|
||||
if (changed.length > 0) {
|
||||
console.log('The following packages have changed:');
|
||||
|
||||
changed.map((item) => {
|
||||
matches.push(item);
|
||||
console.log(item);
|
||||
|
||||
return null;
|
||||
});
|
||||
} else {
|
||||
matches.push('now-node');
|
||||
console.log(`No packages changed, defaulting to ${matches[0]}`);
|
||||
}
|
||||
|
||||
const testMatch = Array.from(new Set(matches)).map(
|
||||
item => `**/${item}/**/?(*.)+(spec|test).[jt]s?(x)`,
|
||||
);
|
||||
|
||||
module.exports = {
|
||||
testEnvironment: 'node',
|
||||
testMatch,
|
||||
collectCoverageFrom: [
|
||||
'packages/(!test)/**/*.{js,jsx}',
|
||||
'!**/node_modules/**',
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
{
|
||||
"npmClient": "yarn",
|
||||
"useWorkspaces": true,
|
||||
"packages": [
|
||||
"packages/*"
|
||||
],
|
||||
"packages": ["packages/*"],
|
||||
"command": {
|
||||
"publish": {
|
||||
"npmClient": "npm",
|
||||
|
||||
15
package.json
15
package.json
@@ -12,8 +12,9 @@
|
||||
"scripts": {
|
||||
"lerna": "lerna",
|
||||
"bootstrap": "lerna bootstrap",
|
||||
"publish-stable": "lerna version",
|
||||
"publish-canary": "lerna version prerelease --preid canary",
|
||||
"publish-stable": "git checkout master && git pull && lerna version",
|
||||
"publish-canary": "git checkout canary && git pull && lerna version prerelease --preid canary",
|
||||
"publish-from-github": "./.circleci/publish.sh",
|
||||
"build": "./.circleci/build.sh",
|
||||
"lint": "eslint .",
|
||||
"codecov": "codecov",
|
||||
@@ -31,6 +32,10 @@
|
||||
"*.ts": [
|
||||
"prettier --write",
|
||||
"git add"
|
||||
],
|
||||
"*.md": [
|
||||
"prettier --write",
|
||||
"git add"
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -51,6 +56,10 @@
|
||||
"lint-staged": "^8.0.4",
|
||||
"node-fetch": "^2.3.0",
|
||||
"pre-commit": "^1.2.2",
|
||||
"prettier": "^1.15.2"
|
||||
"prettier": "1.17.1"
|
||||
},
|
||||
"prettier": {
|
||||
"singleQuote": true,
|
||||
"trailingComma": "es5"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/build-utils",
|
||||
"version": "0.5.4",
|
||||
"version": "0.5.6",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.js",
|
||||
|
||||
@@ -31,7 +31,10 @@ class FileFsRef implements File {
|
||||
this.fsPath = fsPath;
|
||||
}
|
||||
|
||||
static async fromFsPath({ mode, fsPath }: FileFsRefOptions): Promise<FileFsRef> {
|
||||
static async fromFsPath({
|
||||
mode,
|
||||
fsPath,
|
||||
}: FileFsRefOptions): Promise<FileFsRef> {
|
||||
let m = mode;
|
||||
if (!m) {
|
||||
const stat = await fs.lstat(fsPath);
|
||||
@@ -40,7 +43,11 @@ class FileFsRef implements File {
|
||||
return new FileFsRef({ mode: m, fsPath });
|
||||
}
|
||||
|
||||
static async fromStream({ mode = 0o100644, stream, fsPath }: FromStreamOptions): Promise<FileFsRef> {
|
||||
static async fromStream({
|
||||
mode = 0o100644,
|
||||
stream,
|
||||
fsPath,
|
||||
}: FromStreamOptions): Promise<FileFsRef> {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof stream.pipe === 'function'); // is-stream
|
||||
assert(typeof fsPath === 'string');
|
||||
@@ -48,7 +55,7 @@ class FileFsRef implements File {
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
const dest = fs.createWriteStream(fsPath, {
|
||||
mode: mode & 0o777
|
||||
mode: mode & 0o777,
|
||||
});
|
||||
stream.pipe(dest);
|
||||
stream.on('error', reject);
|
||||
@@ -72,15 +79,15 @@ class FileFsRef implements File {
|
||||
let flag = false;
|
||||
|
||||
// eslint-disable-next-line consistent-return
|
||||
return multiStream((cb) => {
|
||||
return multiStream(cb => {
|
||||
if (flag) return cb(null, null);
|
||||
flag = true;
|
||||
|
||||
this.toStreamAsync()
|
||||
.then((stream) => {
|
||||
.then(stream => {
|
||||
cb(null, stream);
|
||||
})
|
||||
.catch((error) => {
|
||||
.catch(error => {
|
||||
cb(error, null);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,11 +4,11 @@ import { File, Files, Meta } from '../types';
|
||||
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
|
||||
|
||||
export interface DownloadedFiles {
|
||||
[filePath: string]: FileFsRef
|
||||
[filePath: string]: FileFsRef;
|
||||
}
|
||||
|
||||
const S_IFMT = 61440; /* 0170000 type of file */
|
||||
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
||||
const S_IFMT = 61440; /* 0170000 type of file */
|
||||
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
||||
|
||||
export function isSymbolicLink(mode: number): boolean {
|
||||
return (mode & S_IFMT) === S_IFLNK;
|
||||
@@ -17,9 +17,9 @@ export function isSymbolicLink(mode: number): boolean {
|
||||
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
|
||||
const { mode } = file;
|
||||
if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
|
||||
const [ target ] = await Promise.all([
|
||||
const [target] = await Promise.all([
|
||||
readlink((file as FileFsRef).fsPath),
|
||||
mkdirp(path.dirname(fsPath))
|
||||
mkdirp(path.dirname(fsPath)),
|
||||
]);
|
||||
await symlink(target, fsPath);
|
||||
return FileFsRef.fromFsPath({ mode, fsPath });
|
||||
@@ -34,12 +34,25 @@ async function removeFile(basePath: string, fileMatched: string) {
|
||||
await remove(file);
|
||||
}
|
||||
|
||||
export default async function download(files: Files, basePath: string, meta?: Meta): Promise<DownloadedFiles> {
|
||||
export default async function download(
|
||||
files: Files,
|
||||
basePath: string,
|
||||
meta?: Meta
|
||||
): Promise<DownloadedFiles> {
|
||||
const { isDev = false, filesChanged = null, filesRemoved = null } =
|
||||
meta || {};
|
||||
|
||||
if (isDev) {
|
||||
// In `now dev`, the `download()` function is a no-op because
|
||||
// the `basePath` matches the `cwd` of the dev server, so the
|
||||
// source files are already available.
|
||||
return files as DownloadedFiles;
|
||||
}
|
||||
|
||||
const files2: DownloadedFiles = {};
|
||||
const { filesChanged = null, filesRemoved = null } = meta || {};
|
||||
|
||||
await Promise.all(
|
||||
Object.keys(files).map(async (name) => {
|
||||
Object.keys(files).map(async name => {
|
||||
// If the file does not exist anymore, remove it.
|
||||
if (Array.isArray(filesRemoved) && filesRemoved.includes(name)) {
|
||||
await removeFile(basePath, name);
|
||||
@@ -55,7 +68,7 @@ export default async function download(files: Files, basePath: string, meta?: Me
|
||||
const fsPath = path.join(basePath, name);
|
||||
|
||||
files2[name] = await downloadFile(file, fsPath);
|
||||
}),
|
||||
})
|
||||
);
|
||||
|
||||
return files2;
|
||||
|
||||
@@ -8,12 +8,16 @@ import FileFsRef from '../file-fs-ref';
|
||||
type GlobOptions = vanillaGlob_.IOptions;
|
||||
|
||||
interface FsFiles {
|
||||
[filePath: string]: FileFsRef
|
||||
[filePath: string]: FileFsRef;
|
||||
}
|
||||
|
||||
const vanillaGlob = promisify(vanillaGlob_);
|
||||
|
||||
export default async function glob(pattern: string, opts: GlobOptions | string, mountpoint?: string): Promise<FsFiles> {
|
||||
export default async function glob(
|
||||
pattern: string,
|
||||
opts: GlobOptions | string,
|
||||
mountpoint?: string
|
||||
): Promise<FsFiles> {
|
||||
let options: GlobOptions;
|
||||
if (typeof opts === 'string') {
|
||||
options = { cwd: opts };
|
||||
@@ -23,7 +27,7 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
|
||||
|
||||
if (!options.cwd) {
|
||||
throw new Error(
|
||||
'Second argument (basePath) must be specified for names of resulting files',
|
||||
'Second argument (basePath) must be specified for names of resulting files'
|
||||
);
|
||||
}
|
||||
|
||||
@@ -41,11 +45,11 @@ export default async function glob(pattern: string, opts: GlobOptions | string,
|
||||
const files = await vanillaGlob(pattern, options);
|
||||
|
||||
for (const relativePath of files) {
|
||||
const fsPath = path.join(options.cwd!, relativePath);
|
||||
const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
|
||||
let stat: Stats = options.statCache![fsPath] as Stats;
|
||||
assert(
|
||||
stat,
|
||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
|
||||
);
|
||||
if (stat.isFile()) {
|
||||
const isSymlink = options.symlinks![fsPath];
|
||||
|
||||
@@ -7,6 +7,6 @@ export default function rename(files: Files, delegate: Delegate): Files {
|
||||
...newFiles,
|
||||
[delegate(name)]: files[name],
|
||||
}),
|
||||
{},
|
||||
{}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -109,7 +109,7 @@ export async function installDependencies(
|
||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||
await spawnAsync(
|
||||
'npm',
|
||||
['install'].concat(commandArgs),
|
||||
['install', '--unsafe-perm'].concat(commandArgs),
|
||||
destPath,
|
||||
opts as SpawnOptions
|
||||
);
|
||||
|
||||
@@ -1,26 +1,28 @@
|
||||
import eos from 'end-of-stream';
|
||||
|
||||
export default function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
|
||||
export default function streamToBuffer(
|
||||
stream: NodeJS.ReadableStream
|
||||
): Promise<Buffer> {
|
||||
return new Promise<Buffer>((resolve, reject) => {
|
||||
const buffers: Buffer[] = [];
|
||||
|
||||
stream.on('data', buffers.push.bind(buffers))
|
||||
stream.on('data', buffers.push.bind(buffers));
|
||||
|
||||
eos(stream, (err) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
switch (buffers.length) {
|
||||
case 0:
|
||||
resolve(Buffer.allocUnsafe(0));
|
||||
break;
|
||||
case 1:
|
||||
resolve(buffers[0]);
|
||||
break;
|
||||
default:
|
||||
resolve(Buffer.concat(buffers));
|
||||
}
|
||||
eos(stream, err => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
return;
|
||||
}
|
||||
switch (buffers.length) {
|
||||
case 0:
|
||||
resolve(Buffer.allocUnsafe(0));
|
||||
break;
|
||||
case 1:
|
||||
resolve(buffers[0]);
|
||||
break;
|
||||
default:
|
||||
resolve(Buffer.concat(buffers));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,34 +1,51 @@
|
||||
import FileBlob from './file-blob';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
import FileRef from './file-ref';
|
||||
import { File, Files, AnalyzeOptions, BuildOptions, PrepareCacheOptions, ShouldServeOptions, Meta } from './types';
|
||||
import {
|
||||
File,
|
||||
Files,
|
||||
AnalyzeOptions,
|
||||
BuildOptions,
|
||||
PrepareCacheOptions,
|
||||
ShouldServeOptions,
|
||||
Meta,
|
||||
} from './types';
|
||||
import { Lambda, createLambda } from './lambda';
|
||||
import download from './fs/download';
|
||||
import getWriteableDirectory from './fs/get-writable-directory'
|
||||
import download, { DownloadedFiles } from './fs/download';
|
||||
import getWriteableDirectory from './fs/get-writable-directory';
|
||||
import glob from './fs/glob';
|
||||
import rename from './fs/rename';
|
||||
import { installDependencies, runPackageJsonScript, runNpmInstall, runShellScript } from './fs/run-user-scripts';
|
||||
import {
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runShellScript,
|
||||
} from './fs/run-user-scripts';
|
||||
import streamToBuffer from './fs/stream-to-buffer';
|
||||
import shouldServe from './should-serve';
|
||||
|
||||
export {
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
FileRef,
|
||||
Files,
|
||||
File,
|
||||
Meta,
|
||||
Lambda,
|
||||
createLambda,
|
||||
download,
|
||||
getWriteableDirectory,
|
||||
glob,
|
||||
rename,
|
||||
installDependencies, runPackageJsonScript, runNpmInstall, runShellScript,
|
||||
streamToBuffer,
|
||||
AnalyzeOptions,
|
||||
BuildOptions,
|
||||
PrepareCacheOptions,
|
||||
ShouldServeOptions,
|
||||
shouldServe,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
FileRef,
|
||||
Files,
|
||||
File,
|
||||
Meta,
|
||||
Lambda,
|
||||
createLambda,
|
||||
download,
|
||||
DownloadedFiles,
|
||||
getWriteableDirectory,
|
||||
glob,
|
||||
rename,
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runShellScript,
|
||||
streamToBuffer,
|
||||
AnalyzeOptions,
|
||||
BuildOptions,
|
||||
PrepareCacheOptions,
|
||||
ShouldServeOptions,
|
||||
shouldServe,
|
||||
};
|
||||
|
||||
@@ -32,9 +32,7 @@ export class Lambda {
|
||||
public runtime: string;
|
||||
public environment: Environment;
|
||||
|
||||
constructor({
|
||||
zipBuffer, handler, runtime, environment,
|
||||
}: LambdaOptions) {
|
||||
constructor({ zipBuffer, handler, runtime, environment }: LambdaOptions) {
|
||||
this.type = 'Lambda';
|
||||
this.zipBuffer = zipBuffer;
|
||||
this.handler = handler;
|
||||
@@ -47,7 +45,10 @@ const sema = new Sema(10);
|
||||
const mtime = new Date(1540000000000);
|
||||
|
||||
export async function createLambda({
|
||||
files, handler, runtime, environment = {},
|
||||
files,
|
||||
handler,
|
||||
runtime,
|
||||
environment = {},
|
||||
}: CreateLambdaOptions): Promise<Lambda> {
|
||||
assert(typeof files === 'object', '"files" must be an object');
|
||||
assert(typeof handler === 'string', '"handler" is not a string');
|
||||
@@ -97,7 +98,9 @@ export async function createZip(files: Files): Promise<Buffer> {
|
||||
}
|
||||
|
||||
zipFile.end();
|
||||
streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
|
||||
streamToBuffer(zipFile.outputStream)
|
||||
.then(resolve)
|
||||
.catch(reject);
|
||||
});
|
||||
|
||||
return zipBuffer;
|
||||
|
||||
@@ -5,7 +5,7 @@ import FileFsRef from './file-fs-ref';
|
||||
export default function shouldServe({
|
||||
entrypoint,
|
||||
files,
|
||||
requestPath
|
||||
requestPath,
|
||||
}: ShouldServeOptions): boolean {
|
||||
requestPath = requestPath.replace(/\/$/, ''); // sanitize trailing '/'
|
||||
entrypoint = entrypoint.replace(/\\/, '/'); // windows compatibility
|
||||
@@ -23,5 +23,5 @@ export default function shouldServe({
|
||||
}
|
||||
|
||||
function hasProp(obj: { [path: string]: FileFsRef }, key: string): boolean {
|
||||
return Object.hasOwnProperty.call(obj, key)
|
||||
return Object.hasOwnProperty.call(obj, key);
|
||||
}
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "api/index.js", "use": "@now/node" }
|
||||
],
|
||||
"builds": [{ "src": "api/index.js", "use": "@now/node" }],
|
||||
"probes": [
|
||||
{ "path": "/api/index.js", "mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER" }
|
||||
{
|
||||
"path": "/api/index.js",
|
||||
"mustContain": "cross-cow:RANDOMNESS_PLACEHOLDER"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.js", "use": "@now/node", "config": { "maxLambdaSize": "15mb" } }
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/", "mustContain": "found:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{
|
||||
"src": "index.js",
|
||||
"use": "@now/node",
|
||||
"config": { "maxLambdaSize": "15mb" }
|
||||
}
|
||||
],
|
||||
"probes": [{ "path": "/", "mustContain": "found:RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
|
||||
@@ -15,10 +15,6 @@
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
},
|
||||
"include": [
|
||||
"src/**/*"
|
||||
],
|
||||
"exclude": [
|
||||
"node_modules"
|
||||
]
|
||||
"include": ["src/**/*"],
|
||||
"exclude": ["node_modules"]
|
||||
}
|
||||
|
||||
@@ -9,12 +9,13 @@ const { shouldServe } = require('@now/build-utils'); // eslint-disable-line impo
|
||||
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
exports.build = async ({
|
||||
workPath, files, entrypoint, meta,
|
||||
}) => {
|
||||
console.log('downloading files...');
|
||||
const srcDir = await getWritableDirectory();
|
||||
const outDir = await getWritableDirectory();
|
||||
|
||||
await download(files, srcDir);
|
||||
await download(files, workPath, meta);
|
||||
|
||||
const handlerPath = path.join(__dirname, 'handler');
|
||||
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
||||
@@ -24,7 +25,7 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
|
||||
// For now only the entrypoint file is copied into the lambda
|
||||
await copyFile(
|
||||
path.join(srcDir, entrypoint),
|
||||
path.join(workPath, entrypoint),
|
||||
path.join(outDir, entrypoint),
|
||||
);
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/cgi",
|
||||
"version": "0.1.3",
|
||||
"version": "0.1.4",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -23,7 +23,7 @@ const getGoUrl = (version: string, platform: string, arch: string) => {
|
||||
return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
|
||||
};
|
||||
|
||||
export async function getAnalyzedEntrypoint(filePath: string) {
|
||||
export async function getAnalyzedEntrypoint(filePath: string, modulePath = '') {
|
||||
debug('Analyzing entrypoint %o', filePath);
|
||||
const bin = join(__dirname, 'analyze');
|
||||
|
||||
@@ -35,7 +35,8 @@ export async function getAnalyzedEntrypoint(filePath: string) {
|
||||
await go.build(src, dest);
|
||||
}
|
||||
|
||||
const args = [filePath];
|
||||
const args = [`-modpath=${modulePath}`, filePath];
|
||||
|
||||
const analyzed = await execa.stdout(bin, args);
|
||||
debug('Analyzed entrypoint %o', analyzed);
|
||||
return analyzed;
|
||||
@@ -125,7 +126,15 @@ export async function downloadGo(
|
||||
// If we found GOPATH in ENV, or default `Go` path exists
|
||||
// asssume that user have `Go` installed
|
||||
if (isUserGo || process.env.GOPATH !== undefined) {
|
||||
return createGo(dir, platform, arch);
|
||||
const { stdout } = await execa('go', ['version']);
|
||||
|
||||
if (parseInt(stdout.split('.')[1]) >= 11) {
|
||||
return createGo(dir, platform, arch);
|
||||
}
|
||||
|
||||
throw new Error(
|
||||
`Your current ${stdout} doesn't support Go Modules. Please update.`
|
||||
);
|
||||
} else {
|
||||
// Check `Go` bin in builder CWD
|
||||
const isGoExist = await pathExists(join(dir, 'bin'));
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
import { join, sep, dirname } from 'path';
|
||||
import { join, sep, dirname, basename } from 'path';
|
||||
import { readFile, writeFile, pathExists, move } from 'fs-extra';
|
||||
import { homedir } from 'os';
|
||||
import execa from 'execa';
|
||||
|
||||
import {
|
||||
glob,
|
||||
@@ -14,6 +16,7 @@ import {
|
||||
import { createGo, getAnalyzedEntrypoint } from './go-helpers';
|
||||
|
||||
interface Analyzed {
|
||||
found?: boolean;
|
||||
packageName: string;
|
||||
functionName: string;
|
||||
watch: string[];
|
||||
@@ -28,6 +31,18 @@ interface BuildParamsType extends BuildOptions {
|
||||
meta: BuildParamsMeta;
|
||||
}
|
||||
|
||||
// Initialize private git repo for Go Modules
|
||||
async function initPrivateGit(credentials: string) {
|
||||
await execa('git', [
|
||||
'config',
|
||||
'--global',
|
||||
'credential.helper',
|
||||
`store --file ${join(homedir(), '.git-credentials')}`,
|
||||
]);
|
||||
|
||||
await writeFile(join(homedir(), '.git-credentials'), credentials);
|
||||
}
|
||||
|
||||
export const version = 2;
|
||||
|
||||
export const config = {
|
||||
@@ -38,8 +53,14 @@ export async function build({
|
||||
files,
|
||||
entrypoint,
|
||||
config,
|
||||
workPath,
|
||||
meta = {} as BuildParamsMeta,
|
||||
}: BuildParamsType) {
|
||||
if (process.env.GIT_CREDENTIALS && !meta.isDev) {
|
||||
console.log('Initialize Git credentials...');
|
||||
await initPrivateGit(process.env.GIT_CREDENTIALS);
|
||||
}
|
||||
|
||||
console.log('Downloading user files...');
|
||||
const entrypointArr = entrypoint.split(sep);
|
||||
|
||||
@@ -48,17 +69,82 @@ export async function build({
|
||||
getWriteableDirectory(),
|
||||
]);
|
||||
|
||||
const srcPath = join(goPath, 'src', 'lambda');
|
||||
let downloadedFiles;
|
||||
if (meta.isDev) {
|
||||
const devGoPath = `dev${entrypointArr[entrypointArr.length - 1]}`;
|
||||
const goPathArr = goPath.split(sep);
|
||||
goPathArr.pop();
|
||||
goPathArr.push(devGoPath);
|
||||
goPath = goPathArr.join(sep);
|
||||
downloadedFiles = await download(files, workPath, meta);
|
||||
} else {
|
||||
downloadedFiles = await download(files, srcPath);
|
||||
}
|
||||
|
||||
const srcPath = join(goPath, 'src', 'lambda');
|
||||
const downloadedFiles = await download(files, srcPath);
|
||||
const input = dirname(downloadedFiles[entrypoint].fsPath);
|
||||
console.log(`Parsing AST for "${entrypoint}"`);
|
||||
let analyzed: string;
|
||||
try {
|
||||
let goModAbsPathDir = '';
|
||||
for (const file of Object.keys(downloadedFiles)) {
|
||||
if (file === 'go.mod') {
|
||||
goModAbsPathDir = dirname(downloadedFiles[file].fsPath);
|
||||
}
|
||||
}
|
||||
analyzed = await getAnalyzedEntrypoint(
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
goModAbsPathDir
|
||||
);
|
||||
} catch (err) {
|
||||
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!analyzed) {
|
||||
const err = new Error(
|
||||
`Could not find an exported function in "${entrypoint}"
|
||||
Learn more: https://zeit.co/docs/v2/deployments/official-builders/go-now-go/#entrypoint
|
||||
`
|
||||
);
|
||||
console.log(err.message);
|
||||
throw err;
|
||||
}
|
||||
|
||||
const parsedAnalyzed = JSON.parse(analyzed) as Analyzed;
|
||||
|
||||
if (meta.isDev) {
|
||||
const base = dirname(downloadedFiles['now.json'].fsPath);
|
||||
const destNow = join(
|
||||
base,
|
||||
'.now',
|
||||
'cache',
|
||||
basename(entrypoint, '.go'),
|
||||
'src',
|
||||
'lambda'
|
||||
);
|
||||
// this will ensure Go rebuilt fast
|
||||
goPath = join(base, '.now', 'cache', basename(entrypoint, '.go'));
|
||||
await download(downloadedFiles, destNow);
|
||||
|
||||
downloadedFiles = await glob('**', destNow);
|
||||
}
|
||||
|
||||
// find `go.mod` in downloadedFiles
|
||||
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||
let isGoModExist = false;
|
||||
let goModPath = '';
|
||||
let goModPathArr: string[] = [];
|
||||
for (const file of Object.keys(downloadedFiles)) {
|
||||
const fileDirname = dirname(downloadedFiles[file].fsPath);
|
||||
if (file === 'go.mod') {
|
||||
isGoModExist = true;
|
||||
goModPath = fileDirname;
|
||||
goModPathArr = goModPath.split(sep);
|
||||
} else if (file.includes('go.mod')) {
|
||||
isGoModExist = true;
|
||||
if (entrypointDirname === fileDirname) {
|
||||
goModPath = fileDirname;
|
||||
goModPathArr = goModPath.split(sep);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const input = entrypointDirname;
|
||||
var includedFiles: Files = {};
|
||||
|
||||
if (config && config.includeFiles) {
|
||||
@@ -70,37 +156,19 @@ export async function build({
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Parsing AST for "${entrypoint}"`);
|
||||
let analyzed: string;
|
||||
try {
|
||||
analyzed = await getAnalyzedEntrypoint(downloadedFiles[entrypoint].fsPath);
|
||||
} catch (err) {
|
||||
console.log(`Failed to parse AST for "${entrypoint}"`);
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (!analyzed) {
|
||||
const err = new Error(
|
||||
`Could not find an exported function in "${entrypoint}"`
|
||||
);
|
||||
console.log(err.message);
|
||||
throw err;
|
||||
}
|
||||
|
||||
const parsedAnalyzed = JSON.parse(analyzed) as Analyzed;
|
||||
|
||||
const handlerFunctionName = parsedAnalyzed.functionName;
|
||||
console.log(
|
||||
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`
|
||||
);
|
||||
|
||||
// we need `main.go` in the same dir as the entrypoint,
|
||||
// otherwise `go build` will refuse to build
|
||||
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);
|
||||
|
||||
// check if package name other than main
|
||||
// using `go.mod` way building the handler
|
||||
const packageName = parsedAnalyzed.packageName;
|
||||
const isGoModExist = await pathExists(join(entrypointDirname, 'go.mod'));
|
||||
|
||||
if (isGoModExist && packageName === 'main') {
|
||||
throw new Error('Please change `package main` to `package handler`');
|
||||
}
|
||||
|
||||
if (packageName !== 'main') {
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
@@ -132,10 +200,7 @@ export async function build({
|
||||
const goFuncName = `${packageName}.${handlerFunctionName}`;
|
||||
|
||||
if (isGoModExist) {
|
||||
const goModContents = await readFile(
|
||||
join(entrypointDirname, 'go.mod'),
|
||||
'utf8'
|
||||
);
|
||||
const goModContents = await readFile(join(goModPath, 'go.mod'), 'utf8');
|
||||
const usrModName = goModContents.split('\n')[0].split(' ')[1];
|
||||
goPackageName = `${usrModName}/${packageName}`;
|
||||
}
|
||||
@@ -144,11 +209,16 @@ export async function build({
|
||||
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
|
||||
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);
|
||||
|
||||
// write main__mod__.go
|
||||
await writeFile(
|
||||
join(entrypointDirname, mainModGoFileName),
|
||||
mainModGoContents
|
||||
);
|
||||
if (goModPathArr.length > 1) {
|
||||
// using `go.mod` path to write main__mod__.go
|
||||
await writeFile(join(goModPath, mainModGoFileName), mainModGoContents);
|
||||
} else {
|
||||
// using `entrypointDirname` to write main__mod__.go
|
||||
await writeFile(
|
||||
join(entrypointDirname, mainModGoFileName),
|
||||
mainModGoContents
|
||||
);
|
||||
}
|
||||
|
||||
// move user go file to folder
|
||||
try {
|
||||
@@ -169,25 +239,34 @@ export async function build({
|
||||
);
|
||||
}
|
||||
|
||||
await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
|
||||
overwrite: forceMove,
|
||||
});
|
||||
if (
|
||||
dirname(downloadedFiles[entrypoint].fsPath) === goModPath ||
|
||||
!isGoModExist
|
||||
) {
|
||||
await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
|
||||
overwrite: forceMove,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
console.log('failed to move entry to package folder');
|
||||
throw err;
|
||||
}
|
||||
|
||||
if (meta.isDev) {
|
||||
const isGoModBk = await pathExists(join(entrypointDirname, 'go.mod.bk'));
|
||||
let entrypointDir = entrypointDirname;
|
||||
if (goModPathArr.length > 1) {
|
||||
entrypointDir = goModPath;
|
||||
}
|
||||
const isGoModBk = await pathExists(join(entrypointDir, 'go.mod.bk'));
|
||||
if (isGoModBk) {
|
||||
await move(
|
||||
join(entrypointDirname, 'go.mod.bk'),
|
||||
join(entrypointDirname, 'go.mod'),
|
||||
join(entrypointDir, 'go.mod.bk'),
|
||||
join(entrypointDir, 'go.mod'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
await move(
|
||||
join(entrypointDirname, 'go.sum.bk'),
|
||||
join(entrypointDirname, 'go.sum'),
|
||||
join(entrypointDir, 'go.sum.bk'),
|
||||
join(entrypointDir, 'go.sum'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
}
|
||||
@@ -204,8 +283,11 @@ export async function build({
|
||||
|
||||
console.log('Running `go build`...');
|
||||
const destPath = join(outDir, 'handler');
|
||||
const isGoModInRootDir = goModPathArr.length === 1;
|
||||
const baseGoModPath = isGoModInRootDir ? entrypointDirname : goModPath;
|
||||
try {
|
||||
const src = [join(entrypointDirname, mainModGoFileName)];
|
||||
let src = [join(baseGoModPath, mainModGoFileName)];
|
||||
|
||||
await go.build(src, destPath, config.ldsflags);
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
@@ -214,17 +296,20 @@ export async function build({
|
||||
if (meta.isDev) {
|
||||
// caching for `now dev`
|
||||
await move(
|
||||
join(entrypointDirname, 'go.mod'),
|
||||
join(entrypointDirname, 'go.mod.bk'),
|
||||
join(baseGoModPath, 'go.mod'),
|
||||
join(baseGoModPath, 'go.mod.bk'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
await move(
|
||||
join(entrypointDirname, 'go.sum'),
|
||||
join(entrypointDirname, 'go.sum.bk'),
|
||||
join(baseGoModPath, 'go.sum'),
|
||||
join(baseGoModPath, 'go.sum.bk'),
|
||||
{ overwrite: true }
|
||||
);
|
||||
}
|
||||
} else {
|
||||
// legacy mode
|
||||
// we need `main.go` in the same dir as the entrypoint,
|
||||
// otherwise `go build` will refuse to build
|
||||
const go = await createGo(
|
||||
goPath,
|
||||
process.platform,
|
||||
@@ -286,16 +371,17 @@ export async function build({
|
||||
};
|
||||
|
||||
let watch = parsedAnalyzed.watch;
|
||||
let watchSub: string[] = [];
|
||||
// if `entrypoint` located in subdirectory
|
||||
// we will need to concat it with return watch array
|
||||
if (entrypointArr.length > 1) {
|
||||
entrypointArr.pop();
|
||||
watch = parsedAnalyzed.watch.map(file => join(...entrypointArr, file));
|
||||
watchSub = parsedAnalyzed.watch.map(file => join(...entrypointArr, file));
|
||||
}
|
||||
|
||||
return {
|
||||
output,
|
||||
watch,
|
||||
watch: watch.concat(watchSub),
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/go",
|
||||
"version": "0.4.5",
|
||||
"version": "0.5.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -8,4 +8,4 @@
|
||||
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/subdirectory", "mustContain": "subcow:RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,4 +15,4 @@
|
||||
"mustContain": "RANDOMNESS_PLACEHOLDER"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.go", "use": "@now/go" }
|
||||
]
|
||||
}
|
||||
"builds": [{ "src": "index.go", "use": "@now/go" }]
|
||||
}
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "*.go", "use": "@now/go" }
|
||||
],
|
||||
"builds": [{ "src": "*.go", "use": "@now/go" }],
|
||||
"env": {
|
||||
"RANDOMNESS_ENV_VAR": "RANDOMNESS_PLACEHOLDER"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,9 +5,7 @@
|
||||
"src": "index.go",
|
||||
"use": "@now/go",
|
||||
"config": {
|
||||
"includeFiles": [
|
||||
"templates/**"
|
||||
]
|
||||
"includeFiles": ["templates/**"]
|
||||
}
|
||||
}
|
||||
],
|
||||
|
||||
22
packages/now-go/test/fixtures/09-exported-function/index.go
vendored
Normal file
22
packages/now-go/test/fixtures/09-exported-function/index.go
vendored
Normal file
@@ -0,0 +1,22 @@
|
||||
package function
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Person struct
|
||||
type Person struct {
|
||||
name string
|
||||
age int
|
||||
}
|
||||
|
||||
// NewPerson struct method
|
||||
func NewPerson(name string, age int) *Person {
|
||||
return &Person{name: name, age: age}
|
||||
}
|
||||
|
||||
// H func
|
||||
func H(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
5
packages/now-go/test/fixtures/09-exported-function/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/09-exported-function/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "index.go", "use": "@now/go" }],
|
||||
"probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
3
packages/now-go/test/fixtures/10-go-mod/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/10-go-mod/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module go-mod
|
||||
|
||||
go 1.12
|
||||
11
packages/now-go/test/fixtures/10-go-mod/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/10-go-mod/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
package handler
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
5
packages/now-go/test/fixtures/10-go-mod/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/10-go-mod/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "index.go", "use": "@now/go" }],
|
||||
"probes": [{ "path": "/", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
12
packages/now-go/test/fixtures/11-go-mod-shared/api/index.go
vendored
Normal file
12
packages/now-go/test/fixtures/11-go-mod-shared/api/index.go
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
package api
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"with-shared/shared"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, shared.Say("RANDOMNESS_PLACEHOLDER"))
|
||||
}
|
||||
3
packages/now-go/test/fixtures/11-go-mod-shared/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/11-go-mod-shared/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module with-shared
|
||||
|
||||
go 1.12
|
||||
5
packages/now-go/test/fixtures/11-go-mod-shared/now.json
vendored
Normal file
5
packages/now-go/test/fixtures/11-go-mod-shared/now.json
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "api/*.go", "use": "@now/go" }],
|
||||
"probes": [{ "path": "/api", "mustContain": "RANDOMNESS_PLACEHOLDER" }]
|
||||
}
|
||||
6
packages/now-go/test/fixtures/11-go-mod-shared/shared/shared.go
vendored
Normal file
6
packages/now-go/test/fixtures/11-go-mod-shared/shared/shared.go
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
package shared
|
||||
|
||||
// Say func
|
||||
func Say(text string) string {
|
||||
return text
|
||||
}
|
||||
8
packages/now-go/test/fixtures/12-go-mod-subs/now.json
vendored
Normal file
8
packages/now-go/test/fixtures/12-go-mod-subs/now.json
vendored
Normal file
@@ -0,0 +1,8 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "**/*.go", "use": "@now/go" }],
|
||||
"probes": [
|
||||
{ "path": "/sub-1", "mustContain": "RANDOMNESS_PLACEHOLDER" },
|
||||
{ "path": "/sub-2", "mustContain": "RANDOMNESS_PLACEHOLDER" }
|
||||
]
|
||||
}
|
||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module sub-1
|
||||
|
||||
go 1.12
|
||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-1/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
package sub1
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/go.mod
vendored
Normal file
3
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/go.mod
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module sub-2
|
||||
|
||||
go 1.12
|
||||
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/index.go
vendored
Normal file
11
packages/now-go/test/fixtures/12-go-mod-subs/sub-2/index.go
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
package sub2
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
// Handler func
|
||||
func Handler(w http.ResponseWriter, r *http.Request) {
|
||||
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
|
||||
}
|
||||
@@ -26,8 +26,8 @@ for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture),
|
||||
),
|
||||
path.join(fixturesPath, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@ package main
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"flag"
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/parser"
|
||||
@@ -10,9 +11,22 @@ import (
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
)
|
||||
|
||||
var ignoredFoldersRegex []*regexp.Regexp
|
||||
|
||||
func init() {
|
||||
ignoredFolders := []string{"vendor", "testdata", ".now"}
|
||||
|
||||
// Build the regex that matches if a path contains the respective ignored folder
|
||||
// The pattern will look like: (.*/)?vendor/.*, which matches every path that contains a vendor folder
|
||||
for _, folder := range ignoredFolders {
|
||||
ignoredFoldersRegex = append(ignoredFoldersRegex, regexp.MustCompile("(.*/)?"+folder+"/.*"))
|
||||
}
|
||||
}
|
||||
|
||||
type analyze struct {
|
||||
PackageName string `json:"packageName"`
|
||||
FuncName string `json:"functionName"`
|
||||
@@ -40,8 +54,9 @@ func visit(files *[]string) filepath.WalkFunc {
|
||||
}
|
||||
|
||||
// we don't need Dirs, or test files
|
||||
// we only want `.go` files
|
||||
if info.IsDir() || itf || filepath.Ext(path) != ".go" {
|
||||
// we only want `.go` files. Further, we ignore
|
||||
// every file that is in one of the ignored folders.
|
||||
if info.IsDir() || itf || filepath.Ext(path) != ".go" || isInIgnoredFolder(path) {
|
||||
return nil
|
||||
}
|
||||
|
||||
@@ -50,6 +65,19 @@ func visit(files *[]string) filepath.WalkFunc {
|
||||
}
|
||||
}
|
||||
|
||||
// isInIgnoredFolder checks if the given path is in one of the ignored folders.
|
||||
func isInIgnoredFolder(path string) bool {
|
||||
// Make sure the regex works for Windows paths
|
||||
path = filepath.ToSlash(path)
|
||||
|
||||
for _, pattern := range ignoredFoldersRegex {
|
||||
if pattern.MatchString(path) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// return unique file
|
||||
func unique(files []string) []string {
|
||||
encountered := map[string]bool{}
|
||||
@@ -65,13 +93,13 @@ func unique(files []string) []string {
|
||||
}
|
||||
|
||||
func main() {
|
||||
if len(os.Args) != 2 {
|
||||
if len(os.Args) != 3 {
|
||||
// Args should have the program name on `0`
|
||||
// and the file name on `1`
|
||||
fmt.Println("Wrong number of args; Usage is:\n ./go-analyze file_name.go")
|
||||
fmt.Println("Wrong number of args; Usage is:\n ./go-analyze -modpath=module-path file_name.go")
|
||||
os.Exit(1)
|
||||
}
|
||||
fileName := os.Args[1]
|
||||
fileName := os.Args[2]
|
||||
rf, err := ioutil.ReadFile(fileName)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
@@ -95,6 +123,17 @@ func main() {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
// looking related packages
|
||||
var modPath string
|
||||
flag.StringVar(&modPath, "modpath", "", "module path")
|
||||
flag.Parse()
|
||||
if len(modPath) > 1 {
|
||||
err = filepath.Walk(modPath, visit(&files))
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
for _, file := range files {
|
||||
absFileName, _ := filepath.Abs(fileName)
|
||||
absFile, _ := filepath.Abs(file)
|
||||
@@ -127,7 +166,14 @@ func main() {
|
||||
for _, ed := range exportedDecl {
|
||||
if strings.Contains(se, ed) {
|
||||
// find relative path of related file
|
||||
rel, err := filepath.Rel(filepath.Dir(fileName), file)
|
||||
var basePath string
|
||||
if modPath == "" {
|
||||
basePath = filepath.Dir(fileName)
|
||||
} else {
|
||||
basePath = modPath
|
||||
}
|
||||
|
||||
rel, err := filepath.Rel(basePath, file)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
@@ -138,24 +184,32 @@ func main() {
|
||||
}
|
||||
|
||||
parsed := parse(fileName)
|
||||
offset := parsed.Pos()
|
||||
reqRep := "*http.Request http.ResponseWriter"
|
||||
|
||||
for _, decl := range parsed.Decls {
|
||||
fn, ok := decl.(*ast.FuncDecl)
|
||||
if !ok {
|
||||
// this declaraction is not a function
|
||||
// this declaration is not a function
|
||||
// so we're not interested
|
||||
continue
|
||||
}
|
||||
if fn.Name.IsExported() == true {
|
||||
// we found the first exported function
|
||||
// we're done!
|
||||
analyzed := analyze{
|
||||
PackageName: parsed.Name.Name,
|
||||
FuncName: fn.Name.Name,
|
||||
Watch: unique(relatedFiles),
|
||||
// find a valid `net/http` handler function
|
||||
for _, param := range fn.Type.Params.List {
|
||||
if strings.Contains(reqRep, string(rf[param.Type.Pos()-offset:param.Type.End()-offset])) {
|
||||
// we found the first exported function with `net/http`
|
||||
// we're done!
|
||||
analyzed := analyze{
|
||||
PackageName: parsed.Name.Name,
|
||||
FuncName: fn.Name.Name,
|
||||
Watch: unique(relatedFiles),
|
||||
}
|
||||
analyzedJSON, _ := json.Marshal(analyzed)
|
||||
fmt.Print(string(analyzedJSON))
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
json, _ := json.Marshal(analyzed)
|
||||
fmt.Print(string(json))
|
||||
os.Exit(0)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/lambda",
|
||||
"version": "0.5.3",
|
||||
"version": "0.5.4",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -1,11 +1,22 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } },
|
||||
{ "src": "subdirectory/index.zip", "use": "@now/lambda", "config": { "handler": "index.handler", "runtime": "nodejs8.10" } }
|
||||
{
|
||||
"src": "index.zip",
|
||||
"use": "@now/lambda",
|
||||
"config": { "handler": "index.handler", "runtime": "nodejs8.10" }
|
||||
},
|
||||
{
|
||||
"src": "subdirectory/index.zip",
|
||||
"use": "@now/lambda",
|
||||
"config": { "handler": "index.handler", "runtime": "nodejs8.10" }
|
||||
}
|
||||
],
|
||||
"probes": [
|
||||
{ "path": "/", "mustContain": "cow:NO_REPLACE_TO_AVOID_CRC_MISMATCH" },
|
||||
{ "path": "/subdirectory/", "mustContain": "yoda:NO_REPLACE_TO_AVOID_CRC_MISMATCH" }
|
||||
{
|
||||
"path": "/subdirectory/",
|
||||
"mustContain": "yoda:NO_REPLACE_TO_AVOID_CRC_MISMATCH"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
1
packages/now-layer-node/.gitignore
vendored
Normal file
1
packages/now-layer-node/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/dist
|
||||
32
packages/now-layer-node/package.json
Normal file
32
packages/now-layer-node/package.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "@now/layer-node",
|
||||
"version": "0.0.2",
|
||||
"main": "./dist/src/index",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-layer-node"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "tsc && jest",
|
||||
"prepublishOnly": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"fs-extra": "7.0.1",
|
||||
"node-fetch": "2.6.0",
|
||||
"promisepipe": "3.0.0",
|
||||
"stream-to-promise": "2.2.0",
|
||||
"tar": "4.4.6",
|
||||
"yauzl-promise": "2.1.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tar": "4.0.0",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"typescript": "3.3.3"
|
||||
}
|
||||
}
|
||||
37
packages/now-layer-node/src/index.ts
Normal file
37
packages/now-layer-node/src/index.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
import { glob, Files } from '@now/build-utils';
|
||||
import { mkdir, remove, pathExists } from 'fs-extra';
|
||||
import { install } from './install';
|
||||
|
||||
interface BuildLayerConfig {
|
||||
runtimeVersion: string;
|
||||
platform: string;
|
||||
arch: string;
|
||||
}
|
||||
|
||||
interface BuildLayerResult {
|
||||
files: Files;
|
||||
entrypoint: string;
|
||||
}
|
||||
|
||||
export async function buildLayer({
|
||||
runtimeVersion,
|
||||
platform,
|
||||
arch,
|
||||
}: BuildLayerConfig): Promise<BuildLayerResult> {
|
||||
const dir = join(
|
||||
tmpdir(),
|
||||
`now-layer-node-${runtimeVersion}-${platform}-${arch}`
|
||||
);
|
||||
const exists = await pathExists(dir);
|
||||
if (exists) {
|
||||
await remove(dir);
|
||||
}
|
||||
await mkdir(dir);
|
||||
const { entrypoint } = await install(dir, runtimeVersion, platform, arch);
|
||||
const files = await glob('{bin/node,bin/node.exe,include/**}', {
|
||||
cwd: dir,
|
||||
});
|
||||
return { files, entrypoint };
|
||||
}
|
||||
68
packages/now-layer-node/src/install.ts
Normal file
68
packages/now-layer-node/src/install.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import { basename, join } from 'path';
|
||||
import fetch from 'node-fetch';
|
||||
import { extract } from 'tar';
|
||||
import pipe from 'promisepipe';
|
||||
import { createWriteStream } from 'fs-extra';
|
||||
import { unzip, zipFromFile } from './unzip';
|
||||
|
||||
export async function install(
|
||||
dest: string,
|
||||
version: string,
|
||||
platform: string,
|
||||
arch: string
|
||||
) {
|
||||
const tarballUrl = getUrl(version, platform, arch);
|
||||
console.log('Downloading from ' + tarballUrl);
|
||||
console.log('Downloading to ' + dest);
|
||||
const res = await fetch(tarballUrl);
|
||||
if (!res.ok) {
|
||||
throw new Error(`HTTP request failed: ${res.status}`);
|
||||
}
|
||||
let entrypoint: string;
|
||||
if (platform === 'win32') {
|
||||
// Put it in the `bin` dir for consistency with the tarballs
|
||||
const finalDest = join(dest, 'bin');
|
||||
const zipName = basename(tarballUrl);
|
||||
const zipPath = join(dest, zipName);
|
||||
|
||||
await pipe(
|
||||
res.body,
|
||||
createWriteStream(zipPath)
|
||||
);
|
||||
|
||||
const zipFile = await zipFromFile(zipPath);
|
||||
await unzip(zipFile, finalDest, { strip: 1 });
|
||||
entrypoint = join('bin', 'node.exe');
|
||||
} else {
|
||||
const extractStream = extract({ strip: 1, C: dest });
|
||||
if (!extractStream.destroy) {
|
||||
// If there is an error in promisepipe,
|
||||
// it expects a destroy method
|
||||
extractStream.destroy = () => {};
|
||||
}
|
||||
await pipe(
|
||||
res.body,
|
||||
extractStream
|
||||
);
|
||||
entrypoint = join('bin', 'node');
|
||||
}
|
||||
|
||||
return { entrypoint };
|
||||
}
|
||||
|
||||
export function getUrl(
|
||||
version: string,
|
||||
platform: string = process.platform,
|
||||
arch: string = process.arch
|
||||
): string {
|
||||
let ext: string;
|
||||
let plat: string;
|
||||
if (platform === 'win32') {
|
||||
ext = 'zip';
|
||||
plat = 'win';
|
||||
} else {
|
||||
ext = 'tar.gz';
|
||||
plat = platform;
|
||||
}
|
||||
return `https://nodejs.org/dist/v${version}/node-v${version}-${plat}-${arch}.${ext}`;
|
||||
}
|
||||
96
packages/now-layer-node/src/unzip.ts
Normal file
96
packages/now-layer-node/src/unzip.ts
Normal file
@@ -0,0 +1,96 @@
|
||||
import { tmpdir } from 'os';
|
||||
import pipe from 'promisepipe';
|
||||
import { dirname, join } from 'path';
|
||||
import { createWriteStream, mkdirp, symlink, unlink } from 'fs-extra';
|
||||
import streamToPromise from 'stream-to-promise';
|
||||
import {
|
||||
Entry,
|
||||
ZipFile,
|
||||
open as zipFromFile,
|
||||
fromBuffer as zipFromBuffer,
|
||||
} from 'yauzl-promise';
|
||||
|
||||
export { zipFromFile, zipFromBuffer, ZipFile };
|
||||
|
||||
export async function unzipToTemp(
|
||||
data: Buffer | string,
|
||||
tmpDir: string = tmpdir()
|
||||
): Promise<string> {
|
||||
const dir = join(
|
||||
tmpDir,
|
||||
`zeit-fun-${Math.random()
|
||||
.toString(16)
|
||||
.substring(2)}`
|
||||
);
|
||||
let zip: ZipFile;
|
||||
if (Buffer.isBuffer(data)) {
|
||||
zip = await zipFromBuffer(data);
|
||||
} else {
|
||||
zip = await zipFromFile(data);
|
||||
}
|
||||
await unzip(zip, dir);
|
||||
await zip.close();
|
||||
return dir;
|
||||
}
|
||||
|
||||
interface UnzipOptions {
|
||||
strip?: number;
|
||||
}
|
||||
|
||||
export async function unzip(
|
||||
zipFile: ZipFile,
|
||||
dir: string,
|
||||
opts: UnzipOptions = {}
|
||||
): Promise<void> {
|
||||
let entry: Entry;
|
||||
const strip = opts.strip || 0;
|
||||
while ((entry = await zipFile.readEntry()) !== null) {
|
||||
const fileName =
|
||||
strip === 0
|
||||
? entry.fileName
|
||||
: entry.fileName
|
||||
.split('/')
|
||||
.slice(strip)
|
||||
.join('/');
|
||||
const destPath = join(dir, fileName);
|
||||
if (/\/$/.test(entry.fileName)) {
|
||||
await mkdirp(destPath);
|
||||
} else {
|
||||
const [entryStream] = await Promise.all([
|
||||
entry.openReadStream(),
|
||||
// ensure parent directory exists
|
||||
mkdirp(dirname(destPath)),
|
||||
]);
|
||||
const mode = entry.externalFileAttributes >>> 16;
|
||||
if (isSymbolicLink(mode)) {
|
||||
const linkDest = String(await streamToPromise(entryStream));
|
||||
await symlink(linkDest, destPath);
|
||||
} else {
|
||||
const octal = mode & 4095 /* 07777 */;
|
||||
const modeOctal = ('0000' + octal.toString(8)).slice(-4);
|
||||
const modeVal = parseInt(modeOctal, 8);
|
||||
try {
|
||||
await unlink(destPath);
|
||||
} catch (err) {
|
||||
if (err.code !== 'ENOENT') {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const destStream = createWriteStream(destPath, {
|
||||
mode: modeVal,
|
||||
});
|
||||
await pipe(
|
||||
entryStream,
|
||||
destStream
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const S_IFMT = 61440; /* 0170000 type of file */
|
||||
const S_IFLNK = 40960; /* 0120000 symbolic link */
|
||||
|
||||
export function isSymbolicLink(mode: number): boolean {
|
||||
return (mode & S_IFMT) === S_IFLNK;
|
||||
}
|
||||
54
packages/now-layer-node/test/test.js
Normal file
54
packages/now-layer-node/test/test.js
Normal file
@@ -0,0 +1,54 @@
|
||||
/* global jest, expect, it */
|
||||
jest.setTimeout(30 * 1000);
|
||||
const { buildLayer } = require('../');
|
||||
|
||||
describe('buildLayer', () => {
|
||||
it('should get node 10 and metadata for windows', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '10.16.0',
|
||||
platform: 'win32',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(entrypoint).toBe('bin/node.exe');
|
||||
expect(names.has('bin/node.exe')).toBeTruthy();
|
||||
expect(names.has('bin/npm.cmd')).toBeFalsy();
|
||||
expect(names.has('bin/npx.cmd')).toBeFalsy();
|
||||
expect(names.has('bin/node_modules')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get node 10 and metadata for macos', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '10.16.0',
|
||||
platform: 'darwin',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(entrypoint).toBe('bin/node');
|
||||
expect(names.has('bin/node')).toBeTruthy();
|
||||
expect(names.has('bin/npm')).toBeFalsy();
|
||||
expect(names.has('bin/npx')).toBeFalsy();
|
||||
expect(names.has('lib/node_modules')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get node 10 and metadata for linux', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '10.16.0',
|
||||
platform: 'linux',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(entrypoint).toBe('bin/node');
|
||||
expect(names.has('bin/node')).toBeTruthy();
|
||||
expect(names.has('include/node/node.h')).toBeTruthy();
|
||||
expect(names.has('bin/npm')).toBeFalsy();
|
||||
expect(names.has('bin/npx')).toBeFalsy();
|
||||
expect(names.has('lib/node_modules')).toBeFalsy();
|
||||
});
|
||||
});
|
||||
18
packages/now-layer-node/tsconfig.json
Normal file
18
packages/now-layer-node/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"outDir": "dist",
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
4
packages/now-layer-node/types/promisepipe.ts
Normal file
4
packages/now-layer-node/types/promisepipe.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
declare module 'promisepipe' {
|
||||
import { Stream } from 'stream';
|
||||
export default function pipe(...args: Stream[]): Promise<void>;
|
||||
}
|
||||
6
packages/now-layer-node/types/stream-to-promise.ts
Normal file
6
packages/now-layer-node/types/stream-to-promise.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
declare module 'stream-to-promise' {
|
||||
import { Stream } from 'stream';
|
||||
export default function streamToPromise(
|
||||
stream: NodeJS.ReadableStream
|
||||
): Promise<string>;
|
||||
}
|
||||
1
packages/now-layer-npm/.gitignore
vendored
Normal file
1
packages/now-layer-npm/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/dist
|
||||
29
packages/now-layer-npm/package.json
Normal file
29
packages/now-layer-npm/package.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "@now/layer-npm",
|
||||
"version": "0.0.2",
|
||||
"main": "./dist/src/index",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-layer-npm"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "tsc && jest",
|
||||
"prepublishOnly": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"fs-extra": "7.0.1",
|
||||
"node-fetch": "2.6.0",
|
||||
"promisepipe": "3.0.0",
|
||||
"tar": "4.4.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tar": "4.0.0",
|
||||
"typescript": "3.3.3"
|
||||
}
|
||||
}
|
||||
37
packages/now-layer-npm/src/index.ts
Normal file
37
packages/now-layer-npm/src/index.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
import { glob, Files } from '@now/build-utils';
|
||||
import { mkdir, remove, pathExists } from 'fs-extra';
|
||||
import { install } from './install';
|
||||
|
||||
interface BuildLayerConfig {
|
||||
runtimeVersion: string;
|
||||
platform: string;
|
||||
arch: string;
|
||||
}
|
||||
|
||||
interface BuildLayerResult {
|
||||
files: Files;
|
||||
entrypoint: string;
|
||||
}
|
||||
|
||||
export async function buildLayer({
|
||||
runtimeVersion,
|
||||
platform,
|
||||
arch,
|
||||
}: BuildLayerConfig): Promise<BuildLayerResult> {
|
||||
const dir = join(
|
||||
tmpdir(),
|
||||
`now-layer-npm-${runtimeVersion}-${platform}-${arch}`
|
||||
);
|
||||
const exists = await pathExists(dir);
|
||||
if (exists) {
|
||||
await remove(dir);
|
||||
}
|
||||
await mkdir(dir);
|
||||
const { entrypoint } = await install(dir, runtimeVersion);
|
||||
const files = await glob('{bin/**,lib/**,node_modules/**}', {
|
||||
cwd: dir,
|
||||
});
|
||||
return { files, entrypoint };
|
||||
}
|
||||
29
packages/now-layer-npm/src/install.ts
Normal file
29
packages/now-layer-npm/src/install.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { join } from 'path';
|
||||
import fetch from 'node-fetch';
|
||||
import { extract } from 'tar';
|
||||
import pipe from 'promisepipe';
|
||||
|
||||
/**
 * Download the npm tarball for `version` from the npm registry and
 * extract it into `dest`, then read the extracted package.json to find
 * the CLI entrypoint.
 *
 * @param dest - Directory to extract the package into (caller creates it).
 * @param version - Exact npm version to download, e.g. "6.9.0".
 * @returns The `bin.npm` script path from the package manifest.
 * @throws If the registry responds with a non-2xx status.
 */
export async function install(dest: string, version: string) {
  const tarballUrl = `https://registry.npmjs.org/npm/-/npm-${version}.tgz`;
  console.log('Downloading from ' + tarballUrl);
  console.log('Downloading to ' + dest);
  const res = await fetch(tarballUrl);
  if (!res.ok) {
    throw new Error(`HTTP request failed: ${res.status}`);
  }
  // `strip: 1` drops the tarball's leading "package/" directory;
  // `C` is tar's chdir option, so entries land directly under `dest`.
  const extractStream = extract({ strip: 1, C: dest });
  if (!extractStream.destroy) {
    // If there is an error in promisepipe,
    // it expects a destroy method
    extractStream.destroy = () => {};
  }
  await pipe(
    res.body,
    extractStream
  );

  const pathToManifest = join(dest, 'package.json');
  // NOTE(review): require() caches the manifest by absolute path — fine
  // while each version gets its own directory; verify if dest is reused.
  const manifest = require(pathToManifest);
  const entrypoint = manifest.bin.npm;
  return { entrypoint };
}
|
||||
50
packages/now-layer-npm/test/test.js
Normal file
50
packages/now-layer-npm/test/test.js
Normal file
@@ -0,0 +1,50 @@
|
||||
/* global jest, expect, it */
|
||||
jest.setTimeout(30 * 1000);
|
||||
const { buildLayer } = require('../');
|
||||
|
||||
describe('buildLayer', () => {
|
||||
it('should get npm 6 but not npm for windows', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '6.9.0',
|
||||
platform: 'win32',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/npm-cli.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/npm.cmd')).toBeTruthy();
|
||||
expect(names.has('bin/npx.cmd')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get npm 6 but not npm for macos', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '6.9.0',
|
||||
platform: 'darwin',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/npm-cli.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/npm')).toBeTruthy();
|
||||
expect(names.has('bin/npx')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get npm 6 but not npm for linux', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '6.9.0',
|
||||
platform: 'linux',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/npm-cli.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/npm')).toBeTruthy();
|
||||
expect(names.has('bin/npx')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
});
|
||||
18
packages/now-layer-npm/tsconfig.json
Normal file
18
packages/now-layer-npm/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"outDir": "dist",
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
4
packages/now-layer-npm/types/promisepipe.ts
Normal file
4
packages/now-layer-npm/types/promisepipe.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
declare module 'promisepipe' {
|
||||
import { Stream } from 'stream';
|
||||
export default function pipe(...args: Stream[]): Promise<void>;
|
||||
}
|
||||
1
packages/now-layer-yarn/.gitignore
vendored
Normal file
1
packages/now-layer-yarn/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
/dist
|
||||
29
packages/now-layer-yarn/package.json
Normal file
29
packages/now-layer-yarn/package.json
Normal file
@@ -0,0 +1,29 @@
|
||||
{
|
||||
"name": "@now/layer-yarn",
|
||||
"version": "0.0.2",
|
||||
"main": "./dist/src/index",
|
||||
"license": "MIT",
|
||||
"files": [
|
||||
"dist"
|
||||
],
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now-builders.git",
|
||||
"directory": "packages/now-layer-yarn"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "tsc",
|
||||
"test": "tsc && jest",
|
||||
"prepublishOnly": "tsc"
|
||||
},
|
||||
"dependencies": {
|
||||
"fs-extra": "7.0.1",
|
||||
"node-fetch": "2.6.0",
|
||||
"promisepipe": "3.0.0",
|
||||
"tar": "4.4.6"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/tar": "4.0.0",
|
||||
"typescript": "3.3.3"
|
||||
}
|
||||
}
|
||||
37
packages/now-layer-yarn/src/index.ts
Normal file
37
packages/now-layer-yarn/src/index.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
import { glob, Files } from '@now/build-utils';
|
||||
import { mkdir, remove, pathExists } from 'fs-extra';
|
||||
import { install } from './install';
|
||||
|
||||
interface BuildLayerConfig {
|
||||
runtimeVersion: string;
|
||||
platform: string;
|
||||
arch: string;
|
||||
}
|
||||
|
||||
interface BuildLayerResult {
|
||||
files: Files;
|
||||
entrypoint: string;
|
||||
}
|
||||
|
||||
export async function buildLayer({
|
||||
runtimeVersion,
|
||||
platform,
|
||||
arch,
|
||||
}: BuildLayerConfig): Promise<BuildLayerResult> {
|
||||
const dir = join(
|
||||
tmpdir(),
|
||||
`now-layer-yarn-${runtimeVersion}-${platform}-${arch}`
|
||||
);
|
||||
const exists = await pathExists(dir);
|
||||
if (exists) {
|
||||
await remove(dir);
|
||||
}
|
||||
await mkdir(dir);
|
||||
const { entrypoint } = await install(dir, runtimeVersion);
|
||||
const files = await glob('{bin/**,lib/**}', {
|
||||
cwd: dir,
|
||||
});
|
||||
return { files, entrypoint };
|
||||
}
|
||||
29
packages/now-layer-yarn/src/install.ts
Normal file
29
packages/now-layer-yarn/src/install.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { join } from 'path';
|
||||
import fetch from 'node-fetch';
|
||||
import { extract } from 'tar';
|
||||
import pipe from 'promisepipe';
|
||||
|
||||
export async function install(dest: string, version: string) {
|
||||
const tarballUrl = `https://registry.npmjs.org/yarn/-/yarn-${version}.tgz`;
|
||||
console.log('Downloading from ' + tarballUrl);
|
||||
console.log('Downloading to ' + dest);
|
||||
const res = await fetch(tarballUrl);
|
||||
if (!res.ok) {
|
||||
throw new Error(`HTTP request failed: ${res.status}`);
|
||||
}
|
||||
const extractStream = extract({ strip: 1, C: dest });
|
||||
if (!extractStream.destroy) {
|
||||
// If there is an error in promisepipe,
|
||||
// it expects a destroy method
|
||||
extractStream.destroy = () => {};
|
||||
}
|
||||
await pipe(
|
||||
res.body,
|
||||
extractStream
|
||||
);
|
||||
|
||||
const pathToManifest = join(dest, 'package.json');
|
||||
const manifest = require(pathToManifest);
|
||||
const entrypoint = manifest.bin.yarn;
|
||||
return { entrypoint };
|
||||
}
|
||||
49
packages/now-layer-yarn/test/test.js
Normal file
49
packages/now-layer-yarn/test/test.js
Normal file
@@ -0,0 +1,49 @@
|
||||
/* global jest, expect, it */
|
||||
jest.setTimeout(30 * 1000);
|
||||
const { buildLayer } = require('../');
|
||||
|
||||
describe('buildLayer', () => {
|
||||
it('should get yarn for windows', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '1.16.0',
|
||||
platform: 'win32',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/yarn.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/yarn.cmd')).toBeTruthy();
|
||||
expect(names.has('lib/cli.js')).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should get yarn for macos', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '1.16.0',
|
||||
platform: 'darwin',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/yarn.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/yarn')).toBeTruthy();
|
||||
expect(names.has('lib/cli.js')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
|
||||
it('should get yarn for linux', async () => {
|
||||
const { files, entrypoint } = await buildLayer({
|
||||
runtimeVersion: '1.16.0',
|
||||
platform: 'linux',
|
||||
arch: 'x64',
|
||||
});
|
||||
const names = new Set(Object.keys(files));
|
||||
expect(names).toBeTruthy();
|
||||
expect(entrypoint).toBe('./bin/yarn.js');
|
||||
expect(names.size).toBeGreaterThan(0);
|
||||
expect(names.has('bin/yarn')).toBeTruthy();
|
||||
expect(names.has('lib/cli.js')).toBeTruthy();
|
||||
expect(names.has('README.md')).toBeFalsy();
|
||||
});
|
||||
});
|
||||
18
packages/now-layer-yarn/tsconfig.json
Normal file
18
packages/now-layer-yarn/tsconfig.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"declaration": false,
|
||||
"esModuleInterop": true,
|
||||
"lib": ["esnext"],
|
||||
"module": "commonjs",
|
||||
"moduleResolution": "node",
|
||||
"noEmitOnError": true,
|
||||
"noFallthroughCasesInSwitch": true,
|
||||
"noImplicitReturns": true,
|
||||
"noUnusedLocals": true,
|
||||
"noUnusedParameters": true,
|
||||
"outDir": "dist",
|
||||
"types": ["node"],
|
||||
"strict": true,
|
||||
"target": "esnext"
|
||||
}
|
||||
}
|
||||
4
packages/now-layer-yarn/types/promisepipe.ts
Normal file
4
packages/now-layer-yarn/types/promisepipe.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
declare module 'promisepipe' {
|
||||
import { Stream } from 'stream';
|
||||
export default function pipe(...args: Stream[]): Promise<void>;
|
||||
}
|
||||
@@ -8,9 +8,11 @@ const { runNpmInstall } = require('@now/build-utils/fs/run-user-scripts.js'); //
|
||||
|
||||
const writeFile = promisify(fs.writeFile);
|
||||
|
||||
exports.build = async ({ files, entrypoint, workPath }) => {
|
||||
exports.build = async ({
|
||||
files, entrypoint, workPath, meta,
|
||||
}) => {
|
||||
console.log('downloading user files...');
|
||||
const downloadedFiles = await download(files, workPath);
|
||||
const downloadedFiles = await download(files, workPath, meta);
|
||||
console.log('writing package.json...');
|
||||
const packageJson = { dependencies: { 'mdx-deck': '1.7.15' } };
|
||||
const packageJsonPath = path.join(workPath, 'package.json');
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/mdx-deck",
|
||||
"version": "0.5.3",
|
||||
"version": "0.5.4",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/next",
|
||||
"version": "0.3.1",
|
||||
"version": "0.4.1",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index",
|
||||
"scripts": {
|
||||
@@ -14,7 +14,7 @@
|
||||
"directory": "packages/now-next"
|
||||
},
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "^1.1.2",
|
||||
"@now/node-bridge": "^1.1.4",
|
||||
"fs-extra": "^7.0.0",
|
||||
"get-port": "^5.0.0",
|
||||
"resolve-from": "^5.0.0",
|
||||
|
||||
@@ -32,10 +32,12 @@ import {
|
||||
getRoutes,
|
||||
includeOnlyEntryDirectory,
|
||||
normalizePackageJson,
|
||||
onlyStaticDirectory,
|
||||
filesFromDirectory,
|
||||
stringMap,
|
||||
syncEnvVars,
|
||||
validateEntrypoint,
|
||||
normalizePage,
|
||||
getDynamicRoutes,
|
||||
} from './utils';
|
||||
|
||||
interface BuildParamsMeta {
|
||||
@@ -155,14 +157,13 @@ export const build = async ({
|
||||
entrypoint,
|
||||
meta = {} as BuildParamsMeta,
|
||||
}: BuildParamsType): Promise<{
|
||||
routes?: any[];
|
||||
routes?: ({ src?: string; dest?: string } | { handle: string })[];
|
||||
output: Files;
|
||||
watch?: string[];
|
||||
childProcesses: ChildProcess[];
|
||||
}> => {
|
||||
validateEntrypoint(entrypoint);
|
||||
|
||||
const routes: any[] = [];
|
||||
const entryDirectory = path.dirname(entrypoint);
|
||||
const entryPath = path.join(workPath, entryDirectory);
|
||||
const dotNext = path.join(entryPath, '.next');
|
||||
@@ -191,6 +192,10 @@ export const build = async ({
|
||||
console.log(`${name} Installing dependencies...`);
|
||||
await runNpmInstall(entryPath, ['--prefer-offline']);
|
||||
|
||||
if (!process.env.NODE_ENV) {
|
||||
process.env.NODE_ENV = 'development';
|
||||
}
|
||||
|
||||
// The runtime env vars consist of the base `process.env` vars, but with the
|
||||
// build env vars removed, and the runtime env vars mixed in afterwards
|
||||
const runtimeEnv: EnvConfig = Object.assign({}, process.env);
|
||||
@@ -210,7 +215,13 @@ export const build = async ({
|
||||
|
||||
return {
|
||||
output: {},
|
||||
routes: getRoutes(entryDirectory, pathsInside, files, urls[entrypoint]),
|
||||
routes: getRoutes(
|
||||
entryPath,
|
||||
entryDirectory,
|
||||
pathsInside,
|
||||
files,
|
||||
urls[entrypoint]
|
||||
),
|
||||
watch: pathsInside,
|
||||
childProcesses: childProcess ? [childProcess] : [],
|
||||
};
|
||||
@@ -285,7 +296,10 @@ export const build = async ({
|
||||
await unlinkFile(path.join(entryPath, '.npmrc'));
|
||||
}
|
||||
|
||||
const exportedPageRoutes: { src: string; dest: string }[] = [];
|
||||
const lambdas: { [key: string]: Lambda } = {};
|
||||
const staticPages: { [key: string]: FileFsRef } = {};
|
||||
const dynamicPages: string[] = [];
|
||||
|
||||
if (isLegacy) {
|
||||
const filesAfterBuild = await glob('**', entryPath);
|
||||
@@ -310,7 +324,9 @@ export const build = async ({
|
||||
file => file.startsWith('node_modules/.cache')
|
||||
);
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
'now__bridge.js': new FileFsRef({
|
||||
fsPath: require('@now/node-bridge'),
|
||||
}),
|
||||
};
|
||||
const nextFiles: { [key: string]: FileFsRef } = {
|
||||
...nodeModules,
|
||||
@@ -372,15 +388,33 @@ export const build = async ({
|
||||
} else {
|
||||
console.log('preparing lambda files...');
|
||||
const launcherFiles = {
|
||||
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
|
||||
'now__bridge.js': new FileFsRef({
|
||||
fsPath: require('@now/node-bridge'),
|
||||
}),
|
||||
'now__launcher.js': new FileFsRef({
|
||||
fsPath: path.join(__dirname, 'launcher.js'),
|
||||
}),
|
||||
};
|
||||
const pages = await glob(
|
||||
'**/*.js',
|
||||
path.join(entryPath, '.next', 'serverless', 'pages')
|
||||
);
|
||||
const pagesDir = path.join(entryPath, '.next', 'serverless', 'pages');
|
||||
|
||||
const pages = await glob('**/*.js', pagesDir);
|
||||
const staticPageFiles = await glob('**/*.html', pagesDir);
|
||||
|
||||
Object.keys(staticPageFiles).forEach((page: string) => {
|
||||
const staticRoute = path.join(entryDirectory, page);
|
||||
staticPages[staticRoute] = staticPageFiles[page];
|
||||
|
||||
const pathname = page.replace(/\.html$/, '');
|
||||
|
||||
if (pathname.startsWith('$') || pathname.includes('/$')) {
|
||||
dynamicPages.push(pathname);
|
||||
}
|
||||
|
||||
exportedPageRoutes.push({
|
||||
src: `^${path.join('/', entryDirectory, pathname)}$`,
|
||||
dest: path.join('/', staticRoute),
|
||||
});
|
||||
});
|
||||
|
||||
const pageKeys = Object.keys(pages);
|
||||
|
||||
@@ -413,12 +447,16 @@ export const build = async ({
|
||||
await Promise.all(
|
||||
pageKeys.map(async page => {
|
||||
// These default pages don't have to be handled as they'd always 404
|
||||
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
|
||||
if (['_app.js', '_document.js'].includes(page)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pathname = page.replace(/\.js$/, '');
|
||||
|
||||
if (pathname.startsWith('$') || pathname.includes('/$')) {
|
||||
dynamicPages.push(normalizePage(pathname));
|
||||
}
|
||||
|
||||
console.log(`Creating lambda for page: "${page}"...`);
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
@@ -448,14 +486,61 @@ export const build = async ({
|
||||
{}
|
||||
);
|
||||
|
||||
const staticDirectoryFiles = onlyStaticDirectory(
|
||||
includeOnlyEntryDirectory(files, entryDirectory),
|
||||
entryDirectory
|
||||
const entryDirectoryFiles = includeOnlyEntryDirectory(files, entryDirectory);
|
||||
const staticDirectoryFiles = filesFromDirectory(
|
||||
entryDirectoryFiles,
|
||||
path.join(entryDirectory, 'static')
|
||||
);
|
||||
const publicDirectoryFiles = filesFromDirectory(
|
||||
entryDirectoryFiles,
|
||||
path.join(entryDirectory, 'public')
|
||||
);
|
||||
const publicFiles = Object.keys(publicDirectoryFiles).reduce(
|
||||
(mappedFiles, file) => ({
|
||||
...mappedFiles,
|
||||
[file.replace(/public[/\\]+/, '')]: publicDirectoryFiles[file],
|
||||
}),
|
||||
{}
|
||||
);
|
||||
|
||||
let dynamicRoutes = getDynamicRoutes(
|
||||
entryPath,
|
||||
entryDirectory,
|
||||
dynamicPages
|
||||
).map(route => {
|
||||
// make sure .html is added to dest for now until
|
||||
// outputting static files to clean routes is available
|
||||
if (staticPages[`${route.dest}.html`]) {
|
||||
route.dest = `${route.dest}.html`;
|
||||
}
|
||||
return route;
|
||||
});
|
||||
|
||||
return {
|
||||
output: { ...lambdas, ...staticFiles, ...staticDirectoryFiles },
|
||||
routes: [],
|
||||
output: {
|
||||
...publicFiles,
|
||||
...lambdas,
|
||||
...staticPages,
|
||||
...staticFiles,
|
||||
...staticDirectoryFiles,
|
||||
},
|
||||
routes: [
|
||||
// Static exported pages (.html rewrites)
|
||||
...exportedPageRoutes,
|
||||
// Next.js page lambdas, `static/` folder, reserved assets, and `public/`
|
||||
// folder
|
||||
{ handle: 'filesystem' },
|
||||
// Dynamic routes
|
||||
...dynamicRoutes,
|
||||
...(isLegacy
|
||||
? []
|
||||
: [
|
||||
{
|
||||
src: path.join('/', entryDirectory, '.*'),
|
||||
dest: path.join('/', entryDirectory, '_error'),
|
||||
},
|
||||
]),
|
||||
],
|
||||
watch: [],
|
||||
childProcesses: [],
|
||||
};
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
if (!process.env.NODE_ENV) {
|
||||
process.env.NODE_ENV = process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
|
||||
process.env.NODE_ENV =
|
||||
process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
|
||||
}
|
||||
|
||||
const { Server } = require('http');
|
||||
const { Bridge } = require('./now__bridge');
|
||||
const page = require('./page');
|
||||
|
||||
const server = new Server(page.render);
|
||||
// page.render is for React rendering
|
||||
// page.default is for /api rendering
|
||||
// page is for module.exports in /api
|
||||
const server = new Server(page.render || page.default || page);
|
||||
const bridge = new Bridge(server);
|
||||
bridge.listen();
|
||||
|
||||
|
||||
@@ -4,7 +4,8 @@ import url from 'url';
|
||||
import { Bridge } from './now__bridge';
|
||||
|
||||
if (!process.env.NODE_ENV) {
|
||||
process.env.NODE_ENV = process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
|
||||
process.env.NODE_ENV =
|
||||
process.env.NOW_REGION === 'dev1' ? 'development' : 'production';
|
||||
}
|
||||
|
||||
const app = next({});
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import resolveFrom from 'resolve-from';
|
||||
import { Files } from '@now/build-utils';
|
||||
|
||||
type stringMap = { [key: string]: string };
|
||||
@@ -73,11 +74,11 @@ function excludeLockFiles(files: Files): Files {
|
||||
}
|
||||
|
||||
/**
|
||||
* Include the static directory from files
|
||||
* Include only the files from a selected directory
|
||||
*/
|
||||
function onlyStaticDirectory(files: Files, entryDir: string): Files {
|
||||
function filesFromDirectory(files: Files, dir: string): Files {
|
||||
function matcher(filePath: string) {
|
||||
return !filePath.startsWith(path.join(entryDir, 'static'));
|
||||
return !filePath.startsWith(dir.replace(/\\/g, '/'));
|
||||
}
|
||||
|
||||
return excludeFiles(files, matcher);
|
||||
@@ -168,7 +169,18 @@ function getPathsInside(entryDirectory: string, files: Files) {
|
||||
return watch;
|
||||
}
|
||||
|
||||
function normalizePage(page: string): string {
|
||||
// remove '/index' from the end
|
||||
page = page.replace(/\/index$/, '/');
|
||||
// Resolve on anything that doesn't start with `/`
|
||||
if (!page.startsWith('/')) {
|
||||
page = `/${page}`;
|
||||
}
|
||||
return page;
|
||||
}
|
||||
|
||||
function getRoutes(
|
||||
entryPath: string,
|
||||
entryDirectory: string,
|
||||
pathsInside: string[],
|
||||
files: Files,
|
||||
@@ -195,8 +207,10 @@ function getRoutes(
|
||||
dest: `${url}/static/$1`,
|
||||
},
|
||||
];
|
||||
const filePaths = Object.keys(filesInside);
|
||||
const dynamicPages = [];
|
||||
|
||||
for (const file of Object.keys(filesInside)) {
|
||||
for (const file of filePaths) {
|
||||
const relativePath = path.relative(entryDirectory, file);
|
||||
const isPage = pathIsInside('pages', relativePath);
|
||||
|
||||
@@ -212,6 +226,10 @@ function getRoutes(
|
||||
continue;
|
||||
}
|
||||
|
||||
if (pageName.startsWith('$') || pageName.includes('/$')) {
|
||||
dynamicPages.push(normalizePage(pageName));
|
||||
}
|
||||
|
||||
routes.push({
|
||||
src: `${prefix}${pageName}`,
|
||||
dest: `${url}/${pageName}`,
|
||||
@@ -227,6 +245,82 @@ function getRoutes(
|
||||
}
|
||||
}
|
||||
|
||||
routes.push(
|
||||
...getDynamicRoutes(entryPath, entryDirectory, dynamicPages).map(
|
||||
(route: { src: string; dest: string }) => {
|
||||
// convert to make entire RegExp match as one group
|
||||
route.src = route.src.replace('^', '^(').replace('$', ')$');
|
||||
route.dest = `${url}/$1`;
|
||||
return route;
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
// Add public folder routes
|
||||
for (const file of filePaths) {
|
||||
const relativePath = path.relative(entryDirectory, file);
|
||||
const isPublic = pathIsInside('public', relativePath);
|
||||
|
||||
if (!isPublic) continue;
|
||||
|
||||
const fileName = path.relative('public', relativePath);
|
||||
const route = {
|
||||
src: `${prefix}${fileName}`,
|
||||
dest: `${url}/${fileName}`,
|
||||
};
|
||||
|
||||
// Only add the route if a page is not already using it
|
||||
if (!routes.some(r => r.src === route.src)) {
|
||||
routes.push(route);
|
||||
}
|
||||
}
|
||||
|
||||
return routes;
|
||||
}
|
||||
|
||||
export function getDynamicRoutes(
|
||||
entryPath: string,
|
||||
entryDirectory: string,
|
||||
dynamicPages: string[]
|
||||
): { src: string; dest: string }[] {
|
||||
if (!dynamicPages.length) {
|
||||
return [];
|
||||
}
|
||||
|
||||
let getRouteRegex:
|
||||
| ((pageName: string) => { re: RegExp })
|
||||
| undefined = undefined;
|
||||
|
||||
let getSortedRoutes: ((normalizedPages: string[]) => string[]) | undefined;
|
||||
|
||||
try {
|
||||
({ getRouteRegex, getSortedRoutes } = require(resolveFrom(
|
||||
entryPath,
|
||||
'next-server/dist/lib/router/utils'
|
||||
)));
|
||||
if (typeof getRouteRegex !== 'function') {
|
||||
getRouteRegex = undefined;
|
||||
}
|
||||
} catch (_) {}
|
||||
|
||||
if (!getRouteRegex || !getSortedRoutes) {
|
||||
throw new Error(
|
||||
'Found usage of dynamic routes but not on a new enough version of Next.js.'
|
||||
);
|
||||
}
|
||||
|
||||
const pageMatchers = getSortedRoutes(dynamicPages).map(pageName => ({
|
||||
pageName,
|
||||
matcher: getRouteRegex!(pageName).re,
|
||||
}));
|
||||
|
||||
const routes: { src: string; dest: string }[] = [];
|
||||
pageMatchers.forEach(pageMatcher => {
|
||||
routes.push({
|
||||
src: pageMatcher.matcher.source,
|
||||
dest: path.join('/', entryDirectory, pageMatcher.pageName),
|
||||
});
|
||||
});
|
||||
return routes;
|
||||
}
|
||||
|
||||
@@ -250,10 +344,11 @@ export {
|
||||
includeOnlyEntryDirectory,
|
||||
excludeLockFiles,
|
||||
normalizePackageJson,
|
||||
onlyStaticDirectory,
|
||||
filesFromDirectory,
|
||||
getNextConfig,
|
||||
getPathsInside,
|
||||
getRoutes,
|
||||
stringMap,
|
||||
syncEnvVars,
|
||||
normalizePage,
|
||||
};
|
||||
|
||||
@@ -12,10 +12,12 @@ it(
|
||||
} = await runBuildLambda(path.join(__dirname, 'standard'));
|
||||
expect(output.index).toBeDefined();
|
||||
const filePaths = Object.keys(output);
|
||||
const serverlessError = filePaths.some(filePath => filePath.match(/_error/));
|
||||
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
||||
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
||||
expect(hasUnderScoreAppStaticFile).toBeTruthy();
|
||||
expect(hasUnderScoreErrorStaticFile).toBeTruthy();
|
||||
expect(serverlessError).toBeTruthy();
|
||||
},
|
||||
FOUR_MINUTES,
|
||||
);
|
||||
@@ -28,6 +30,7 @@ it(
|
||||
} = await runBuildLambda(path.join(__dirname, 'monorepo'));
|
||||
expect(output['www/index']).toBeDefined();
|
||||
expect(output['www/static/test.txt']).toBeDefined();
|
||||
expect(output['www/data.txt']).toBeDefined();
|
||||
const filePaths = Object.keys(output);
|
||||
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
||||
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
||||
@@ -96,3 +99,14 @@ it(
|
||||
},
|
||||
FOUR_MINUTES,
|
||||
);
|
||||
|
||||
it(
|
||||
'Should build the public-files test',
|
||||
async () => {
|
||||
const {
|
||||
buildResult: { output },
|
||||
} = await runBuildLambda(path.join(__dirname, 'public-files'));
|
||||
expect(output['robots.txt']).toBeDefined();
|
||||
},
|
||||
FOUR_MINUTES,
|
||||
);
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{"src": "package.json", "use": "@now/next"}
|
||||
]
|
||||
"builds": [{ "src": "package.json", "use": "@now/next" }]
|
||||
}
|
||||
|
||||
@@ -3,4 +3,4 @@
|
||||
"isomorphic-unfetch": "latest",
|
||||
"next": "7.0.0"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{"src": "next.config.js", "use": "@now/next"}
|
||||
]
|
||||
"builds": [{ "src": "next.config.js", "use": "@now/next" }]
|
||||
}
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
"dependencies": {
|
||||
"next": "^7.0.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{"src": "next.config.js", "use": "@now/next"}
|
||||
]
|
||||
"builds": [{ "src": "next.config.js", "use": "@now/next" }]
|
||||
}
|
||||
|
||||
@@ -2,4 +2,4 @@
|
||||
"dependencies": {
|
||||
"next": "7.0.2"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{ "src": "www/package.json", "use": "@now/next" }
|
||||
]
|
||||
"version": 2,
|
||||
"builds": [{ "src": "www/package.json", "use": "@now/next" }]
|
||||
}
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
data
|
||||
@@ -1,6 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{"src": "pages/index.js", "use": "@now/next"}
|
||||
]
|
||||
"builds": [{ "src": "pages/index.js", "use": "@now/next" }]
|
||||
}
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
target: 'serverless',
|
||||
};
|
||||
4
packages/now-next/test/integration/public-files/now.json
Normal file
4
packages/now-next/test/integration/public-files/now.json
Normal file
@@ -0,0 +1,4 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "next.config.js", "use": "@now/next" }]
|
||||
}
|
||||
10
packages/now-next/test/integration/public-files/package.json
Normal file
10
packages/now-next/test/integration/public-files/package.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"scripts": {
|
||||
"now-build": "next build"
|
||||
},
|
||||
"dependencies": {
|
||||
"next": "8",
|
||||
"react": "16",
|
||||
"react-dom": "16"
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user