Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 04:22:13 +00:00)

Compare commits: @now/ruby@ ... @now/pytho (113 Commits)
Commits (SHA1):
03b5a0c0bf, f76abe3372, cdd43b74ae, fa633d0e02, 9b46e60c09, 58eef7f394, e97e0fbb64, b82876fd82, 02ad32ec22, 433fe35c93,
90c59d6ae2, 33672c3d78, 59ae7a989a, 5767e9e8c2, e62b9e8ed9, 59597ccd17, 7be49c66ef, 1380e25ef3, e825ce746f, 4e58951808,
fbd805aad7, 2a2705c6e3, 986c957183, c5d063e876, 500c36f5d4, 69dbbeac44, 69486c3adb, e6692bb79b, 94fba1d7af, 223d8f4774,
42e7a7e4e3, 6716fdd49b, 3b69092fd8, aa8eaedbc8, f519ed373f, 851dff4b03, c5ebaa11ea, 934fbc8992, 72cb5515fd, c7f0770d53,
7ea49e8ada, cae6ce96b3, 3699dfd756, 6dca96d877, 88c14b27a2, 0d2a9539f6, bae160bd7c, 92852ecff2, ac0c841cb8, 53e4b71f89,
017a2692ca, 311f89eecb, 40d2bc4743, 37160cbc8b, 3807a2b018, b6697dd432, 6c33496e8a, 89f32625ed, 8253e76ec0, e0b3e9606a,
dc75a303f7, c1eb8ec78c, 12435f25fd, d4dc5222cf, bf1e59b2d3, 3657e4a36e, 09efc1d865, 22bded50b6, b5b02be3c2, 776f372eb3,
81279fd40b, 3342485d29, 028ee848f5, 7e64c3b8a9, 704031f7b2, 5e3c184735, 88a8022787, 96844dc4a5, a09acd6969, 4e232f78de,
b146a04772, eaaa50e616, c893eaeb7a, 5bf7d7fd07, ca8fc92b94, 9956e85f12, 7fa4739c78, 0ef2e2a7ec, 8fd1752acf, 14a1446faf,
0c2c8c5ae5, 511b27ad39, e22ce7da0a, d9a4ce06bc, 77fb14cc60, 17c397211e, 6ca83644bc, d1946ea9b6, cc9eae3b71, 7bbc17df4b,
df6b2be482, 5ff6263fb7, 04dc8aaf73, 5435805e58, 903f819c5d, 5d927b2d25, b7a260cc6d, e8ba8fb97b, dd1d9d856b, eef4c65e5f,
3f64594a22, 3f5f71f8ab, 2a44179898
@@ -68,6 +68,9 @@ jobs:
          command: sudo apt install -y rsync
      - attach_workspace:
          at: .
      - run:
          name: Linking dependencies
          command: yarn bootstrap
      - run:
          name: Building
          command: yarn build
@@ -110,25 +113,6 @@ jobs:
          name: Linting Code
          command: yarn test-lint

  # test-unit:
  #   docker:
  #     - image: circleci/node:10
  #   working_directory: ~/repo
  #   steps:
  #     - checkout
  #     - attach_workspace:
  #         at: .
  #     - run:
  #         name: Compiling `now dev` HTML error templates
  #         command: node packages/now-cli/scripts/compile-templates.js
  #     - run:
  #         name: Running Unit Tests
  #         command: yarn test-unit --clean false
  #     - persist_to_workspace:
  #         root: .
  #         paths:
  #           - packages/now-cli/.nyc_output

  test-integration-macos-node-8:
    macos:
      xcode: '9.2.0'
@@ -348,6 +332,24 @@ jobs:
          name: Running Integration Tests Once
          command: yarn test-integration-once --clean false

  test-unit:
    docker:
      - image: circleci/node:10
    working_directory: ~/repo
    steps:
      - checkout
      - attach_workspace:
          at: .
      - run:
          name: Compiling `now dev` HTML error templates
          command: node packages/now-cli/scripts/compile-templates.js
      - run:
          name: Output version
          command: node --version
      - run:
          name: Running Unit Tests
          command: yarn test-unit --clean false

  coverage:
    docker:
      - image: circleci/node:10
@@ -387,36 +389,6 @@ jobs:
          name: Finalize Sentry Release
          command: sentry-cli releases finalize now-cli@`git describe --tags`

  publish-stable:
    docker:
      - image: circleci/node:10
    working_directory: ~/repo
    steps:
      - checkout
      - attach_workspace:
          at: .
      - run:
          name: Saving Authentication Information
          command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
      - run:
          name: Publishing to Stable Channel
          command: npm publish --tag latest

  publish-canary:
    docker:
      - image: circleci/node:10
    working_directory: ~/repo
    steps:
      - checkout
      - attach_workspace:
          at: .
      - run:
          name: Saving Authentication Information
          command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
      - run:
          name: Publishing to Canary Channel
          command: npm publish --tag canary

workflows:
  version: 2
  unscheduled:
@@ -437,12 +409,6 @@ workflows:
          filters:
            tags:
              only: /.*/
      # - test-unit:
      #     requires:
      #       - build
      #     filters:
      #       tags:
      #         only: /.*/
      - test-integration-macos-node-8:
          requires:
            - build
@@ -518,12 +484,14 @@ workflows:
      - test-integration-once:
          requires:
            - build
      - test-unit:
          requires:
            - build
          filters:
            tags:
              only: /.*/
      - coverage:
          requires:
            #- test-unit
            - test-integration-macos-node-8
            - test-integration-macos-node-10
            - test-integration-macos-node-12
@@ -537,23 +505,8 @@ workflows:
            - test-integration-linux-now-dev-node-10
            - test-integration-linux-now-dev-node-12
            - test-integration-once
            - test-unit
            - test-lint
          filters:
            tags:
              only: /.*/
      - publish-canary:
          requires:
            - coverage
          filters:
            tags:
              only: /^.*canary.*($|\b)/
            branches:
              ignore: /.*/
      - publish-stable:
          requires:
            - coverage
          filters:
            tags:
              only: /^(\d+\.)?(\d+\.)?(\*|\d+)$/
            branches:
              ignore: /.*/
2 .github/workflows/publish.yml vendored
@@ -9,7 +9,7 @@ on:
      - '!*'

jobs:
  publish:
  Publish:

    runs-on: ubuntu-latest
@@ -12,5 +12,6 @@ optimistic_updates = true

[merge.message]
title = "pull_request_title"
body = "pull_request_body"
include_pr_number = true
body_type = "markdown"
@@ -1,40 +0,0 @@
# Publishing to npm

Always publish to the Canary Channel as soon as a PR is merged into the `canary` branch.

```
yarn publish-canary
```

Publish the Stable Channel weekly.

- Cherry pick each commit from `canary` to `master` branch
- Verify that you are _in-sync_ with canary (with the exception of the `version` line in `package.json`)
- Deploy the modified Builders

```
# View differences excluding "Publish" commits
git checkout canary && git pull
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/canary.txt
git checkout master && git pull
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/master.txt
diff ~/Desktop/canary.txt ~/Desktop/master.txt

# Cherry pick all PRs from canary into master ...
git cherry-pick <PR501_COMMIT_SHA>
git cherry-pick <PR502_COMMIT_SHA>
git cherry-pick <PR503_COMMIT_SHA>
git cherry-pick <PR504_COMMIT_SHA>

# Verify the only difference is "version" in package.json
git diff origin/canary

# Ship it
yarn publish-stable
```

After running this publish step, GitHub Actions will take care of publishing the modified Builder packages to npm.

If for some reason GitHub Actions fails to publish the npm package, you may do so
manually by running `npm publish` from the package directory. Make sure to
use `npm publish --tag canary` if you are publishing a canary release!
@@ -1,6 +1,6 @@


[](https://circleci.com/gh/zeit/workflows/now)
[](https://circleci.com/gh/zeit/workflows/now/tree/master)
[](https://spectrum.chat/zeit)

**Note**: The [canary](https://github.com/zeit/now/tree/canary) branch is under heavy development – the stable release branch is [master](https://github.com/zeit/now/tree/master).
18 changelog.js Normal file
@@ -0,0 +1,18 @@
const { execSync } = require('child_process');

const commit = execSync('git log --pretty=format:"%s %H"')
  .toString()
  .trim()
  .split('\n')
  .find(line => line.startsWith('Publish '))
  .split(' ')
  .pop();

if (!commit) {
  throw new Error('Unable to find last publish commit');
}

const log = execSync(`git log --pretty=format:"- %s [%an]" ${commit}...HEAD`).toString().trim();

console.log(`Changes since the last publish commit ${commit}:`);
console.log(`\n${log}\n`);
36 diff.js Normal file
@@ -0,0 +1,36 @@
const { execSync } = require('child_process');
const { join } = require('path');
const { tmpdir } = require('os');
const { mkdirSync, writeFileSync } = require('fs');

function getCommits(count) {
  return execSync('git log --pretty=format:"%s [%an]"')
    .toString()
    .trim()
    .split('\n')
    .slice(0, count)
    .filter(line => !line.startsWith('Publish '))
    .join('\n');
}

function main(count = '100') {
  console.log(`Generating diff using last ${count} commits...`);
  const randomTmpId = Math.random().toString().slice(2);
  const dir = join(tmpdir(), 'now-diff' + randomTmpId);
  mkdirSync(dir);

  execSync('git checkout canary && git pull');
  const canary = getCommits(count);
  execSync('git checkout master && git pull');
  const master = getCommits(count);

  writeFileSync(join(dir, 'log.txt'), '# canary\n' + canary);
  execSync('git init && git add -A && git commit -m "init"', { cwd: dir });
  writeFileSync(join(dir, 'log.txt'), '# master\n' + master);

  console.log(`Done generating diff. Run the following:`);
  console.log(`cd ${dir}`);
  console.log('Then use `git diff` or `git difftool` to view the differences.');
}

main(process.argv[2]);
@@ -33,6 +33,8 @@
    "publish-stable": "git checkout master && git pull && lerna version --exact",
    "publish-canary": "git checkout canary && git pull && lerna version prerelease --preid canary --exact",
    "publish-from-github": "./.circleci/publish.sh",
    "diff": "node diff.js",
    "changelog": "node changelog.js",
    "build": "node run.js build all",
    "test-lint": "node run.js test-lint",
    "test-unit": "node run.js test-unit",
@@ -56,5 +58,8 @@
    "hooks": {
      "pre-commit": "lint-staged"
    }
  },
  "resolutions": {
    "signal-exit": "TooTallNate/signal-exit#update/sighub-to-sigint-on-windows"
  }
}
@@ -1,14 +1,18 @@
{
  "name": "gatsby-plugin-now",
  "version": "1.2.1",
  "version": "1.2.2",
  "main": "index.js",
  "license": "MIT",
  "homepage": "https://github.com/zeit/now/tree/canary/packages/gatsby-plugin-now",
  "homepage": "https://github.com/zeit/now/tree/canary/packages/gatsby-plugin-now#readme",
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now.git",
    "directory": "packages/gatsby-plugin-now"
  },
  "keywords": [
    "gatsby",
    "gatsby-plugin"
  ],
  "scripts": {
    "build": "./build.sh",
    "test-integration-once": "jest --verbose"
@@ -1,6 +1,6 @@
{
  "name": "@now/build-utils",
  "version": "0.10.0",
  "version": "0.11.0",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.js",
@@ -12,7 +12,8 @@
  },
  "scripts": {
    "build": "./build.sh",
    "test-integration-once": "jest --env node --verbose --runInBand",
    "test-unit": "jest --env node --verbose --runInBand test/unit.test.js",
    "test-integration-once": "jest --env node --verbose --runInBand test/integration.test.js",
    "prepublishOnly": "./build.sh"
  },
  "devDependencies": {
@@ -1,7 +1,5 @@
export default function debug(message: string, ...additional: any[]) {
  if (process.env.NOW_BUILDER_DEBUG) {
    console.log(message, ...additional);
  } else if (process.env.NOW_BUILDER_ANNOTATE) {
    console.log(`[now-builder-debug] ${message}`, ...additional);
  }
}
@@ -17,13 +17,13 @@ const MISSING_BUILD_SCRIPT_ERROR: ErrorResponse = {
  code: 'missing_build_script',
  message:
    'Your `package.json` file is missing a `build` property inside the `script` property.' +
    '\nMore details: https://zeit.co/docs/v2/advanced/platform/frequently-asked-questions#missing-build-script'
    '\nMore details: https://zeit.co/docs/v2/advanced/platform/frequently-asked-questions#missing-build-script',
};

// Static builders are special cased in `@now/static-build`
function getBuilders(): Map<string, Builder> {
  return new Map<string, Builder>([
    ['next', { src, use: '@now/next', config }]
    ['next', { src, use: '@now/next', config }],
  ]);
}

@@ -35,7 +35,7 @@ function getApiBuilders(): Builder[] {
    { src: 'api/**/*.ts', use: '@now/node', config },
    { src: 'api/**/*.go', use: '@now/go', config },
    { src: 'api/**/*.py', use: '@now/python', config },
    { src: 'api/**/*.rb', use: '@now/ruby', config }
    { src: 'api/**/*.rb', use: '@now/ruby', config },
  ];
}

@@ -107,6 +107,31 @@ async function detectApiBuilders(files: string[]): Promise<Builder[]> {
  return finishedBuilds as Builder[];
}

// When a package has files that conflict with `/api` routes
// e.g. Next.js pages/api we'll check it here and return an error.
async function checkConflictingFiles(
  files: string[],
  builders: Builder[]
): Promise<ErrorResponse | null> {
  // For Next.js
  if (builders.some(builder => builder.use.startsWith('@now/next'))) {
    const hasApiPages = files.some(file => file.startsWith('pages/api/'));
    const hasApiBuilders = builders.some(builder =>
      builder.src.startsWith('api/')
    );

    if (hasApiPages && hasApiBuilders) {
      return {
        code: 'conflicting_files',
        message:
          'It is not possible to use `api` and `pages/api` at the same time, please only use one option',
      };
    }
  }

  return null;
}

// When zero config is used we can call this function
// to determine what builders to use
export async function detectBuilders(
@@ -116,20 +141,28 @@
): Promise<{
  builders: Builder[] | null;
  errors: ErrorResponse[] | null;
  warnings: ErrorResponse[];
}> {
  const errors: ErrorResponse[] = [];
  const warnings: ErrorResponse[] = [];

  // Detect all builders for the `api` directory before anything else
  let builders = await detectApiBuilders(files);

  if (pkg && hasBuildScript(pkg)) {
    builders.push(await detectBuilder(pkg));

    const conflictError = await checkConflictingFiles(files, builders);

    if (conflictError) {
      warnings.push(conflictError);
    }
  } else {
    if (pkg && builders.length === 0) {
      // We only show this error when there are no api builders
      // since the dependencies of the pkg could be used for those
      errors.push(MISSING_BUILD_SCRIPT_ERROR);
      return { errors, builders: null };
      return { errors, warnings, builders: null };
    }

    // We allow a `public` directory
@@ -138,21 +171,19 @@
      builders.push({
        use: '@now/static',
        src: 'public/**/*',
        config
        config,
      });
    } else if (
      builders.length > 0 &&
      files.some(f => !f.startsWith('api/') && f !== 'package.json')
    ) {
      // Everything besides the api directory
      // and package.json can be served as static files
      builders.push({
        use: '@now/static',
        src: '!{api/**,package.json}',
        config,
      });
    } else if (builders.length > 0) {
      // We can't use pattern matching, since `!(api)` and `!(api)/**/*`
      // won't give the correct results
      builders.push(
        ...files
          .filter(name => !name.startsWith('api/'))
          .filter(name => !(name === 'package.json'))
          .map(name => ({
            use: '@now/static',
            src: name,
            config
          }))
      );
    }
  }

@@ -177,6 +208,7 @@

  return {
    builders: builders.length ? builders : null,
    errors: errors.length ? errors : null
    errors: errors.length ? errors : null,
    warnings,
  };
}
@@ -118,7 +118,7 @@ function partiallyMatches(pathA: string, pathB: string): boolean {
  return false;
}

// Counts how often a path occurres when all placeholders
// Counts how often a path occurs when all placeholders
// got resolved, so we can check if they have conflicts
function pathOccurrences(filePath: string, files: string[]): string[] {
  const getAbsolutePath = (unresolvedPath: string): string => {
@@ -226,7 +226,7 @@ async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
      error: {
        code: 'conflicting_path_segment',
        message:
          `The segment "${conflictingSegment}" occurres more than ` +
          `The segment "${conflictingSegment}" occurs more than ` +
          `one time in your path "${file}". Please make sure that ` +
          `every segment in a path is unique`
      }
@@ -4,11 +4,13 @@ import { File } from './types';

interface FileBlobOptions {
  mode?: number;
  contentType?: string;
  data: string | Buffer;
}

interface FromStreamOptions {
  mode?: number;
  contentType?: string;
  stream: NodeJS.ReadableStream;
}

@@ -16,16 +18,22 @@ export default class FileBlob implements File {
  public type: 'FileBlob';
  public mode: number;
  public data: string | Buffer;
  public contentType: string | undefined;

  constructor({ mode = 0o100644, data }: FileBlobOptions) {
  constructor({ mode = 0o100644, contentType, data }: FileBlobOptions) {
    assert(typeof mode === 'number');
    assert(typeof data === 'string' || Buffer.isBuffer(data));
    this.type = 'FileBlob';
    this.mode = mode;
    this.contentType = contentType;
    this.data = data;
  }

  static async fromStream({ mode = 0o100644, stream }: FromStreamOptions) {
  static async fromStream({
    mode = 0o100644,
    contentType,
    stream,
  }: FromStreamOptions) {
    assert(typeof mode === 'number');
    assert(typeof stream.pipe === 'function'); // is-stream
    const chunks: Buffer[] = [];
@@ -37,7 +45,7 @@ export default class FileBlob implements File {
    });

    const data = Buffer.concat(chunks);
    return new FileBlob({ mode, data });
    return new FileBlob({ mode, contentType, data });
  }

  toStream(): NodeJS.ReadableStream {
@@ -9,11 +9,13 @@ const semaToPreventEMFILE = new Sema(20);

interface FileFsRefOptions {
  mode?: number;
  contentType?: string;
  fsPath: string;
}

interface FromStreamOptions {
  mode: number;
  contentType?: string;
  stream: NodeJS.ReadableStream;
  fsPath: string;
}
@@ -22,17 +24,20 @@ class FileFsRef implements File {
  public type: 'FileFsRef';
  public mode: number;
  public fsPath: string;
  public contentType: string | undefined;

  constructor({ mode = 0o100644, fsPath }: FileFsRefOptions) {
  constructor({ mode = 0o100644, contentType, fsPath }: FileFsRefOptions) {
    assert(typeof mode === 'number');
    assert(typeof fsPath === 'string');
    this.type = 'FileFsRef';
    this.mode = mode;
    this.contentType = contentType;
    this.fsPath = fsPath;
  }

  static async fromFsPath({
    mode,
    contentType,
    fsPath,
  }: FileFsRefOptions): Promise<FileFsRef> {
    let m = mode;
@@ -40,11 +45,12 @@ class FileFsRef implements File {
      const stat = await fs.lstat(fsPath);
      m = stat.mode;
    }
    return new FileFsRef({ mode: m, fsPath });
    return new FileFsRef({ mode: m, contentType, fsPath });
  }

  static async fromStream({
    mode = 0o100644,
    contentType,
    stream,
    fsPath,
  }: FromStreamOptions): Promise<FileFsRef> {
@@ -63,7 +69,7 @@ class FileFsRef implements File {
      dest.on('error', reject);
    });

    return new FileFsRef({ mode, fsPath });
    return new FileFsRef({ mode, contentType, fsPath });
  }

  async toStreamAsync(): Promise<NodeJS.ReadableStream> {
@@ -8,6 +8,7 @@ import { File } from './types';
interface FileRefOptions {
  mode?: number;
  digest: string;
  contentType?: string;
  mutable?: boolean;
}

@@ -26,14 +27,21 @@ export default class FileRef implements File {
  public type: 'FileRef';
  public mode: number;
  public digest: string;
  public contentType: string | undefined;
  private mutable: boolean;

  constructor({ mode = 0o100644, digest, mutable = false }: FileRefOptions) {
  constructor({
    mode = 0o100644,
    digest,
    contentType,
    mutable = false,
  }: FileRefOptions) {
    assert(typeof mode === 'number');
    assert(typeof digest === 'string');
    this.type = 'FileRef';
    this.mode = mode;
    this.digest = digest;
    this.contentType = contentType;
    this.mutable = mutable;
  }
@@ -1,5 +1,6 @@
import { intersects } from 'semver';
import { NodeVersion } from '../types';
import debug from '../debug';

const supportedOptions: NodeVersion[] = [
  { major: 10, range: '10.x', runtime: 'nodejs10.x' },
@@ -20,7 +21,7 @@ export async function getSupportedNodeVersion(

  if (!engineRange) {
    if (!silent) {
      console.log(
      debug(
        'missing `engines` in `package.json`, using default range: ' +
          selection.range
      );
@@ -34,7 +35,7 @@ export async function getSupportedNodeVersion(
  });
  if (found) {
    if (!silent) {
      console.log(
      debug(
        'Found `engines` in `package.json`, selecting range: ' +
          selection.range
      );
@@ -5,18 +5,18 @@ import debug from '../debug';
|
||||
import spawn from 'cross-spawn';
|
||||
import { SpawnOptions } from 'child_process';
|
||||
import { deprecate } from 'util';
|
||||
import { cpus } from 'os';
|
||||
import { Meta, PackageJson, NodeVersion, Config } from '../types';
|
||||
import { getSupportedNodeVersion } from './node-version';
|
||||
|
||||
function spawnAsync(
|
||||
export function spawnAsync(
|
||||
command: string,
|
||||
args: string[],
|
||||
cwd: string,
|
||||
opts: SpawnOptions = {}
|
||||
) {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const stderrLogs: Buffer[] = [];
|
||||
opts = { stdio: 'inherit', cwd, ...opts };
|
||||
opts = { stdio: 'inherit', ...opts };
|
||||
const child = spawn(command, args, opts);
|
||||
|
||||
if (opts.stdio === 'pipe' && child.stderr) {
|
||||
@@ -55,7 +55,10 @@ export async function runShellScript(
|
||||
assert(path.isAbsolute(fsPath));
|
||||
const destPath = path.dirname(fsPath);
|
||||
await chmodPlusX(fsPath);
|
||||
await spawnAsync(`./${path.basename(fsPath)}`, args, destPath, spawnOpts);
|
||||
await spawnAsync(`./${path.basename(fsPath)}`, args, {
|
||||
cwd: destPath,
|
||||
...spawnOpts,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -140,33 +143,89 @@ export async function runNpmInstall(
|
||||
assert(path.isAbsolute(destPath));
|
||||
|
||||
let commandArgs = args;
|
||||
console.log(`installing to ${destPath}`);
|
||||
debug(`Installing to ${destPath}`);
|
||||
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
||||
|
||||
const opts = spawnOpts || { env: process.env };
|
||||
const opts = { cwd: destPath, ...spawnOpts } || {
|
||||
cwd: destPath,
|
||||
env: process.env,
|
||||
};
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||
await spawnAsync(
|
||||
'npm',
|
||||
commandArgs.concat(['install', '--unsafe-perm']),
|
||||
destPath,
|
||||
opts
|
||||
);
|
||||
} else {
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
commandArgs.concat(['--ignore-engines', '--cwd', destPath]),
|
||||
destPath,
|
||||
opts
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function runBundleInstall(
|
||||
destPath: string,
|
||||
args: string[] = [],
|
||||
spawnOpts?: SpawnOptions,
|
||||
meta?: Meta
|
||||
) {
|
||||
if (meta && meta.isDev) {
|
||||
debug('Skipping dependency installation because dev mode is enabled');
|
||||
return;
|
||||
}
|
||||
|
||||
assert(path.isAbsolute(destPath));
|
||||
const opts = { cwd: destPath, ...spawnOpts } || {
|
||||
cwd: destPath,
|
||||
env: process.env,
|
||||
};
|
||||
|
||||
await spawnAsync(
|
||||
'bundle',
|
||||
args.concat([
|
||||
'install',
|
||||
'--no-prune',
|
||||
'--retry',
|
||||
'3',
|
||||
'--jobs',
|
||||
String(cpus().length || 1),
|
||||
]),
|
||||
opts
|
||||
);
|
||||
}
|
||||
|
||||
export async function runPipInstall(
|
||||
destPath: string,
|
||||
args: string[] = [],
|
||||
spawnOpts?: SpawnOptions,
|
||||
meta?: Meta
|
||||
) {
|
||||
if (meta && meta.isDev) {
|
||||
debug('Skipping dependency installation because dev mode is enabled');
|
||||
return;
|
||||
}
|
||||
|
||||
assert(path.isAbsolute(destPath));
|
||||
const opts = { cwd: destPath, ...spawnOpts } || {
|
||||
cwd: destPath,
|
||||
env: process.env,
|
||||
};
|
||||
|
||||
await spawnAsync(
|
||||
'pip3',
|
||||
['install', '--disable-pip-version-check', ...args],
|
||||
opts
|
||||
);
|
||||
}
|
||||
|
||||
export async function runPackageJsonScript(
|
||||
destPath: string,
|
||||
scriptName: string,
|
||||
opts?: SpawnOptions
|
||||
spawnOpts?: SpawnOptions
|
||||
) {
|
||||
assert(path.isAbsolute(destPath));
|
||||
const { packageJson, hasPackageLockJson } = await scanParentDirs(
|
||||
@@ -181,17 +240,14 @@ export async function runPackageJsonScript(
|
||||
);
|
||||
if (!hasScript) return false;
|
||||
|
||||
const opts = { cwd: destPath, ...spawnOpts };
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
console.log(`running "npm run ${scriptName}"`);
|
||||
await spawnAsync('npm', ['run', scriptName], destPath, opts);
|
||||
console.log(`Running "npm run ${scriptName}"`);
|
||||
await spawnAsync('npm', ['run', scriptName], opts);
|
||||
} else {
|
||||
console.log(`running "yarn run ${scriptName}"`);
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
['--cwd', destPath, 'run', scriptName],
|
||||
destPath,
|
||||
opts
|
||||
);
|
||||
console.log(`Running "yarn run ${scriptName}"`);
|
||||
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], opts);
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
@@ -2,14 +2,18 @@ import FileBlob from './file-blob';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
import FileRef from './file-ref';
|
||||
import { Lambda, createLambda } from './lambda';
|
||||
import { Prerender } from './prerender';
|
||||
import download, { DownloadedFiles } from './fs/download';
|
||||
import getWriteableDirectory from './fs/get-writable-directory';
|
||||
import glob from './fs/glob';
|
||||
import rename from './fs/rename';
|
||||
import {
|
||||
spawnAsync,
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runBundleInstall,
|
||||
runPipInstall,
|
||||
runShellScript,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
@@ -26,14 +30,18 @@ export {
|
||||
FileRef,
|
||||
Lambda,
|
||||
createLambda,
|
||||
Prerender,
|
||||
download,
|
||||
DownloadedFiles,
|
||||
getWriteableDirectory,
|
||||
glob,
|
||||
rename,
|
||||
spawnAsync,
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runBundleInstall,
|
||||
runPipInstall,
|
||||
runShellScript,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
|
||||
42
packages/now-build-utils/src/prerender.ts
Normal file
42
packages/now-build-utils/src/prerender.ts
Normal file
@@ -0,0 +1,42 @@
|
||||
import FileBlob from './file-blob';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
import FileRef from './file-ref';
|
||||
import { Lambda } from './lambda';
|
||||
|
||||
interface PrerenderOptions {
|
||||
expiration: number;
|
||||
lambda: Lambda;
|
||||
fallback: FileBlob | FileFsRef | FileRef | null;
|
||||
group?: number;
|
||||
}
|
||||
|
||||
export class Prerender {
|
||||
public type: 'Prerender';
|
||||
public expiration: number;
|
||||
public lambda: Lambda;
|
||||
public fallback: FileBlob | FileFsRef | FileRef | null;
|
||||
public group?: number;
|
||||
|
||||
constructor({ expiration, lambda, fallback, group }: PrerenderOptions) {
|
||||
this.type = 'Prerender';
|
||||
this.expiration = expiration;
|
||||
this.lambda = lambda;
|
||||
|
||||
if (
|
||||
typeof group !== 'undefined' &&
|
||||
(group <= 0 || !Number.isInteger(group))
|
||||
) {
|
||||
throw new Error(
|
||||
'The `group` argument for `Prerender` needs to be a natural number.'
|
||||
);
|
||||
}
|
||||
this.group = group;
|
||||
|
||||
if (typeof fallback === 'undefined') {
|
||||
throw new Error(
|
||||
'The `fallback` argument for `Prerender` needs to be a `FileBlob`, `FileFsRef`, `FileRef`, or null.'
|
||||
);
|
||||
}
|
||||
this.fallback = fallback;
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,14 @@
|
||||
import FileRef from './file-ref';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
|
||||
export interface Env {
|
||||
[name: string]: string | undefined;
|
||||
}
|
||||
|
||||
export interface File {
|
||||
type: string;
|
||||
mode: number;
|
||||
contentType?: string;
|
||||
toStream: () => NodeJS.ReadableStream;
|
||||
/**
|
||||
* The absolute path to the file in the filesystem
|
||||
@@ -52,6 +57,8 @@ export interface Meta {
|
||||
requestPath?: string;
|
||||
filesChanged?: string[];
|
||||
filesRemoved?: string[];
|
||||
env?: Env;
|
||||
buildEnv?: Env;
|
||||
}
|
||||
|
||||
export interface AnalyzeOptions {
|
||||
|
||||
209
packages/now-build-utils/test/integration.test.js
vendored
Normal file
209
packages/now-build-utils/test/integration.test.js
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment');
|
||||
const { glob, detectBuilders, detectRoutes } = require('../');
|
||||
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
|
||||
const builderUrl = '@canary';
|
||||
let buildUtilsUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const buildUtilsPath = path.resolve(__dirname, '..');
|
||||
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
|
||||
console.log('buildUtilsUrl', buildUtilsUrl);
|
||||
});
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
if (fixture.includes('zero-config')) {
|
||||
// Those have separate tests
|
||||
continue; // eslint-disable-line no-continue
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
|
||||
// few foreign tests
|
||||
|
||||
const buildersToTestWith = ['now-next', 'now-node', 'now-static-build'];
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const builder of buildersToTestWith) {
|
||||
const fixturesPath2 = path.resolve(
|
||||
__dirname,
|
||||
`../../${builder}/test/fixtures`
|
||||
);
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
||||
// don't run all foreign fixtures, just some
|
||||
if (['01-cowsay', '01-cache-headers', '03-env-vars'].includes(fixture)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${builder}/${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath2, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes`', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '01-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/my-endpoint',
|
||||
mustContain: 'my-endpoint',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/other-endpoint',
|
||||
mustContain: 'other-endpoint',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/team/zeit',
|
||||
mustContain: 'team/zeit',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/user/myself',
|
||||
mustContain: 'user/myself',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/not-okay/',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt',
|
||||
},
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();
|
||||
});
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes` with `index` files', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/not-okay',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/index',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/index.js',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date.js',
|
||||
mustContain: 'hello from api/date.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
// Someone might expect this to be `date.js`,
|
||||
// but I doubt that there is any case were both
|
||||
// `date/index.js` and `date.js` exists,
|
||||
// so it is not special cased
|
||||
path: '/api/date',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date/',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date/index',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date/index.js',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt',
|
||||
},
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();
|
||||
});
|
||||
@@ -6,25 +6,8 @@ const { createZip } = require('../dist/lambda');
|
||||
const { glob, download, detectBuilders, detectRoutes } = require('../');
|
||||
const {
|
||||
getSupportedNodeVersion,
|
||||
defaultSelection
|
||||
defaultSelection,
|
||||
} = require('../dist/fs/node-version');
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment
|
||||
} = require('../../../test/lib/deployment/test-deployment');
|
||||
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
|
||||
const builderUrl = '@canary';
|
||||
let buildUtilsUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const buildUtilsPath = path.resolve(__dirname, '..');
|
||||
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
|
||||
console.log('buildUtilsUrl', buildUtilsUrl);
|
||||
});
|
||||
|
||||
// unit tests
|
||||
|
||||
it('should re-create symlinks properly', async () => {
|
||||
const files = await glob('**', path.join(__dirname, 'symlinks'));
|
||||
@@ -38,7 +21,7 @@ it('should re-create symlinks properly', async () => {
|
||||
|
||||
const [linkStat, aStat] = await Promise.all([
|
||||
fs.lstat(path.join(outDir, 'link.txt')),
|
||||
fs.lstat(path.join(outDir, 'a.txt'))
|
||||
fs.lstat(path.join(outDir, 'a.txt')),
|
||||
]);
|
||||
assert(linkStat.isSymbolicLink());
|
||||
assert(aStat.isFile());
|
||||
@@ -60,7 +43,7 @@ it('should create zip files with symlinks properly', async () => {
|
||||
|
||||
const [linkStat, aStat] = await Promise.all([
|
||||
fs.lstat(path.join(outDir, 'link.txt')),
|
||||
fs.lstat(path.join(outDir, 'a.txt'))
|
||||
fs.lstat(path.join(outDir, 'a.txt')),
|
||||
]);
|
||||
assert(linkStat.isSymbolicLink());
|
||||
assert(aStat.isFile());
|
||||
@@ -120,7 +103,7 @@ it('should support require by path for legacy builders', () => {
|
||||
const glob2 = require('@now/build-utils/fs/glob.js');
|
||||
const rename2 = require('@now/build-utils/fs/rename.js');
|
||||
const {
|
||||
runNpmInstall: runNpmInstall2
|
||||
runNpmInstall: runNpmInstall2,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
const streamToBuffer2 = require('@now/build-utils/fs/stream-to-buffer.js');
|
||||
|
||||
@@ -142,56 +125,6 @@ it('should support require by path for legacy builders', () => {
|
||||
expect(Lambda2).toBe(index.Lambda);
|
||||
});
|
||||
|
||||
// own fixtures
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
if (fixture.includes('zero-config')) {
|
||||
// Those have separate tests
|
||||
continue; // eslint-disable-line no-continue
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
|
||||
// few foreign tests
|
||||
|
||||
const buildersToTestWith = ['now-next', 'now-node', 'now-static-build'];
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const builder of buildersToTestWith) {
|
||||
const fixturesPath2 = path.resolve(
|
||||
__dirname,
|
||||
`../../${builder}/test/fixtures`
|
||||
);
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
||||
// don't run all foreign fixtures, just some
|
||||
if (['01-cowsay', '01-cache-headers', '03-env-vars'].includes(fixture)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${builder}/${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath2, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it('Test `detectBuilders`', async () => {
|
||||
{
|
||||
// package.json + no build
|
||||
@@ -206,7 +139,7 @@ it('Test `detectBuilders`', async () => {
|
||||
// package.json + no build + next
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'pages/index.js'];
|
||||
const { builders, errors } = await detectBuilders(files, pkg);
|
||||
@@ -218,7 +151,7 @@ it('Test `detectBuilders`', async () => {
|
||||
// package.json + no build + next
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'pages/index.js'];
|
||||
const { builders, errors } = await detectBuilders(files, pkg);
|
||||
@@ -258,7 +191,7 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/users.js');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('index.html');
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
@@ -270,10 +203,8 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/[endpoint].js');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('index.html');
|
||||
expect(builders[2].use).toBe('@now/static');
|
||||
expect(builders[2].src).toBe('static/image.png');
|
||||
expect(builders.length).toBe(3);
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
|
||||
@@ -282,7 +213,7 @@ it('Test `detectBuilders`', async () => {
|
||||
const files = [
|
||||
'api/_utils/handler.js',
|
||||
'api/[endpoint]/.helper.js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
@@ -295,7 +226,7 @@ it('Test `detectBuilders`', async () => {
|
||||
// api + next + public
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'api/endpoint.js', 'public/index.html'];
|
||||
|
||||
@@ -311,7 +242,7 @@ it('Test `detectBuilders`', async () => {
|
||||
// api + next + raw static
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'api/endpoint.js', 'index.html'];
|
||||
|
||||
@@ -331,10 +262,8 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/endpoint.js');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('favicon.ico');
|
||||
expect(builders[2].use).toBe('@now/static');
|
||||
expect(builders[2].src).toBe('index.html');
|
||||
expect(builders.length).toBe(3);
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders.length).toBe(2);
|
||||
}
|
||||
|
||||
{
|
||||
@@ -343,7 +272,7 @@ it('Test `detectBuilders`', async () => {
|
||||
'api/endpoint.js',
|
||||
'public/index.html',
|
||||
'public/favicon.ico',
|
||||
'README.md'
|
||||
'README.md',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
@@ -367,7 +296,7 @@ it('Test `detectBuilders`', async () => {
|
||||
// next + public
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'public/index.html', 'README.md'];
|
||||
|
||||
@@ -381,7 +310,7 @@ it('Test `detectBuilders`', async () => {
|
||||
// nuxt
|
||||
const pkg = {
|
||||
scripts: { build: 'nuxt build' },
|
||||
dependencies: { nuxt: '2.8.1' }
|
||||
dependencies: { nuxt: '2.8.1' },
|
||||
};
|
||||
const files = ['package.json', 'pages/index.js'];
|
||||
|
||||
@@ -433,12 +362,12 @@ it('Test `detectBuilders`', async () => {
|
||||
// package.json + api + canary
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = [
|
||||
'pages/index.js',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg, { tag: 'canary' });
|
||||
@@ -452,12 +381,12 @@ it('Test `detectBuilders`', async () => {
|
||||
// package.json + api + latest
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = [
|
||||
'pages/index.js',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg, { tag: 'latest' });
|
||||
@@ -471,12 +400,12 @@ it('Test `detectBuilders`', async () => {
|
||||
// package.json + api + random tag
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = [
|
||||
'pages/index.js',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg, { tag: 'haha' });
|
||||
@@ -485,6 +414,36 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[2].use).toBe('@now/next@haha');
|
||||
expect(builders.length).toBe(3);
|
||||
}
|
||||
|
||||
{
|
||||
// next.js pages/api + api
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['api/user.js', 'pages/api/user.js'];
|
||||
|
||||
const { warnings, errors, builders } = await detectBuilders(files, pkg);
|
||||
expect(errors).toBe(null);
|
||||
expect(warnings[0].code).toBe('conflicting_files');
|
||||
expect(builders).toBeDefined();
|
||||
expect(builders.length).toBe(2);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[1].use).toBe('@now/next');
|
||||
}
|
||||
|
||||
{
|
||||
// many static files + one api file
|
||||
const files = Array.from({ length: 5000 }).map((_, i) => `file${i}.html`);
|
||||
files.push('api/index.ts');
|
||||
const { builders } = await detectBuilders(files);
|
||||
|
||||
expect(builders.length).toBe(2);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/index.ts');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
}
|
||||
});
|
||||
|
||||
it('Test `detectRoutes`', async () => {
|
||||
@@ -545,7 +504,7 @@ it('Test `detectRoutes`', async () => {
|
||||
const files = [
|
||||
'public/index.html',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
@@ -560,7 +519,7 @@ it('Test `detectRoutes`', async () => {
|
||||
{
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['public/index.html', 'api/[endpoint].js'];
|
||||
|
||||
@@ -617,7 +576,7 @@ it('Test `detectRoutes`', async () => {
|
||||
'api/users/index.ts',
|
||||
'api/users/index.d.ts',
|
||||
'api/food.ts',
|
||||
'api/ts/gold.ts'
|
||||
'api/ts/gold.ts',
|
||||
];
|
||||
const { builders } = await detectBuilders(files);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
@@ -630,146 +589,3 @@ it('Test `detectRoutes`', async () => {
|
||||
expect(defaultRoutes.length).toBe(5);
|
||||
}
|
||||
});
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes`', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '01-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/my-endpoint',
|
||||
mustContain: 'my-endpoint',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/other-endpoint',
|
||||
mustContain: 'other-endpoint',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/team/zeit',
|
||||
mustContain: 'team/zeit',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/user/myself',
|
||||
mustContain: 'user/myself',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/not-okay/',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt'
|
||||
}
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();
|
||||
});
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes` with `index` files', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/not-okay',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/index',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/index.js',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date.js',
|
||||
mustContain: 'hello from api/date.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
// Someone might expect this to be `date.js`,
|
||||
// but I doubt that there is any case were both
|
||||
// `date/index.js` and `date.js` exists,
|
||||
// so it is not special cased
|
||||
path: '/api/date',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date/',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date/index',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date/index.js',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt'
|
||||
}
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();
|
||||
});
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
[](https://spectrum.chat/zeit)
|
||||
|
||||
## Usage
|
||||
## Usages
|
||||
|
||||
To install the latest version of Now CLI, visit [zeit.co/download](https://zeit.co/download) or run this command:
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "now",
|
||||
"version": "16.2.0",
|
||||
"version": "16.4.0",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Now",
|
||||
@@ -61,12 +61,6 @@
|
||||
"node": ">= 8.11"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@now/build-utils": "0.10.0",
|
||||
"@now/go": "latest",
|
||||
"@now/next": "latest",
|
||||
"@now/node": "latest",
|
||||
"@now/routing-utils": "1.2.3-canary.0",
|
||||
"@now/static-build": "latest",
|
||||
"@sentry/node": "5.5.0",
|
||||
"@types/ansi-escapes": "3.0.0",
|
||||
"@types/ansi-regex": "4.0.0",
|
||||
@@ -98,7 +92,7 @@
|
||||
"@types/which": "1.3.1",
|
||||
"@types/write-json-file": "2.2.1",
|
||||
"@zeit/dockerignore": "0.0.5",
|
||||
"@zeit/fun": "0.9.3",
|
||||
"@zeit/fun": "0.10.2",
|
||||
"@zeit/ncc": "0.18.5",
|
||||
"@zeit/source-map-support": "0.6.2",
|
||||
"ajv": "6.10.2",
|
||||
@@ -131,10 +125,10 @@
|
||||
"escape-html": "1.0.3",
|
||||
"esm": "3.1.4",
|
||||
"execa": "1.0.0",
|
||||
"fetch-h2": "2.0.3",
|
||||
"fs-extra": "7.0.1",
|
||||
"glob": "7.1.2",
|
||||
"http-proxy": "1.17.0",
|
||||
"ignore": "4.0.6",
|
||||
"ini": "1.3.4",
|
||||
"inquirer": "3.3.0",
|
||||
"is-url": "1.2.2",
|
||||
@@ -145,8 +139,9 @@
|
||||
"mime-types": "2.1.24",
|
||||
"minimatch": "3.0.4",
|
||||
"mri": "1.1.0",
|
||||
"ms": "2.1.1",
|
||||
"ms": "2.1.2",
|
||||
"node-fetch": "1.7.3",
|
||||
"now-client": "./packages/now-client",
|
||||
"npm-package-arg": "6.1.0",
|
||||
"nyc": "13.2.0",
|
||||
"ora": "3.4.0",
|
||||
@@ -173,6 +168,7 @@
|
||||
"through2": "2.0.3",
|
||||
"title": "3.4.1",
|
||||
"tmp-promise": "1.0.3",
|
||||
"tree-kill": "1.2.1",
|
||||
"ts-node": "8.3.0",
|
||||
"typescript": "3.2.4",
|
||||
"universal-analytics": "0.4.20",
|
||||
|
||||
@@ -8,16 +8,13 @@ import { createWriteStream, mkdirp, remove, writeJSON } from 'fs-extra';

import { getDistTag } from '../src/util/get-dist-tag';
import pkg from '../package.json';
import { getBundledBuilders } from '../src/util/dev/get-bundled-builders';

const dirRoot = join(__dirname, '..');

const bundledBuilders = Object.keys(pkg.devDependencies).filter(d =>
d.startsWith('@now/')
);

async function createBuildersTarball() {
const distTag = getDistTag(pkg.version);
const builders = Array.from(bundledBuilders).map(b => `${b}@${distTag}`);
const builders = Array.from(getBundledBuilders()).map(b => `${b}@${distTag}`);
console.log(`Creating builders tarball with: ${builders.join(', ')}`);

const buildersDir = join(dirRoot, '.builders');
@@ -39,7 +36,7 @@ async function createBuildersTarball() {
const yarn = join(dirRoot, '../../node_modules/yarn/bin/yarn.js');
await execa(process.execPath, [yarn, 'add', '--no-lockfile', ...builders], {
cwd: buildersDir,
stdio: 'inherit'
stdio: 'inherit',
});

const packer = tar.pack(buildersDir);
@@ -66,7 +63,7 @@ async function main() {
// Compile the `doT.js` template files for `now dev`
console.log();
await execa(process.execPath, [join(__dirname, 'compile-templates.js')], {
stdio: 'inherit'
stdio: 'inherit',
});

// Do the initial `ncc` build
@@ -92,20 +89,22 @@ async function main() {
// get compiled into the final ncc bundle file, however, we want them to be
// present in the npm package because the contents of those files are involved
// with `fun`'s cache invalidation mechanism and they need to be shasum'd.
const runtimes = join(dirRoot, '../../node_modules/@zeit/fun/dist/src/runtimes');
const runtimes = join(
dirRoot,
'../../node_modules/@zeit/fun/dist/src/runtimes'
);
const dest = join(dirRoot, 'dist/runtimes');
await cpy('**/*', dest, { parents: true, cwd: runtimes });

console.log('Finished building `now-cli`');
}

process.on('unhandledRejection', (err: any) => {
console.error('Unhandled Rejection:');
console.error(err);
process.on('unhandledRejection', (reason: any, promise: Promise<any>) => {
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
process.exit(1);
});

process.on('uncaughtException', (err: any) => {
process.on('uncaughtException', err => {
console.error('Uncaught Exception:');
console.error(err);
process.exit(1);

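(In the build-script hunk above, the inline `bundledBuilders` list derived from `pkg.devDependencies` is replaced by a `getBundledBuilders()` helper. The removed lines imply its likely shape; the sketch below is an assumption based on that removed code, not the actual contents of `src/util/dev/get-bundled-builders`. The relative path to `package.json` and the `resolveJsonModule` setting are also assumptions.)

```ts
// src/util/dev/get-bundled-builders.ts: hypothetical sketch only.
// Assumes `resolveJsonModule` is enabled and that package.json sits three
// directory levels up, as the path in the diff suggests.
import pkg from '../../../package.json';

export function getBundledBuilders(): string[] {
  // Bundled builders are the `@now/`-scoped devDependencies of now-cli.
  return Object.keys(pkg.devDependencies).filter(d => d.startsWith('@now/'));
}
```

The new location under `util/dev` suggests the same list is consumed by `now dev`, which would explain pulling it out of the build script, though the diff itself does not show that usage.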
@@ -38,7 +38,7 @@ export default async function set(
const {
authConfig: { token },
config,
localConfig
localConfig,
} = ctx;

const { currentTeam } = config;
@@ -48,14 +48,14 @@ export default async function set(
const {
'--debug': debugEnabled,
'--no-verify': noVerify,
'--rules': rulesPath
'--rules': rulesPath,
} = opts;

const client = new Client({
apiUrl,
token,
currentTeam,
debug: debugEnabled
debug: debugEnabled,
});
let contextName = null;
let user = null;
@@ -79,14 +79,14 @@ export default async function set(
return 1;
}

if (!isValidName(args[0])) {
if (args.length >= 1 && !isValidName(args[0])) {
output.error(
`The provided argument "${args[0]}" is not a valid deployment`
);
return 1;
}

if (!isValidName(args[1])) {
if (args.length >= 2 && !isValidName(args[1])) {
output.error(`The provided argument "${args[1]}" is not a valid domain`);
return 1;
}

@@ -3,18 +3,14 @@ import bytes from 'bytes';
import { write as copy } from 'clipboardy';
import chalk from 'chalk';
import title from 'title';
import Progress from 'progress';
import Client from '../../util/client';
import wait from '../../util/output/wait';
import { handleError } from '../../util/error';
import getArgs from '../../util/get-args';
import toHumanPath from '../../util/humanize-path';
import Now from '../../util';
import stamp from '../../util/output/stamp.ts';
import { isReady, isDone, isFailed } from '../../util/build-state';
import createDeploy from '../../util/deploy/create-deploy';
import getDeploymentByIdOrHost from '../../util/deploy/get-deployment-by-id-or-host';
import sleep from '../../util/sleep';
import parseMeta from '../../util/parse-meta';
import code from '../../util/output/code';
import param from '../../util/output/param';
@@ -36,12 +32,15 @@ import {
AliasDomainConfigured,
MissingBuildScript,
ConflictingFilePath,
ConflictingPathSegment
ConflictingPathSegment,
BuildError,
NotDomainOwner,
} from '../../util/errors-ts';
import { SchemaValidationFailed } from '../../util/errors';
import purchaseDomainIfAvailable from '../../util/domains/purchase-domain-if-available';
import handleCertError from '../../util/certs/handle-cert-error';
import isWildcardAlias from '../../util/alias/is-wildcard-alias';
import shouldDeployDir from '../../util/deploy/should-deploy-dir';

const addProcessEnv = async (log, env) => {
|
||||
let val;
|
||||
@@ -72,11 +71,12 @@ const addProcessEnv = async (log, env) => {
|
||||
};
|
||||
|
||||
const deploymentErrorMsg = `Your deployment failed. Please retry later. More: https://err.sh/now/deployment-error`;
|
||||
const prepareAlias = input => isWildcardAlias(input) ? input : `https://${input}`;
|
||||
const prepareAlias = input =>
|
||||
isWildcardAlias(input) ? input : `https://${input}`;
|
||||
|
||||
const printDeploymentStatus = async (
|
||||
output,
|
||||
{ url, readyState, alias: aliasList, aliasError },
|
||||
{ readyState, alias: aliasList, aliasError },
|
||||
deployStamp,
|
||||
clipboardEnabled,
|
||||
localConfig,
|
||||
@@ -94,10 +94,18 @@ const printDeploymentStatus = async (
|
||||
const preparedAlias = prepareAlias(firstAlias);
|
||||
try {
|
||||
await copy(`https://${firstAlias}`);
|
||||
output.ready(`Deployed to ${chalk.bold(chalk.cyan(preparedAlias))} ${chalk.gray('[in clipboard]')} ${deployStamp()}`);
|
||||
output.ready(
|
||||
`Deployed to ${chalk.bold(
|
||||
chalk.cyan(preparedAlias)
|
||||
)} ${chalk.gray('[in clipboard]')} ${deployStamp()}`
|
||||
);
|
||||
} catch (err) {
|
||||
output.debug(`Error copying to clipboard: ${err}`);
|
||||
output.ready(`Deployed to ${chalk.bold(chalk.cyan(preparedAlias))} ${deployStamp()}`);
|
||||
output.ready(
|
||||
`Deployed to ${chalk.bold(
|
||||
chalk.cyan(preparedAlias)
|
||||
)} ${deployStamp()}`
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -109,13 +117,17 @@ const printDeploymentStatus = async (
|
||||
|
||||
for (const alias of aliasList) {
|
||||
const index = aliasList.indexOf(alias);
|
||||
const isLast = index === (aliasList.length - 1);
|
||||
const isLast = index === aliasList.length - 1;
|
||||
const shouldCopy = matching ? alias === matching : isLast;
|
||||
|
||||
if (shouldCopy && clipboardEnabled) {
|
||||
try {
|
||||
await copy(`https://${alias}`);
|
||||
output.print(`- ${chalk.bold(chalk.cyan(prepareAlias(alias)))} ${chalk.gray('[in clipboard]')}\n`);
|
||||
output.print(
|
||||
`- ${chalk.bold(chalk.cyan(prepareAlias(alias)))} ${chalk.gray(
|
||||
'[in clipboard]'
|
||||
)}\n`
|
||||
);
|
||||
|
||||
continue;
|
||||
} catch (err) {
|
||||
@@ -138,20 +150,6 @@ const printDeploymentStatus = async (
|
||||
return 1;
|
||||
}
|
||||
|
||||
const failedBuilds = builds.filter(isFailed);
|
||||
const amount = failedBuilds.length;
|
||||
|
||||
if (amount > 0) {
|
||||
output.error('Build failed');
|
||||
output.error(
|
||||
`Check your logs at https://${url}/_logs or run ${code(
|
||||
`now logs ${url}`
|
||||
)}`
|
||||
);
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
output.error(deploymentErrorMsg);
|
||||
return 1;
|
||||
};
|
||||
@@ -206,7 +204,15 @@ export default async function main(
|
||||
return 1;
|
||||
}
|
||||
|
||||
const { apiUrl, authConfig: { token }, config: { currentTeam } } = ctx;
|
||||
if (!(await shouldDeployDir(argv._[0], output))) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const {
|
||||
apiUrl,
|
||||
authConfig: { token },
|
||||
config: { currentTeam },
|
||||
} = ctx;
|
||||
const { log, debug, error, warn } = output;
|
||||
const paths = Object.keys(stats);
|
||||
const debugEnabled = argv['--debug'];
|
||||
@@ -236,7 +242,6 @@ export default async function main(
|
||||
parseMeta(argv['--meta'])
|
||||
);
|
||||
|
||||
let syncCount;
|
||||
let deployStamp = stamp();
|
||||
let deployment = null;
|
||||
|
||||
@@ -289,11 +294,15 @@ export default async function main(
|
||||
parseEnv(argv['--env'])
|
||||
);
|
||||
|
||||
// Enable debug mode for builders
|
||||
const buildDebugEnv = debugEnabled ? { NOW_BUILDER_DEBUG: '1' } : {};
|
||||
|
||||
// Merge build env out of `build.env` from now.json, and `--build-env` args
|
||||
const deploymentBuildEnv = Object.assign(
|
||||
{},
|
||||
parseEnv(localConfig.build && localConfig.build.env),
|
||||
parseEnv(argv['--build-env'])
|
||||
parseEnv(argv['--build-env']),
|
||||
buildDebugEnv
|
||||
);
|
||||
|
||||
// If there's any undefined values, then inherit them from this process
|
||||
@@ -313,33 +322,45 @@ export default async function main(
|
||||
|
||||
try {
|
||||
// $FlowFixMe
|
||||
const project = getProjectName({argv, nowConfig: localConfig, isFile, paths});
|
||||
const project = getProjectName({
|
||||
argv,
|
||||
nowConfig: localConfig,
|
||||
isFile,
|
||||
paths,
|
||||
});
|
||||
log(`Using project ${chalk.bold(project)}`);
|
||||
|
||||
const createArgs = {
|
||||
name: project,
|
||||
env: deploymentEnv,
|
||||
build: { env: deploymentBuildEnv },
|
||||
forceNew: argv['--force'],
|
||||
quiet,
|
||||
wantsPublic: argv['--public'] || localConfig.public,
|
||||
isFile,
|
||||
type: null,
|
||||
nowConfig: localConfig,
|
||||
regions,
|
||||
meta
|
||||
name: project,
|
||||
env: deploymentEnv,
|
||||
build: { env: deploymentBuildEnv },
|
||||
forceNew: argv['--force'],
|
||||
quiet,
|
||||
wantsPublic: argv['--public'] || localConfig.public,
|
||||
isFile,
|
||||
type: null,
|
||||
nowConfig: localConfig,
|
||||
regions,
|
||||
meta,
|
||||
deployStamp,
|
||||
};
|
||||
|
||||
if (argv['--target']) {
|
||||
const deprecatedTarget = argv['--target'];
|
||||
|
||||
if (!['staging', 'production'].includes(deprecatedTarget)) {
|
||||
error(`The specified ${param('--target')} ${code(deprecatedTarget)} is not valid`);
|
||||
error(
|
||||
`The specified ${param('--target')} ${code(
|
||||
deprecatedTarget
|
||||
)} is not valid`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (deprecatedTarget === 'production') {
|
||||
warn('We recommend using the much shorter `--prod` option instead of `--target production` (deprecated)');
|
||||
warn(
|
||||
'We recommend using the much shorter `--prod` option instead of `--target production` (deprecated)'
|
||||
);
|
||||
}
|
||||
|
||||
output.debug(`Setting target to ${deprecatedTarget}`);
|
||||
@@ -351,7 +372,7 @@ export default async function main(
|
||||
|
||||
deployStamp = stamp();
|
||||
|
||||
const firstDeployCall = await createDeploy(
|
||||
deployment = await createDeploy(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
@@ -360,13 +381,49 @@ export default async function main(
|
||||
ctx
|
||||
);
|
||||
|
||||
if (deployment instanceof NotDomainOwner) {
|
||||
output.error(deployment);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const deploymentResponse = handleCertError(
|
||||
output,
|
||||
await getDeploymentByIdOrHost(now, contextName, deployment.id, 'v9')
|
||||
);
|
||||
|
||||
if (deploymentResponse === 1) {
|
||||
return deploymentResponse;
|
||||
}
|
||||
|
||||
if (
|
||||
firstDeployCall instanceof DomainNotFound &&
|
||||
firstDeployCall.meta && firstDeployCall.meta.domain
|
||||
deploymentResponse instanceof DeploymentNotFound ||
|
||||
deploymentResponse instanceof DeploymentPermissionDenied ||
|
||||
deploymentResponse instanceof InvalidDeploymentId
|
||||
) {
|
||||
output.debug(`The domain ${
|
||||
firstDeployCall.meta.domain
|
||||
} was not found, trying to purchase it`);
|
||||
output.error(deploymentResponse.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (handleCertError(output, deployment) === 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (deployment === null) {
|
||||
error('Uploading failed. Please try again.');
|
||||
return 1;
|
||||
}
|
||||
} catch (err) {
|
||||
debug(`Error: ${err}\n${err.stack}`);
|
||||
|
||||
if (err instanceof NotDomainOwner) {
|
||||
output.error(err.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (err instanceof DomainNotFound && err.meta && err.meta.domain) {
|
||||
output.debug(
|
||||
`The domain ${err.meta.domain} was not found, trying to purchase it`
|
||||
);
|
||||
|
||||
const purchase = await purchaseDomainIfAvailable(
|
||||
output,
|
||||
@@ -374,16 +431,14 @@ export default async function main(
|
||||
apiUrl: ctx.apiUrl,
|
||||
token: ctx.authConfig.token,
|
||||
currentTeam: ctx.config.currentTeam,
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
}),
|
||||
firstDeployCall.meta.domain,
|
||||
err.meta.domain,
|
||||
contextName
|
||||
);
|
||||
|
||||
if (purchase === true) {
|
||||
output.success(`Successfully purchased the domain ${
|
||||
firstDeployCall.meta.domain
|
||||
}!`);
|
||||
output.success(`Successfully purchased the domain ${err.meta.domain}!`);
|
||||
|
||||
// We exit if the purchase is completed since
|
||||
// the domain verification can take some time
|
||||
@@ -391,7 +446,7 @@ export default async function main(
|
||||
}
|
||||
|
||||
if (purchase === false || purchase instanceof UserAborted) {
|
||||
handleCreateDeployError(output, firstDeployCall);
|
||||
handleCreateDeployError(output, deployment);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -399,120 +454,36 @@ export default async function main(
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (handleCertError(output, firstDeployCall) === 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (
|
||||
firstDeployCall instanceof DomainNotFound ||
|
||||
firstDeployCall instanceof DomainNotVerified ||
|
||||
firstDeployCall instanceof DomainPermissionDenied ||
|
||||
firstDeployCall instanceof DomainVerificationFailed ||
|
||||
firstDeployCall instanceof SchemaValidationFailed ||
|
||||
firstDeployCall instanceof InvalidDomain ||
|
||||
firstDeployCall instanceof DeploymentNotFound ||
|
||||
firstDeployCall instanceof BuildsRateLimited ||
|
||||
firstDeployCall instanceof DeploymentsRateLimited ||
|
||||
firstDeployCall instanceof AliasDomainConfigured ||
|
||||
firstDeployCall instanceof MissingBuildScript ||
|
||||
firstDeployCall instanceof ConflictingFilePath ||
|
||||
firstDeployCall instanceof ConflictingPathSegment
|
||||
err instanceof DomainNotFound ||
|
||||
err instanceof DomainNotVerified ||
|
||||
err instanceof NotDomainOwner ||
|
||||
err instanceof DomainPermissionDenied ||
|
||||
err instanceof DomainVerificationFailed ||
|
||||
err instanceof SchemaValidationFailed ||
|
||||
err instanceof InvalidDomain ||
|
||||
err instanceof DeploymentNotFound ||
|
||||
err instanceof BuildsRateLimited ||
|
||||
err instanceof DeploymentsRateLimited ||
|
||||
err instanceof AliasDomainConfigured ||
|
||||
err instanceof MissingBuildScript ||
|
||||
err instanceof ConflictingFilePath ||
|
||||
err instanceof ConflictingPathSegment
|
||||
) {
|
||||
handleCreateDeployError(output, firstDeployCall);
|
||||
handleCreateDeployError(output, err);
|
||||
return 1;
|
||||
}
|
||||
|
||||
deployment = firstDeployCall;
|
||||
if (err instanceof BuildError) {
|
||||
output.error('Build failed');
|
||||
output.error(
|
||||
`Check your logs at ${now.url}/_logs or run ${code(
|
||||
`now logs ${now.url}`
|
||||
)}`
|
||||
);
|
||||
|
||||
if (now.syncFileCount > 0) {
|
||||
const uploadStamp = stamp();
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
if (now.syncFileCount !== now.fileCount) {
|
||||
debug(`Total files ${now.fileCount}, ${now.syncFileCount} changed`);
|
||||
}
|
||||
|
||||
const size = bytes(now.syncAmount);
|
||||
syncCount = `${now.syncFileCount} file${now.syncFileCount > 1
|
||||
? 's'
|
||||
: ''}`;
|
||||
const bar = new Progress(
|
||||
`${chalk.gray(
|
||||
'>'
|
||||
)} Upload [:bar] :percent :etas (${size}) [${syncCount}]`,
|
||||
{
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: now.syncAmount,
|
||||
clear: true
|
||||
}
|
||||
);
|
||||
|
||||
now.upload({ scale: {} });
|
||||
|
||||
now.on('upload', ({ names, data }) => {
|
||||
debug(`Uploaded: ${names.join(' ')} (${bytes(data.length)})`);
|
||||
});
|
||||
|
||||
now.on('uploadProgress', progress => {
|
||||
bar.tick(progress);
|
||||
});
|
||||
|
||||
now.on('complete', resolve);
|
||||
|
||||
now.on('error', err => {
|
||||
error('Upload failed');
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
if (!quiet && syncCount) {
|
||||
log(`Synced ${syncCount} (${bytes(now.syncAmount)}) ${uploadStamp()}`);
|
||||
}
|
||||
|
||||
for (let i = 0; i < 4; i += 1) {
|
||||
deployStamp = stamp();
|
||||
const secondDeployCall = await createDeploy(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
paths,
|
||||
createArgs
|
||||
);
|
||||
|
||||
if (handleCertError(output, secondDeployCall) === 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (
|
||||
secondDeployCall instanceof DomainPermissionDenied ||
|
||||
secondDeployCall instanceof DomainVerificationFailed ||
|
||||
secondDeployCall instanceof SchemaValidationFailed ||
|
||||
secondDeployCall instanceof DeploymentNotFound ||
|
||||
secondDeployCall instanceof DeploymentsRateLimited ||
|
||||
secondDeployCall instanceof AliasDomainConfigured ||
|
||||
secondDeployCall instanceof MissingBuildScript ||
|
||||
secondDeployCall instanceof ConflictingFilePath ||
|
||||
secondDeployCall instanceof ConflictingPathSegment
|
||||
) {
|
||||
handleCreateDeployError(output, secondDeployCall);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (now.syncFileCount === 0) {
|
||||
deployment = secondDeployCall;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (deployment === null) {
|
||||
error('Uploading failed. Please try again.');
|
||||
return 1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
} catch (err) {
|
||||
debug(`Error: ${err}\n${err.stack}`);
|
||||
|
||||
if (err.keyword === 'additionalProperties' && err.dataPath === '.scale') {
|
||||
const { additionalProperty = '' } = err.params || {};
|
||||
@@ -531,114 +502,14 @@ export default async function main(
|
||||
return 1;
|
||||
}
|
||||
|
||||
const { url } = now;
|
||||
|
||||
if (isTTY) {
|
||||
log(`${url} ${chalk.gray(`[v2]`)} ${deployStamp()}`);
|
||||
} else {
|
||||
process.stdout.write(url);
|
||||
}
|
||||
|
||||
// If an error occurred, we want to let it fall down to rendering
|
||||
// builds so the user can see in which build the error occurred.
|
||||
if (isReady(deployment)) {
|
||||
return printDeploymentStatus(output, deployment, deployStamp, !argv['--no-clipboard'], localConfig);
|
||||
}
|
||||
|
||||
const sleepingTime = ms('1.5s');
|
||||
const allBuildsTime = stamp();
|
||||
const times = {};
|
||||
const buildsUrl = `/v1/now/deployments/${deployment.id}/builds`;
|
||||
|
||||
let builds = [];
|
||||
let buildsCompleted = false;
|
||||
let buildSpinner = null;
|
||||
|
||||
let deploymentSpinner = null;
|
||||
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
if (!buildsCompleted) {
|
||||
const { builds: freshBuilds } = await now.fetch(buildsUrl);
|
||||
|
||||
// If there are no builds, we need to exit.
|
||||
if (freshBuilds.length === 0 || freshBuilds.every(isDone)) {
|
||||
builds = freshBuilds;
|
||||
buildsCompleted = true;
|
||||
} else {
|
||||
for (const build of freshBuilds) {
|
||||
const id = build.id;
|
||||
const done = isDone(build);
|
||||
|
||||
if (times[id]) {
|
||||
if (done && typeof times[id] === 'function') {
|
||||
times[id] = times[id]();
|
||||
}
|
||||
} else {
|
||||
times[id] = done ? allBuildsTime() : stamp();
|
||||
}
|
||||
}
|
||||
|
||||
if (JSON.stringify(builds) !== JSON.stringify(freshBuilds)) {
|
||||
builds = freshBuilds;
|
||||
|
||||
if (buildSpinner === null) {
|
||||
buildSpinner = wait('Building...');
|
||||
}
|
||||
|
||||
buildsCompleted = builds.every(isDone);
|
||||
|
||||
if (builds.some(isFailed)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const deploymentResponse = handleCertError(
|
||||
output,
|
||||
await getDeploymentByIdOrHost(now, contextName, deployment.id, 'v9')
|
||||
)
|
||||
|
||||
if (deploymentResponse === 1) {
|
||||
return deploymentResponse;
|
||||
}
|
||||
|
||||
if (
|
||||
deploymentResponse instanceof DeploymentNotFound ||
|
||||
deploymentResponse instanceof DeploymentPermissionDenied ||
|
||||
deploymentResponse instanceof InvalidDeploymentId
|
||||
) {
|
||||
output.error(deploymentResponse.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (isReady(deploymentResponse) || isFailed(deploymentResponse)) {
|
||||
deployment = deploymentResponse;
|
||||
|
||||
if (typeof deploymentSpinner === 'function') {
|
||||
// This stops it
|
||||
deploymentSpinner();
|
||||
}
|
||||
|
||||
break;
|
||||
} else if (!deploymentSpinner) {
|
||||
if (typeof buildSpinner === 'function') {
|
||||
buildSpinner();
|
||||
}
|
||||
|
||||
deploymentSpinner = wait('Finalizing...');
|
||||
}
|
||||
}
|
||||
|
||||
await sleep(sleepingTime);
|
||||
}
|
||||
|
||||
if (typeof buildSpinner === 'function') {
|
||||
buildSpinner();
|
||||
}
|
||||
|
||||
return printDeploymentStatus(output, deployment, deployStamp, !argv['--no-clipboard'], localConfig, builds);
|
||||
};
|
||||
return printDeploymentStatus(
|
||||
output,
|
||||
deployment,
|
||||
deployStamp,
|
||||
!argv['--no-clipboard'],
|
||||
localConfig
|
||||
);
|
||||
}
|
||||
|
||||
function handleCreateDeployError(output, error) {
|
||||
if (error instanceof InvalidDomain) {
|
||||
@@ -708,18 +579,20 @@ function handleCreateDeployError(output, error) {
|
||||
}
|
||||
if (error instanceof TooManyRequests) {
|
||||
output.error(
|
||||
`Too many requests detected for ${error.meta
|
||||
.api} API. Try again in ${ms(error.meta.retryAfter * 1000, {
|
||||
long: true
|
||||
})}.`
|
||||
`Too many requests detected for ${error.meta.api} API. Try again in ${ms(
|
||||
error.meta.retryAfter * 1000,
|
||||
{
|
||||
long: true,
|
||||
}
|
||||
)}.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
if (error instanceof DomainNotVerified) {
|
||||
output.error(
|
||||
`The domain used as an alias ${
|
||||
chalk.underline(error.meta.domain)
|
||||
} is not verified yet. Please verify it.`
|
||||
`The domain used as an alias ${chalk.underline(
|
||||
error.meta.domain
|
||||
)} is not verified yet. Please verify it.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
@@ -730,6 +603,7 @@ function handleCreateDeployError(output, error) {
|
||||
}
|
||||
if (
|
||||
error instanceof DeploymentNotFound ||
|
||||
error instanceof NotDomainOwner ||
|
||||
error instanceof DeploymentsRateLimited ||
|
||||
error instanceof AliasDomainConfigured ||
|
||||
error instanceof MissingBuildScript ||
|
||||
|
||||
@@ -2,7 +2,6 @@ import { resolve, basename, join } from 'path';
|
||||
import { eraseLines } from 'ansi-escapes';
|
||||
// @ts-ignore
|
||||
import { write as copy } from 'clipboardy';
|
||||
import bytes from 'bytes';
|
||||
import chalk from 'chalk';
|
||||
import dotenv from 'dotenv';
|
||||
import fs from 'fs-extra';
|
||||
@@ -13,7 +12,6 @@ import ms from 'ms';
|
||||
// @ts-ignore
|
||||
import title from 'title';
|
||||
import plural from 'pluralize';
|
||||
import Progress from 'progress';
|
||||
// @ts-ignore
|
||||
import { handleError } from '../../util/error';
|
||||
import chars from '../../util/output/chars';
|
||||
@@ -34,19 +32,16 @@ import promptOptions from '../../util/prompt-options';
|
||||
// @ts-ignore
|
||||
import readMetaData from '../../util/read-metadata';
|
||||
import toHumanPath from '../../util/humanize-path';
|
||||
import combineAsyncGenerators from '../../util/combine-async-generators';
|
||||
// @ts-ignore
|
||||
import createDeploy from '../../util/deploy/create-deploy';
|
||||
import eventListenerToGenerator from '../../util/event-listener-to-generator';
|
||||
// @ts-ignore
|
||||
import formatLogCmd from '../../util/output/format-log-cmd';
|
||||
// @ts-ignore
|
||||
import formatLogOutput from '../../util/output/format-log-output';
|
||||
// @ts-ignore
|
||||
import getEventsStream from '../../util/deploy/get-events-stream';
|
||||
import shouldDeployDir from '../../util/deploy/should-deploy-dir';
|
||||
// @ts-ignore
|
||||
import getInstanceIndex from '../../util/deploy/get-instance-index';
|
||||
import getStateChangeFromPolling from '../../util/deploy/get-state-change-from-polling';
|
||||
import joinWords from '../../util/output/join-words';
|
||||
// @ts-ignore
|
||||
import normalizeRegionsList from '../../util/scale/normalize-regions-list';
|
||||
@@ -67,11 +62,12 @@ import {
|
||||
DomainVerificationFailed,
|
||||
TooManyRequests,
|
||||
VerifyScaleTimeout,
|
||||
DeploymentsRateLimited
|
||||
DeploymentsRateLimited,
|
||||
NotDomainOwner,
|
||||
} from '../../util/errors-ts';
|
||||
import {
|
||||
InvalidAllForScale,
|
||||
InvalidRegionOrDCForScale
|
||||
InvalidRegionOrDCForScale,
|
||||
} from '../../util/errors';
|
||||
import { SchemaValidationFailed } from '../../util/errors';
|
||||
import handleCertError from '../../util/certs/handle-cert-error';
|
||||
@@ -198,7 +194,7 @@ const promptForEnvFields = async (list: string[]) => {
|
||||
for (const field of list) {
|
||||
questions.push({
|
||||
name: field,
|
||||
message: field
|
||||
message: field,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -220,7 +216,7 @@ const promptForEnvFields = async (list: string[]) => {
|
||||
|
||||
async function canUseZeroConfig(cwd: string): Promise<boolean> {
|
||||
try {
|
||||
const pkg = (await readPackage(join(cwd, 'package.json')));
|
||||
const pkg = await readPackage(join(cwd, 'package.json'));
|
||||
|
||||
if (!pkg || pkg instanceof Error) {
|
||||
return false;
|
||||
@@ -250,7 +246,7 @@ async function canUseZeroConfig(cwd: string): Promise<boolean> {
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
} catch(_) {}
|
||||
} catch (_) {}
|
||||
|
||||
return false;
|
||||
}
|
||||
@@ -275,6 +271,10 @@ export default async function main(
|
||||
paths = [process.cwd()];
|
||||
}
|
||||
|
||||
if (!(await shouldDeployDir(argv._[0], output))) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Options
|
||||
forceNew = argv.force;
|
||||
deploymentName = argv.name;
|
||||
@@ -296,15 +296,17 @@ export default async function main(
|
||||
quiet = !isTTY;
|
||||
({ log, error, note, debug, warn } = output);
|
||||
|
||||
const infoUrl = await canUseZeroConfig(paths[0])
|
||||
const infoUrl = (await canUseZeroConfig(paths[0]))
|
||||
? 'https://zeit.co/guides/migrate-to-zeit-now'
|
||||
: 'https://zeit.co/docs/v2/advanced/platform/changes-in-now-2-0'
|
||||
: 'https://zeit.co/docs/v2/advanced/platform/changes-in-now-2-0';
|
||||
|
||||
warn(`You are using an old version of the Now Platform. More: ${link(infoUrl)}`);
|
||||
warn(
|
||||
`You are using an old version of the Now Platform. More: ${link(infoUrl)}`
|
||||
);
|
||||
|
||||
const {
|
||||
authConfig: { token },
|
||||
config
|
||||
config,
|
||||
} = ctx;
|
||||
|
||||
try {
|
||||
@@ -314,7 +316,7 @@ export default async function main(
|
||||
token,
|
||||
config,
|
||||
firstRun: true,
|
||||
deploymentType: undefined
|
||||
deploymentType: undefined,
|
||||
});
|
||||
} catch (err) {
|
||||
await stopDeployment(err);
|
||||
@@ -327,7 +329,7 @@ async function sync({
|
||||
token,
|
||||
config: { currentTeam },
|
||||
firstRun,
|
||||
deploymentType
|
||||
deploymentType,
|
||||
}: SyncOptions): Promise<void> {
|
||||
return new Promise(async (_resolve, reject) => {
|
||||
let deployStamp = stamp();
|
||||
@@ -476,7 +478,7 @@ async function sync({
|
||||
|
||||
// XXX: legacy
|
||||
deploymentType,
|
||||
sessionAffinity
|
||||
sessionAffinity,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -486,7 +488,7 @@ async function sync({
|
||||
meta,
|
||||
deploymentName,
|
||||
deploymentType,
|
||||
sessionAffinity
|
||||
sessionAffinity,
|
||||
} = await readMeta(
|
||||
paths[0],
|
||||
deploymentName,
|
||||
@@ -499,7 +501,7 @@ async function sync({
|
||||
'dockerfile_missing',
|
||||
'no_dockerfile_commands',
|
||||
'unsupported_deployment_type',
|
||||
'multiple_manifests'
|
||||
'multiple_manifests',
|
||||
];
|
||||
|
||||
if (
|
||||
@@ -537,7 +539,7 @@ async function sync({
|
||||
// Read scale and fail if we have both regions and scale
|
||||
if (regions.length > 0 && Object.keys(scaleFromConfig).length > 0) {
|
||||
error(
|
||||
'Can\'t set both `regions` and `scale` options simultaneously',
|
||||
"Can't set both `regions` and `scale` options simultaneously",
|
||||
'regions-and-scale-at-once'
|
||||
);
|
||||
await exit(1);
|
||||
@@ -548,9 +550,7 @@ async function sync({
|
||||
dcIds = normalizeRegionsList(regions);
|
||||
if (dcIds instanceof InvalidRegionOrDCForScale) {
|
||||
error(
|
||||
`The value "${
|
||||
dcIds.meta.regionOrDC
|
||||
}" is not a valid region or DC identifier`
|
||||
`The value "${dcIds.meta.regionOrDC}" is not a valid region or DC identifier`
|
||||
);
|
||||
await exit(1);
|
||||
return 1;
|
||||
@@ -565,7 +565,7 @@ async function sync({
|
||||
scale = dcIds.reduce(
|
||||
(result: DcScale, dcId: string) => ({
|
||||
...result,
|
||||
[dcId]: { min: 0, max: 1 }
|
||||
[dcId]: { min: 0, max: 1 },
|
||||
}),
|
||||
{}
|
||||
);
|
||||
@@ -661,8 +661,9 @@ async function sync({
|
||||
}
|
||||
|
||||
const hasSecrets = Object.keys(deploymentEnv).some(key =>
|
||||
deploymentEnv[key].startsWith('@')
|
||||
(deploymentEnv[key] || '').startsWith('@')
|
||||
);
|
||||
|
||||
const secretsPromise = hasSecrets ? now.listSecrets() : null;
|
||||
|
||||
const findSecret = async (uidOrName: string) => {
|
||||
@@ -754,15 +755,13 @@ async function sync({
|
||||
parseMeta(argv.meta)
|
||||
);
|
||||
|
||||
let syncCount;
|
||||
|
||||
try {
|
||||
meta.name = getProjectName({
|
||||
argv,
|
||||
nowConfig,
|
||||
isFile,
|
||||
paths,
|
||||
pre: meta.name
|
||||
pre: meta.name,
|
||||
});
|
||||
log(`Using project ${chalk.bold(meta.name)}`);
|
||||
const createArgs = Object.assign(
|
||||
@@ -776,13 +775,15 @@ async function sync({
|
||||
scale,
|
||||
wantsPublic,
|
||||
sessionAffinity,
|
||||
isFile
|
||||
isFile,
|
||||
nowConfig,
|
||||
deployStamp,
|
||||
},
|
||||
meta
|
||||
);
|
||||
|
||||
deployStamp = stamp();
|
||||
const firstDeployCall = await createDeploy(
|
||||
deployment = await createDeploy(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
@@ -790,118 +791,24 @@ async function sync({
|
||||
createArgs
|
||||
);
|
||||
|
||||
const handledResult = handleCertError(output, firstDeployCall);
|
||||
const handledResult = handleCertError(output, deployment);
|
||||
if (handledResult === 1) {
|
||||
return handledResult;
|
||||
}
|
||||
|
||||
if (
|
||||
firstDeployCall instanceof DomainNotFound ||
|
||||
firstDeployCall instanceof DomainPermissionDenied ||
|
||||
firstDeployCall instanceof DomainVerificationFailed ||
|
||||
firstDeployCall instanceof SchemaValidationFailed ||
|
||||
firstDeployCall instanceof DeploymentNotFound ||
|
||||
firstDeployCall instanceof DeploymentsRateLimited
|
||||
deployment instanceof DomainNotFound ||
|
||||
deployment instanceof NotDomainOwner ||
|
||||
deployment instanceof DomainPermissionDenied ||
|
||||
deployment instanceof DomainVerificationFailed ||
|
||||
deployment instanceof SchemaValidationFailed ||
|
||||
deployment instanceof DeploymentNotFound ||
|
||||
deployment instanceof DeploymentsRateLimited
|
||||
) {
|
||||
handleCreateDeployError(output, firstDeployCall);
|
||||
handleCreateDeployError(output, deployment);
|
||||
await exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
deployment = firstDeployCall;
|
||||
|
||||
if (now.syncFileCount > 0) {
|
||||
const uploadStamp = stamp();
|
||||
await new Promise(resolve => {
|
||||
if (now.syncFileCount !== now.fileCount) {
|
||||
debug(`Total files ${now.fileCount}, ${now.syncFileCount} changed`);
|
||||
}
|
||||
|
||||
const size = bytes(now.syncAmount);
|
||||
syncCount = `${now.syncFileCount} file${
|
||||
now.syncFileCount > 1 ? 's' : ''
|
||||
}`;
|
||||
const bar = new Progress(
|
||||
`${chalk.gray(
|
||||
'>'
|
||||
)} Upload [:bar] :percent :etas (${size}) [${syncCount}]`,
|
||||
{
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: now.syncAmount,
|
||||
clear: true
|
||||
}
|
||||
);
|
||||
|
||||
now.upload({ scale });
|
||||
|
||||
now.on(
|
||||
'upload',
|
||||
({ names, data }: { names: string[]; data: Buffer }) => {
|
||||
debug(`Uploaded: ${names.join(' ')} (${bytes(data.length)})`);
|
||||
}
|
||||
);
|
||||
|
||||
now.on('uploadProgress', (progress: number) => {
|
||||
bar.tick(progress);
|
||||
});
|
||||
|
||||
now.on('complete', resolve);
|
||||
|
||||
now.on('error', (err: Error) => {
|
||||
error('Upload failed');
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
if (!quiet && syncCount) {
|
||||
log(
|
||||
`Synced ${syncCount} (${bytes(now.syncAmount)}) ${uploadStamp()}`
|
||||
);
|
||||
}
|
||||
|
||||
for (let i = 0; i < 4; i += 1) {
|
||||
deployStamp = stamp();
|
||||
const secondDeployCall = await createDeploy(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
paths,
|
||||
createArgs
|
||||
);
|
||||
|
||||
const handledResult = handleCertError(output, secondDeployCall);
|
||||
if (handledResult === 1) {
|
||||
return handledResult;
|
||||
}
|
||||
|
||||
if (
|
||||
secondDeployCall instanceof DomainNotFound ||
|
||||
secondDeployCall instanceof DomainPermissionDenied ||
|
||||
secondDeployCall instanceof DomainVerificationFailed ||
|
||||
secondDeployCall instanceof SchemaValidationFailed ||
|
||||
secondDeployCall instanceof TooManyRequests ||
|
||||
secondDeployCall instanceof DeploymentNotFound ||
|
||||
secondDeployCall instanceof DeploymentsRateLimited
|
||||
) {
|
||||
handleCreateDeployError(output, secondDeployCall);
|
||||
await exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
if (now.syncFileCount === 0) {
|
||||
deployment = secondDeployCall;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (deployment === null) {
|
||||
error('Uploading failed. Please try again.');
|
||||
await exit(1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code === 'plan_requires_public') {
|
||||
if (!wantsPublic) {
|
||||
@@ -914,7 +821,7 @@ async function sync({
|
||||
|
||||
if (isTTY) {
|
||||
proceed = await promptBool('Are you sure you want to proceed?', {
|
||||
trailing: eraseLines(1)
|
||||
trailing: eraseLines(1),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -954,10 +861,10 @@ async function sync({
|
||||
output,
|
||||
token,
|
||||
config: {
|
||||
currentTeam
|
||||
currentTeam,
|
||||
},
|
||||
firstRun: false,
|
||||
deploymentType
|
||||
deploymentType,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1002,8 +909,6 @@ async function sync({
|
||||
} else {
|
||||
log(`${chalk.bold(chalk.cyan(url))}${dcs} ${deployStamp()}`);
|
||||
}
|
||||
} else {
|
||||
process.stdout.write(url);
|
||||
}
|
||||
|
||||
if (deploymentType === 'static') {
|
||||
@@ -1022,96 +927,52 @@ async function sync({
|
||||
// Show build logs
|
||||
// (We have to add this check for flow but it will never happen)
|
||||
if (deployment !== null) {
|
||||
// If the created deployment is ready it was a deduping and we should exit
|
||||
if (deployment.readyState !== 'READY') {
|
||||
require('assert')(deployment); // mute linter
|
||||
const instanceIndex = getInstanceIndex();
|
||||
const eventsStream = await maybeGetEventsStream(now, deployment);
|
||||
const eventsGenerator = getEventsGenerator(
|
||||
const instanceIndex = getInstanceIndex();
|
||||
const eventsStream = await maybeGetEventsStream(now, deployment);
|
||||
|
||||
if (!noVerify) {
|
||||
output.log(
|
||||
`Verifying instantiation in ${joinWords(
|
||||
Object.keys(deployment.scale).map(dc => chalk.bold(dc))
|
||||
)}`
|
||||
);
|
||||
const verifyStamp = stamp();
|
||||
const verifyDCsGenerator = getVerifyDCsGenerator(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
deployment,
|
||||
eventsStream
|
||||
);
|
||||
|
||||
for await (const _event of eventsGenerator) {
|
||||
const event = _event as any;
|
||||
// Stop when the deployment is ready
|
||||
if (
|
||||
event.type === 'state-change' &&
|
||||
event.payload.value === 'READY'
|
||||
) {
|
||||
output.log(`Build completed`);
|
||||
break;
|
||||
}
|
||||
|
||||
// Stop then there is an error state
|
||||
if (
|
||||
event.type === 'state-change' &&
|
||||
event.payload.value === 'ERROR'
|
||||
) {
|
||||
output.error(`Build failed`);
|
||||
await exit(1);
|
||||
}
|
||||
|
||||
// For any relevant event we receive, print the result
|
||||
if (event.type === 'build-start') {
|
||||
output.log('Building…');
|
||||
} else if (event.type === 'command') {
|
||||
output.log(formatLogCmd(event.payload.text));
|
||||
} else if (event.type === 'stdout' || event.type === 'stderr') {
|
||||
formatLogOutput(event.payload.text).forEach((msg: string) =>
|
||||
output.log(msg)
|
||||
for await (const _dcOrEvent of verifyDCsGenerator) {
|
||||
const dcOrEvent = _dcOrEvent as any;
|
||||
if (dcOrEvent instanceof VerifyScaleTimeout) {
|
||||
output.error(
|
||||
`Instance verification timed out (${ms(dcOrEvent.meta.timeout)})`
|
||||
);
|
||||
output.log(
|
||||
'Read more: https://err.sh/now-cli/verification-timeout'
|
||||
);
|
||||
await exit(1);
|
||||
} else if (Array.isArray(dcOrEvent)) {
|
||||
const [dc, instances] = dcOrEvent;
|
||||
output.log(
|
||||
`${chalk.cyan(chars.tick)} Scaled ${plural(
|
||||
'instance',
|
||||
instances,
|
||||
true
|
||||
)} in ${chalk.bold(dc)} ${verifyStamp()}`
|
||||
);
|
||||
} else if (
|
||||
dcOrEvent &&
|
||||
(dcOrEvent.type === 'stdout' || dcOrEvent.type === 'stderr')
|
||||
) {
|
||||
const prefix = chalk.gray(
|
||||
`[${instanceIndex(dcOrEvent.payload.instanceId)}] `
|
||||
);
|
||||
formatLogOutput(dcOrEvent.payload.text, prefix).forEach(
|
||||
(msg: string) => output.log(msg)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!noVerify) {
|
||||
output.log(
|
||||
`Verifying instantiation in ${joinWords(
|
||||
Object.keys(deployment.scale).map(dc => chalk.bold(dc))
|
||||
)}`
|
||||
);
|
||||
const verifyStamp = stamp();
|
||||
const verifyDCsGenerator = getVerifyDCsGenerator(
|
||||
output,
|
||||
now,
|
||||
deployment,
|
||||
eventsStream
|
||||
);
|
||||
|
||||
for await (const _dcOrEvent of verifyDCsGenerator) {
|
||||
const dcOrEvent = _dcOrEvent as any;
|
||||
if (dcOrEvent instanceof VerifyScaleTimeout) {
|
||||
output.error(
|
||||
`Instance verification timed out (${ms(
|
||||
dcOrEvent.meta.timeout
|
||||
)})`
|
||||
);
|
||||
output.log(
|
||||
'Read more: https://err.sh/now/verification-timeout'
|
||||
);
|
||||
await exit(1);
|
||||
} else if (Array.isArray(dcOrEvent)) {
|
||||
const [dc, instances] = dcOrEvent;
|
||||
output.log(
|
||||
`${chalk.cyan(chars.tick)} Scaled ${plural(
|
||||
'instance',
|
||||
instances,
|
||||
true
|
||||
)} in ${chalk.bold(dc)} ${verifyStamp()}`
|
||||
);
|
||||
} else if (
|
||||
dcOrEvent &&
|
||||
(dcOrEvent.type === 'stdout' || dcOrEvent.type === 'stderr')
|
||||
) {
|
||||
const prefix = chalk.gray(
|
||||
`[${instanceIndex(dcOrEvent.payload.instanceId)}] `
|
||||
);
|
||||
formatLogOutput(dcOrEvent.payload.text, prefix).forEach(
|
||||
(msg: string) => output.log(msg)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1133,7 +994,7 @@ async function readMeta(
|
||||
deploymentType,
|
||||
deploymentName: _deploymentName,
|
||||
quiet: true,
|
||||
sessionAffinity: _sessionAffinity
|
||||
sessionAffinity: _sessionAffinity,
|
||||
});
|
||||
|
||||
if (!deploymentType) {
|
||||
@@ -1150,7 +1011,7 @@ async function readMeta(
|
||||
meta,
|
||||
deploymentName: _deploymentName,
|
||||
deploymentType,
|
||||
sessionAffinity: _sessionAffinity
|
||||
sessionAffinity: _sessionAffinity,
|
||||
};
|
||||
} catch (err) {
|
||||
if (isTTY && err.code === 'multiple_manifests') {
|
||||
@@ -1164,7 +1025,7 @@ async function readMeta(
|
||||
try {
|
||||
deploymentType = await promptOptions([
|
||||
['npm', `${chalk.bold('package.json')}\t${chalk.gray(' --npm')} `],
|
||||
['docker', `${chalk.bold('Dockerfile')}\t${chalk.gray('--docker')} `]
|
||||
['docker', `${chalk.bold('Dockerfile')}\t${chalk.gray('--docker')} `],
|
||||
]);
|
||||
} catch (_) {
|
||||
throw err;
|
||||
@@ -1190,35 +1051,13 @@ async function maybeGetEventsStream(now: Now, deployment: any) {
|
||||
try {
|
||||
return await getEventsStream(now, deployment.deploymentId, {
|
||||
direction: 'forward',
|
||||
follow: true
|
||||
follow: true,
|
||||
});
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function getEventsGenerator(
|
||||
now: Now,
|
||||
contextName: string,
|
||||
deployment: any,
|
||||
eventsStream: any
|
||||
) {
|
||||
const stateChangeFromPollingGenerator = getStateChangeFromPolling(
|
||||
now,
|
||||
contextName,
|
||||
deployment.deploymentId,
|
||||
deployment.readyState
|
||||
);
|
||||
if (eventsStream !== null) {
|
||||
return combineAsyncGenerators(
|
||||
eventListenerToGenerator('data', eventsStream),
|
||||
stateChangeFromPollingGenerator
|
||||
);
|
||||
}
|
||||
|
||||
return stateChangeFromPollingGenerator;
|
||||
}
|
||||
|
||||
function getVerifyDCsGenerator(
|
||||
output: Output,
|
||||
now: Now,
|
||||
@@ -1228,7 +1067,7 @@ function getVerifyDCsGenerator(
|
||||
const verifyDeployment = verifyDeploymentScale(
|
||||
output,
|
||||
now,
|
||||
deployment.deploymentId,
|
||||
deployment.deploymentId || deployment.uid,
|
||||
deployment.scale
|
||||
);
|
||||
|
||||
@@ -1295,9 +1134,9 @@ function handleCreateDeployError(output: Output, error: Error) {
|
||||
output.error(
|
||||
`Failed to validate ${highlight(
|
||||
'now.json'
|
||||
)}: ${message}\nDocumentation: ${
|
||||
link('https://zeit.co/docs/v2/advanced/configuration')
|
||||
}`
|
||||
)}: ${message}\nDocumentation: ${link(
|
||||
'https://zeit.co/docs/v2/advanced/configuration'
|
||||
)}`
|
||||
);
|
||||
|
||||
return 1;
|
||||
@@ -1307,7 +1146,7 @@ function handleCreateDeployError(output: Output, error: Error) {
|
||||
`Too many requests detected for ${error.meta.api} API. Try again in ${ms(
|
||||
error.meta.retryAfter * 1000,
|
||||
{
|
||||
long: true
|
||||
long: true,
|
||||
}
|
||||
)}.`
|
||||
);
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import path from 'path';
|
||||
import chalk from 'chalk';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
|
||||
import getArgs from '../../util/get-args';
|
||||
import getSubcommand from '../../util/get-subcommand';
|
||||
@@ -11,11 +12,10 @@ import logo from '../../util/output/logo';
|
||||
import cmd from '../../util/output/cmd';
|
||||
import dev from './dev';
|
||||
import readPackage from '../../util/read-package';
|
||||
import { Package } from '../../util/dev/types';
|
||||
import readConfig from '../../util/config/read-config';
|
||||
|
||||
const COMMAND_CONFIG = {
|
||||
dev: ['dev']
|
||||
dev: ['dev'],
|
||||
};
|
||||
|
||||
const help = () => {
|
||||
@@ -54,18 +54,12 @@ export default async function main(ctx: NowContext) {
|
||||
|
||||
// Deprecated
|
||||
'--port': Number,
|
||||
'-p': '--port'
|
||||
'-p': '--port',
|
||||
});
|
||||
const debug = argv['--debug'];
|
||||
args = getSubcommand(argv._.slice(1), COMMAND_CONFIG).args;
|
||||
output = createOutput({ debug });
|
||||
|
||||
// Builders won't show debug logs by default
|
||||
// the `NOW_BUILDER_DEBUG` env variable will enable them
|
||||
if (debug) {
|
||||
process.env.NOW_BUILDER_DEBUG = '1';
|
||||
}
|
||||
|
||||
if ('--port' in argv) {
|
||||
output.warn('`--port` is deprecated, please use `--listen` instead');
|
||||
argv['--listen'] = String(argv['--port']);
|
||||
@@ -90,7 +84,7 @@ export default async function main(ctx: NowContext) {
|
||||
const pkg = await readPackage(path.join(dir, 'package.json'));
|
||||
|
||||
if (pkg) {
|
||||
const { scripts } = pkg as Package;
|
||||
const { scripts } = pkg as PackageJson;
|
||||
|
||||
if (scripts && scripts.dev && /\bnow\b\W+\bdev\b/.test(scripts.dev)) {
|
||||
output.error(
|
||||
@@ -98,9 +92,7 @@ export default async function main(ctx: NowContext) {
|
||||
'package.json'
|
||||
)} must not contain ${cmd('now dev')}`
|
||||
);
|
||||
output.error(
|
||||
`More details: http://err.sh/now/now-dev-as-dev-script`
|
||||
);
|
||||
output.error(`More details: http://err.sh/now/now-dev-as-dev-script`);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -74,7 +74,7 @@ export default async function main(ctx) {
|
||||
'--all': Boolean,
|
||||
'--meta': [String],
|
||||
'-a': '--all',
|
||||
'-m': '--meta'
|
||||
'-m': '--meta',
|
||||
});
|
||||
} catch (err) {
|
||||
handleError(err);
|
||||
@@ -84,7 +84,7 @@ export default async function main(ctx) {
|
||||
const debugEnabled = argv['--debug'];
|
||||
|
||||
const { print, log, error, note, debug } = createOutput({
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
});
|
||||
|
||||
if (argv._.length > 2) {
|
||||
@@ -103,13 +103,16 @@ export default async function main(ctx) {
|
||||
}
|
||||
|
||||
const meta = parseMeta(argv['--meta']);
|
||||
const { authConfig: { token }, config } = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config,
|
||||
} = ctx;
|
||||
const { currentTeam, includeScheme } = config;
|
||||
const client = new Client({
|
||||
apiUrl,
|
||||
token,
|
||||
currentTeam,
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
});
|
||||
let contextName = null;
|
||||
|
||||
@@ -202,7 +205,16 @@ export default async function main(ctx) {
|
||||
const item = aliases.find(e => e.uid === app || e.alias === app);
|
||||
|
||||
if (item) {
|
||||
debug('Found alias that matches app name');
|
||||
debug(`Found alias that matches app name: ${item.alias}`);
|
||||
|
||||
if (Array.isArray(item.rules)) {
|
||||
now.close();
|
||||
stopSpinner();
|
||||
log(`Found matching path alias: ${chalk.cyan(item.alias)}`);
|
||||
log(`Please run ${cmd(`now alias ls ${item.alias}`)} instead`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
const match = await now.findDeployment(item.deploymentId);
|
||||
const instances = await getDeploymentInstances(
|
||||
now,
|
||||
@@ -250,7 +262,9 @@ export default async function main(ctx) {
|
||||
|
||||
// information to help the user find other deployments or instances
|
||||
if (app == null) {
|
||||
log(`To list more deployments for a project run ${cmd('now ls [project]')}`);
|
||||
log(
|
||||
`To list more deployments for a project run ${cmd('now ls [project]')}`
|
||||
);
|
||||
} else if (!argv['--all']) {
|
||||
log(`To list deployment instances run ${cmd('now ls --all [project]')}`);
|
||||
}
|
||||
@@ -260,7 +274,9 @@ export default async function main(ctx) {
|
||||
console.log(
|
||||
`${table(
|
||||
[
|
||||
['project', 'latest deployment', 'inst #', 'type', 'state', 'age'].map(s => chalk.dim(s)),
|
||||
['project', 'latest deployment', 'inst #', 'type', 'state', 'age'].map(
|
||||
s => chalk.dim(s)
|
||||
),
|
||||
...deployments
|
||||
.sort(sortRecent())
|
||||
.map(dep => [
|
||||
@@ -272,7 +288,7 @@ export default async function main(ctx) {
|
||||
: dep.instanceCount,
|
||||
dep.type === 'LAMBDAS' ? chalk.gray('-') : dep.type,
|
||||
stateString(dep.state),
|
||||
chalk.gray(ms(Date.now() - new Date(dep.created)))
|
||||
chalk.gray(ms(Date.now() - new Date(dep.created))),
|
||||
],
|
||||
...(argv['--all']
|
||||
? dep.instances.map(i => [
|
||||
@@ -280,9 +296,9 @@ export default async function main(ctx) {
|
||||
` ${chalk.gray('-')} ${i.url} `,
|
||||
'',
|
||||
'',
|
||||
''
|
||||
'',
|
||||
])
|
||||
: [])
|
||||
: []),
|
||||
])
|
||||
// flatten since the previous step returns a nested
|
||||
// array of the deployment and (optionally) its instances
|
||||
@@ -293,12 +309,12 @@ export default async function main(ctx) {
|
||||
// we only want to render one deployment per app
|
||||
filterUniqueApps()
|
||||
: () => true
|
||||
)
|
||||
),
|
||||
],
|
||||
{
|
||||
align: ['l', 'l', 'r', 'l', 'b'],
|
||||
hsep: ' '.repeat(4),
|
||||
stringLength: strlen
|
||||
stringLength: strlen,
|
||||
}
|
||||
).replace(/^/gm, ' ')}\n\n`
|
||||
);
|
||||
@@ -310,7 +326,7 @@ function getProjectName(d) {
|
||||
return 'files';
|
||||
}
|
||||
|
||||
return d.name
|
||||
return d.name;
|
||||
}
|
||||
|
||||
// renders the state string
|
||||
|
||||
@@ -84,8 +84,8 @@ export default async function main(ctx) {
|
||||
debug: 'd',
|
||||
query: 'q',
|
||||
follow: 'f',
|
||||
output: 'o'
|
||||
}
|
||||
output: 'o',
|
||||
},
|
||||
});
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
@@ -138,7 +138,7 @@ export default async function main(ctx) {
|
||||
|
||||
const {
|
||||
authConfig: { token },
|
||||
config
|
||||
config,
|
||||
} = ctx;
|
||||
const { currentTeam } = config;
|
||||
const now = new Now({ apiUrl, token, debug, currentTeam });
|
||||
@@ -146,7 +146,7 @@ export default async function main(ctx) {
|
||||
apiUrl,
|
||||
token,
|
||||
currentTeam,
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
});
|
||||
let contextName = null;
|
||||
|
||||
@@ -209,7 +209,7 @@ export default async function main(ctx) {
|
||||
types,
|
||||
instanceId,
|
||||
since,
|
||||
until
|
||||
until,
|
||||
}; // no follow
|
||||
const storage = [];
|
||||
const storeEvent = event => storage.push(event);
|
||||
@@ -219,7 +219,7 @@ export default async function main(ctx) {
|
||||
onEvent: storeEvent,
|
||||
quiet: false,
|
||||
debug,
|
||||
findOpts: findOpts1
|
||||
findOpts: findOpts1,
|
||||
});
|
||||
|
||||
const printedEventIds = new Set();
|
||||
@@ -241,14 +241,14 @@ export default async function main(ctx) {
|
||||
types,
|
||||
instanceId,
|
||||
since: since2,
|
||||
follow: true
|
||||
follow: true,
|
||||
};
|
||||
await printEvents(now, deployment.uid || deployment.id, currentTeam, {
|
||||
mode: 'logs',
|
||||
onEvent: printEvent,
|
||||
quiet: false,
|
||||
debug,
|
||||
findOpts: findOpts2
|
||||
findOpts: findOpts2,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -283,27 +283,40 @@ function printLogShort(log) {
|
||||
` ${obj.status} ${obj.bodyBytesSent}`;
|
||||
} else if (log.type === 'event') {
|
||||
data = `EVENT ${log.event} ${JSON.stringify(log.payload)}`;
|
||||
} else if (obj) {
|
||||
data = JSON.stringify(obj, null, 2);
|
||||
} else {
|
||||
data = obj
|
||||
? JSON.stringify(obj, null, 2)
|
||||
: (log.text || '')
|
||||
data = (log.text || '')
|
||||
.replace(/\n$/, '')
|
||||
.replace(/^\n/, '')
|
||||
// eslint-disable-next-line no-control-regex
|
||||
.replace(/\x1b\[1000D/g, '')
|
||||
.replace(/\x1b\[0K/g, '')
|
||||
.replace(/\x1b\[1A/g, '');
|
||||
if (/warning/i.test(data)) {
|
||||
data = chalk.yellow(data);
|
||||
} else if (log.type === 'stderr') {
|
||||
data = chalk.red(data);
|
||||
}
|
||||
}
|
||||
|
||||
const date = new Date(log.created).toISOString();
|
||||
|
||||
data.split('\n').forEach((line, i) => {
|
||||
if (line.includes('START RequestId:') || line.includes('END RequestId:')) {
|
||||
if (
|
||||
line.includes('START RequestId:') ||
|
||||
line.includes('END RequestId:') ||
|
||||
line.includes('XRAY TraceId:')
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (line.includes('REPORT RequestId:')) {
|
||||
line = line.substring(line.indexOf('Duration:'), line.length);
|
||||
|
||||
if (line.includes('Init Duration:')) {
|
||||
line = line.substring(0, line.indexOf('Init Duration:'));
|
||||
}
|
||||
}
|
||||
|
||||
if (i === 0) {
|
||||
@@ -345,7 +358,7 @@ function printLogRaw(log) {
|
||||
|
||||
const logPrinters = {
|
||||
short: printLogShort,
|
||||
raw: printLogRaw
|
||||
raw: printLogRaw,
|
||||
};
|
||||
|
||||
function toTimestamp(datestr) {
|
||||
|
||||
@@ -10,7 +10,7 @@ import Client from '../util/client.ts';
|
||||
import logo from '../util/output/logo';
|
||||
import getScope from '../util/get-scope';
|
||||
|
||||
const e = encodeURIComponent
|
||||
const e = encodeURIComponent;
|
||||
|
||||
const help = () => {
|
||||
console.log(`
|
||||
@@ -48,8 +48,8 @@ const main = async ctx => {
|
||||
argv = mri(ctx.argv.slice(2), {
|
||||
boolean: ['help'],
|
||||
alias: {
|
||||
help: 'h'
|
||||
}
|
||||
help: 'h',
|
||||
},
|
||||
});
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
@@ -63,7 +63,10 @@ const main = async ctx => {
|
||||
await exit(0);
|
||||
}
|
||||
|
||||
const { authConfig: { token }, config: { currentTeam }} = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config: { currentTeam },
|
||||
} = ctx;
|
||||
const client = new Client({ apiUrl, token, currentTeam, debug });
|
||||
|
||||
const { contextName } = await getScope(client);
|
||||
@@ -93,17 +96,21 @@ async function run({ client, contextName }) {
|
||||
if (args.length !== 0) {
|
||||
console.error(
|
||||
error(
|
||||
`Invalid number of arguments. Usage: ${chalk.cyan('`now projects ls`')}`
|
||||
`Invalid number of arguments. Usage: ${chalk.cyan(
|
||||
'`now projects ls`'
|
||||
)}`
|
||||
)
|
||||
);
|
||||
return exit(1);
|
||||
}
|
||||
|
||||
const list = await client.fetch('/projects/list', {method: 'GET'});
|
||||
const list = await client.fetch('/v2/projects/', { method: 'GET' });
|
||||
const elapsed = ms(new Date() - start);
|
||||
|
||||
console.log(
|
||||
`> ${plural('project', list.length, true)} found under ${chalk.bold(contextName)} ${chalk.gray(`[${elapsed}]`)}`
|
||||
`> ${plural('project', list.length, true)} found under ${chalk.bold(
|
||||
contextName
|
||||
)} ${chalk.gray(`[${elapsed}]`)}`
|
||||
);
|
||||
|
||||
if (list.length > 0) {
|
||||
@@ -114,19 +121,19 @@ async function run({ client, contextName }) {
|
||||
header.concat(
|
||||
list.map(secret => [
|
||||
'',
|
||||
chalk.bold(secret.name),
|
||||
chalk.gray(`${ms(cur - new Date(secret.updatedAt)) } ago`)
|
||||
])
|
||||
chalk.bold(secret.name),
|
||||
chalk.gray(`${ms(cur - new Date(secret.updatedAt))} ago`),
|
||||
])
|
||||
),
|
||||
{
|
||||
align: ['l', 'l', 'l'],
|
||||
hsep: ' '.repeat(2),
|
||||
stringLength: strlen
|
||||
stringLength: strlen,
|
||||
}
|
||||
);
|
||||
|
||||
if (out) {
|
||||
console.log(`\n${ out }\n`);
|
||||
console.log(`\n${out}\n`);
|
||||
}
|
||||
}
|
||||
return;
|
||||
@@ -148,11 +155,11 @@ async function run({ client, contextName }) {
|
||||
|
||||
// Check the existence of the project
|
||||
try {
|
||||
await client.fetch(`/projects/info/${e(name)}`)
|
||||
} catch(err) {
|
||||
await client.fetch(`/projects/info/${e(name)}`);
|
||||
} catch (err) {
|
||||
if (err.status === 404) {
|
||||
console.error(error('No such project exists'))
|
||||
return exit(1)
|
||||
console.error(error('No such project exists'));
|
||||
return exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,7 +169,9 @@ async function run({ client, contextName }) {
|
||||
return exit(0);
|
||||
}
|
||||
|
||||
await client.fetch('/projects/remove', {method: 'DELETE', body: {name}});
|
||||
await client.fetch(`/v2/projects/${name}`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
const elapsed = ms(new Date() - start);
|
||||
console.log(
|
||||
`${chalk.cyan('> Success!')} Project ${chalk.bold(
|
||||
@@ -193,7 +202,10 @@ async function run({ client, contextName }) {
|
||||
}
|
||||
|
||||
const [name] = args;
|
||||
await client.fetch('/projects/ensure-project', {method: 'POST', body: {name}});
|
||||
await client.fetch('/projects/ensure-project', {
|
||||
method: 'POST',
|
||||
body: { name },
|
||||
});
|
||||
const elapsed = ms(new Date() - start);
|
||||
|
||||
console.log(
|
||||
@@ -204,9 +216,7 @@ async function run({ client, contextName }) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.error(
|
||||
error('Please specify a valid subcommand: ls | add | rm')
|
||||
);
|
||||
console.error(error('Please specify a valid subcommand: ls | add | rm'));
|
||||
help();
|
||||
exit(1);
|
||||
}
|
||||
@@ -220,7 +230,7 @@ function readConfirmation(projectName) {
|
||||
return new Promise(resolve => {
|
||||
process.stdout.write(
|
||||
`The project: ${chalk.bold(projectName)} will be removed permanently.\n` +
|
||||
`It will also delete everything under the project including deployments.\n`
|
||||
`It will also delete everything under the project including deployments.\n`
|
||||
);
|
||||
|
||||
process.stdout.write(
|
||||
|
||||
@@ -70,11 +70,12 @@ let subcommand;
|
||||
|
||||
const main = async ctx => {
|
||||
argv = mri(ctx.argv.slice(2), {
|
||||
boolean: ['help', 'debug'],
|
||||
boolean: ['help', 'debug', 'yes'],
|
||||
alias: {
|
||||
help: 'h',
|
||||
debug: 'd'
|
||||
}
|
||||
debug: 'd',
|
||||
yes: 'y',
|
||||
},
|
||||
});
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
@@ -88,7 +89,10 @@ const main = async ctx => {
|
||||
await exit(0);
|
||||
}
|
||||
|
||||
const { authConfig: { token }, config: { currentTeam } } = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config: { currentTeam },
|
||||
} = ctx;
|
||||
const output = createOutput({ debug });
|
||||
const client = new Client({ apiUrl, token, currentTeam, debug });
|
||||
let contextName = null;
|
||||
@@ -105,7 +109,7 @@ const main = async ctx => {
}

try {
await run({ token, contextName, currentTeam });
await run({ output, token, contextName, currentTeam });
} catch (err) {
handleError(err);
exit(1);
@@ -121,7 +125,7 @@ export default async ctx => {
}
};

async function run({ token, contextName, currentTeam }) {
async function run({ output, token, contextName, currentTeam }) {
const secrets = new NowSecrets({ apiUrl, token, debug, currentTeam });
const args = argv._.slice(1);
const start = Date.now();
@@ -153,13 +157,13 @@ async function run({ token, contextName, currentTeam }) {
list.map(secret => [
'',
chalk.bold(secret.name),
chalk.gray(`${ms(cur - new Date(secret.created))} ago`)
chalk.gray(`${ms(cur - new Date(secret.created))} ago`),
])
),
{
align: ['l', 'l', 'l'],
hsep: ' '.repeat(2),
stringLength: strlen
stringLength: strlen,
}
);

@@ -185,7 +189,7 @@ async function run({ token, contextName, currentTeam }) {
const theSecret = list.find(secret => secret.name === args[0]);

if (theSecret) {
const yes = await readConfirmation(theSecret);
const yes = argv.yes || (await readConfirmation(theSecret));
if (!yes) {
console.error(error('User abort'));
return exit(0);
@@ -250,6 +254,10 @@ async function run({ token, contextName, currentTeam }) {
await secrets.add(name, value);
const elapsed = ms(new Date() - start);

if (name !== name.toLowerCase()) {
output.warn(`Your secret name was converted to lower-case`);
}

console.log(
`${chalk.cyan('> Success!')} Secret ${chalk.bold(
name.toLowerCase()
@@ -275,7 +283,7 @@ function readConfirmation(secret) {
const time = chalk.gray(`${ms(new Date() - new Date(secret.created))} ago`);
const tbl = table([[chalk.bold(secret.name), time]], {
align: ['r', 'l'],
hsep: ' '.repeat(6)
hsep: ' '.repeat(6),
});

process.stdout.write(

@@ -17,7 +17,7 @@ import info from './util/output/info';
|
||||
import getNowDir from './util/config/global-path';
|
||||
import {
|
||||
getDefaultConfig,
|
||||
getDefaultAuthConfig
|
||||
getDefaultAuthConfig,
|
||||
} from './util/config/get-default';
|
||||
import hp from './util/humanize-path';
|
||||
import commands from './commands/index.ts';
|
||||
@@ -53,7 +53,7 @@ sourceMap.install();
|
||||
Sentry.init({
|
||||
dsn: SENTRY_DSN,
|
||||
release: `now-cli@${pkg.version}`,
|
||||
environment: pkg.version.includes('canary') ? 'canary' : 'stable'
|
||||
environment: pkg.version.includes('canary') ? 'canary' : 'stable',
|
||||
});
|
||||
|
||||
let debug = () => {};
|
||||
@@ -71,7 +71,7 @@ const main = async argv_ => {
|
||||
'--version': Boolean,
|
||||
'-v': '--version',
|
||||
'--debug': Boolean,
|
||||
'-d': '--debug'
|
||||
'-d': '--debug',
|
||||
},
|
||||
{ permissive: true }
|
||||
);
|
||||
@@ -102,7 +102,10 @@ const main = async argv_ => {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (localConfig instanceof NowError && !(localConfig instanceof ERRORS.CantFindConfig)) {
|
||||
if (
|
||||
localConfig instanceof NowError &&
|
||||
!(localConfig instanceof ERRORS.CantFindConfig)
|
||||
) {
|
||||
output.error(`Failed to load local config file: ${localConfig.message}`);
|
||||
return 1;
|
||||
}
|
||||
@@ -118,7 +121,7 @@ const main = async argv_ => {
|
||||
if (targetOrSubcommand !== 'update') {
|
||||
update = await checkForUpdate(pkg, {
|
||||
interval: ms('1d'),
|
||||
distTag: pkg.version.includes('canary') ? 'canary' : 'latest'
|
||||
distTag: pkg.version.includes('canary') ? 'canary' : 'latest',
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -135,7 +138,15 @@ const main = async argv_ => {
|
||||
console.log(
|
||||
info(
|
||||
`${chalk.bgRed('UPDATE AVAILABLE')} ` +
|
||||
`Run ${cmd(await getUpdateCommand())} to install Now CLI ${update.latest}`
|
||||
`Run ${cmd(await getUpdateCommand())} to install Now CLI ${
|
||||
update.latest
|
||||
}`
|
||||
)
|
||||
);
|
||||
|
||||
console.log(
|
||||
info(
|
||||
`Changelog: https://github.com/zeit/now/releases/tag/now@${update.latest}`
|
||||
)
|
||||
);
|
||||
}
|
||||
@@ -307,9 +318,9 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error(
|
||||
`${'An unexpected error occurred while trying to write the ' +
|
||||
`default now config to "${hp(
|
||||
NOW_AUTH_CONFIG_PATH
|
||||
)}" `}${err.message}`
|
||||
`default now config to "${hp(NOW_AUTH_CONFIG_PATH)}" `}${
|
||||
err.message
|
||||
}`
|
||||
)
|
||||
);
|
||||
return 1;
|
||||
@@ -329,7 +340,7 @@ const main = async argv_ => {
|
||||
config,
|
||||
authConfig,
|
||||
localConfig,
|
||||
argv: argv_
|
||||
argv: argv_,
|
||||
};
|
||||
|
||||
let subcommand;
|
||||
@@ -339,7 +350,8 @@ const main = async argv_ => {
|
||||
const targetPath = join(process.cwd(), targetOrSubcommand);
|
||||
const targetPathExists = existsSync(targetPath);
|
||||
const subcommandExists =
|
||||
GLOBAL_COMMANDS.has(targetOrSubcommand) || commands.has(targetOrSubcommand);
|
||||
GLOBAL_COMMANDS.has(targetOrSubcommand) ||
|
||||
commands.has(targetOrSubcommand);
|
||||
|
||||
if (targetPathExists && subcommandExists) {
|
||||
console.error(
|
||||
@@ -412,7 +424,7 @@ const main = async argv_ => {
|
||||
message:
|
||||
'No existing credentials found. Please run ' +
|
||||
`${param('now login')} or pass ${param('--token')}`,
|
||||
slug: 'no-credentials-found'
|
||||
slug: 'no-credentials-found',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -426,7 +438,7 @@ const main = async argv_ => {
|
||||
message: `This command doesn't work with ${param(
|
||||
'--token'
|
||||
)}. Please use ${param('--scope')}.`,
|
||||
slug: 'no-token-allowed'
|
||||
slug: 'no-token-allowed',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -440,7 +452,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: `You defined ${param('--token')}, but it's missing a value`,
|
||||
slug: 'missing-token-value'
|
||||
slug: 'missing-token-value',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -459,11 +471,22 @@ const main = async argv_ => {
|
||||
const targetCommand = commands.get(subcommand);
|
||||
|
||||
if (argv['--team']) {
|
||||
output.warn(`The ${param('--team')} flag is deprecated. Please use ${param('--scope')} instead.`);
|
||||
output.warn(
|
||||
`The ${param('--team')} flag is deprecated. Please use ${param(
|
||||
'--scope'
|
||||
)} instead.`
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof scope === 'string' && targetCommand !== 'login' && targetCommand !== 'dev' && !(targetCommand === 'teams' && argv._[3] !== 'invite')) {
|
||||
const { authConfig: { token } } = ctx;
|
||||
if (
|
||||
typeof scope === 'string' &&
|
||||
targetCommand !== 'login' &&
|
||||
targetCommand !== 'dev' &&
|
||||
!(targetCommand === 'teams' && argv._[3] !== 'invite')
|
||||
) {
|
||||
const {
|
||||
authConfig: { token },
|
||||
} = ctx;
|
||||
const client = new Client({ apiUrl, token });
|
||||
|
||||
let user = null;
|
||||
@@ -475,7 +498,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: `You do not have access to the specified account`,
|
||||
slug: 'scope-not-accessible'
|
||||
slug: 'scope-not-accessible',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -499,7 +522,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: `You do not have access to the specified team`,
|
||||
slug: 'scope-not-accessible'
|
||||
slug: 'scope-not-accessible',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -517,7 +540,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: 'The specified scope does not exist',
|
||||
slug: 'scope-not-existent'
|
||||
slug: 'scope-not-existent',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -577,7 +600,8 @@ const main = async argv_ => {
|
||||
if (shouldCollectMetrics) {
|
||||
metric
|
||||
.event(eventCategory, '1', pkg.version)
|
||||
.exception(err.message).send();
|
||||
.exception(err.message)
|
||||
.send();
|
||||
}
|
||||
|
||||
return 1;
|
||||
@@ -586,7 +610,8 @@ const main = async argv_ => {
|
||||
if (shouldCollectMetrics) {
|
||||
metric
|
||||
.event(eventCategory, '1', pkg.version)
|
||||
.exception(err.message).send();
|
||||
.exception(err.message)
|
||||
.send();
|
||||
}
|
||||
|
||||
// Otherwise it is an unexpected error and we should show the trace
|
||||
@@ -647,9 +672,7 @@ process.on('uncaughtException', handleUnexpected);
|
||||
// subcommands waiting for further data won't work (like `logs` and `logout`)!
|
||||
main(process.argv)
|
||||
.then(exitCode => {
|
||||
process.exitCode = exitCode;
|
||||
process.emit('nowExit');
|
||||
process.on('beforeExit', () => {
|
||||
process.exit(exitCode);
|
||||
});
|
||||
})
|
||||
.catch(handleUnexpected);
|
||||
|
||||
@@ -195,28 +195,31 @@ export type DNSRecord = {
};

type SRVRecordData = {
name: string,
type: 'SRV',
name: string;
type: 'SRV';
srv: {
port: number,
priority: number,
target: string,
weight: number,
}
}

type MXRecordData = {
name: string,
type: 'MX',
value: string,
mxPriority: number,
port: number;
priority: number;
target: string;
weight: number;
};
};

export type DNSRecordData = {
name: string,
type: string,
value: string,
} | SRVRecordData | MXRecordData;
type MXRecordData = {
name: string;
type: 'MX';
value: string;
mxPriority: number;
};

export type DNSRecordData =
| {
name: string;
type: string;
value: string;
}
| SRVRecordData
| MXRecordData;

export interface Project {
id: string;

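For readers following the type reshuffle above: a value of the new discriminated `DNSRecordData` union would look roughly like this (hypothetical records for illustration only; the import path is assumed):

```ts
// Illustration of the reshaped DNSRecordData union (import path assumed).
import { DNSRecordData } from './types';

const srvRecord: DNSRecordData = {
  name: '_sip._tcp.example.com',
  type: 'SRV',
  srv: {
    port: 5060,
    priority: 10,
    target: 'sip.example.com',
    weight: 5,
  },
};

const plainRecord: DNSRecordData = {
  name: 'www.example.com',
  type: 'CNAME',
  value: 'example.com',
};
```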
@@ -1,142 +0,0 @@
|
||||
import { JsonBody, StreamBody, context } from 'fetch-h2';
|
||||
|
||||
// Packages
|
||||
import { parse } from 'url';
|
||||
import Sema from 'async-sema';
|
||||
import createOutput, { Output } from './output/create-output';
|
||||
|
||||
const MAX_REQUESTS_PER_CONNECTION = 1000;
|
||||
|
||||
type CurrentContext = ReturnType<typeof context> & {
|
||||
fetchesMade: number;
|
||||
ongoingFetches: number;
|
||||
};
|
||||
|
||||
export interface AgentFetchOptions {
|
||||
method?: 'GET' | 'POST' | 'PATCH' | 'PUT' | 'DELETE';
|
||||
body?: NodeJS.ReadableStream | string;
|
||||
headers: { [key: string]: string };
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a `fetch` version with a similar API to the browser's configured with a
|
||||
* HTTP2 agent. It encodes `body` automatically as JSON.
|
||||
*
|
||||
* @param {String} host
|
||||
* @return {Function} fetch
|
||||
*/
|
||||
export default class NowAgent {
|
||||
_contexts: ReturnType<typeof context>[];
|
||||
_currContext: CurrentContext;
|
||||
_output: Output;
|
||||
_protocol?: string;
|
||||
_sema: Sema;
|
||||
_url: string;
|
||||
|
||||
constructor(url: string, { debug = false } = {}) {
|
||||
// We use multiple contexts because each context represents one connection
|
||||
// With nginx, we're limited to 1000 requests before a connection is closed
|
||||
// http://nginx.org/en/docs/http/ngx_http_v2_module.html#http2_max_requests
|
||||
// To get around this, we keep track of requests made on a connection. When we're about to hit 1000,
|
||||
// we start up a new connection, and re-route all future traffic through the new connection
|
||||
// and when the final request from the old connection resolves, we auto-close the old connection
|
||||
this._contexts = [context()];
|
||||
this._currContext = {
|
||||
...this._contexts[0],
|
||||
fetchesMade: 0,
|
||||
ongoingFetches: 0
|
||||
};
|
||||
|
||||
const parsed = parse(url);
|
||||
this._url = url;
|
||||
this._protocol = parsed.protocol;
|
||||
this._sema = new Sema(20);
|
||||
this._output = createOutput({ debug });
|
||||
}
|
||||
|
||||
setConcurrency({
|
||||
maxStreams,
|
||||
capacity
|
||||
}: {
|
||||
maxStreams: number;
|
||||
capacity: number;
|
||||
}) {
|
||||
this._sema = new Sema(maxStreams || 20, { capacity });
|
||||
}
|
||||
|
||||
async fetch(path: string, opts: AgentFetchOptions) {
|
||||
const { debug } = this._output;
|
||||
await this._sema.acquire();
|
||||
let currentContext: CurrentContext;
|
||||
this._currContext.fetchesMade++;
|
||||
if (this._currContext.fetchesMade >= MAX_REQUESTS_PER_CONNECTION) {
|
||||
const ctx = { ...context(), fetchesMade: 1, ongoingFetches: 0 };
|
||||
this._contexts.push(ctx);
|
||||
this._currContext = ctx;
|
||||
}
|
||||
|
||||
// If we're changing contexts, we don't want to record the ongoingFetch on the old context
|
||||
// That'll cause an off-by-one error when trying to close the old socket later
|
||||
this._currContext.ongoingFetches++;
|
||||
currentContext = this._currContext;
|
||||
|
||||
debug(
|
||||
`Total requests made on socket #${this._contexts.length}: ${this
|
||||
._currContext.fetchesMade}`
|
||||
);
|
||||
debug(
|
||||
`Concurrent requests on socket #${this._contexts.length}: ${this
|
||||
._currContext.ongoingFetches}`
|
||||
);
|
||||
|
||||
let body: JsonBody | StreamBody | string | undefined;
|
||||
if (opts.body && typeof opts.body === 'object') {
|
||||
if (typeof (<NodeJS.ReadableStream>opts.body).pipe === 'function') {
|
||||
body = new StreamBody(<NodeJS.ReadableStream>opts.body);
|
||||
} else {
|
||||
opts.headers['Content-Type'] = 'application/json';
|
||||
body = new JsonBody(opts.body);
|
||||
}
|
||||
} else {
|
||||
body = opts.body;
|
||||
}
|
||||
|
||||
const { host, protocol } = parse(path);
|
||||
const url = host ? `${protocol}//${host}` : this._url;
|
||||
const handleCompleted = async <T>(res: T) => {
|
||||
currentContext.ongoingFetches--;
|
||||
if (
|
||||
(currentContext !== this._currContext || host) &&
|
||||
currentContext.ongoingFetches <= 0
|
||||
) {
|
||||
// We've completely moved on to a new socket
|
||||
// close the old one
|
||||
|
||||
// TODO: Fix race condition:
|
||||
// If the response is a stream, and the server is still streaming data
|
||||
// we should check if the stream has closed before disconnecting
|
||||
// handleCompleted CAN technically be called before the res body stream is closed
|
||||
debug('Closing old socket');
|
||||
currentContext.disconnect(url);
|
||||
}
|
||||
|
||||
this._sema.release();
|
||||
return res;
|
||||
};
|
||||
|
||||
return currentContext
|
||||
.fetch((host ? '' : this._url) + path, { ...opts, body })
|
||||
.then(res => handleCompleted(res))
|
||||
.catch((err: Error) => {
|
||||
handleCompleted(null);
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
close() {
|
||||
const { debug } = this._output;
|
||||
debug('Closing agent');
|
||||
|
||||
this._currContext.disconnect(this._url);
|
||||
}
|
||||
}
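The deleted `NowAgent` above existed to work around nginx's `http2_max_requests` limit of 1000 requests per connection by rotating `fetch-h2` contexts. A minimal sketch of that rotation idea, reduced from the removed class and not the original implementation:

```ts
import { context } from 'fetch-h2';

const MAX_REQUESTS_PER_CONNECTION = 1000;

// Sketch: rotate to a fresh HTTP/2 context before nginx closes the connection.
let current = { ctx: context(), fetchesMade: 0, ongoingFetches: 0 };

async function rotatingFetch(url: string) {
  current.fetchesMade++;
  if (current.fetchesMade >= MAX_REQUESTS_PER_CONNECTION) {
    current = { ctx: context(), fetchesMade: 1, ongoingFetches: 0 };
  }
  const mine = current;
  mine.ongoingFetches++;
  try {
    return await mine.ctx.fetch(url);
  } finally {
    mine.ongoingFetches--;
    // Once a superseded context has drained, close its old socket.
    if (mine !== current && mine.ongoingFetches <= 0) {
      mine.ctx.disconnect(url);
    }
  }
}
```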
@@ -34,6 +34,11 @@ export default async function getDeploymentForAlias(
|
||||
}
|
||||
|
||||
const appName = await getAppName(output, localConfig, localConfigPath);
|
||||
|
||||
if (!appName) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const deployment = await getAppLastDeployment(
|
||||
output,
|
||||
client,
|
||||
|
||||
@@ -7,7 +7,11 @@ export default async function getInferredTargets(
|
||||
output: Output,
|
||||
config: Config
|
||||
) {
|
||||
output.warn(`The ${cmd('now alias')} command (no arguments) was deprecated in favour of ${cmd('now --prod')}.`);
|
||||
output.warn(
|
||||
`The ${cmd(
|
||||
'now alias'
|
||||
)} command (no arguments) was deprecated in favor of ${cmd('now --prod')}.`
|
||||
);
|
||||
|
||||
// This field is deprecated, warn about it
|
||||
if (config.aliases) {
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
import qs from 'querystring';
|
||||
import { EventEmitter } from 'events';
|
||||
import { parse as parseUrl } from 'url';
|
||||
import fetch, { RequestInit } from 'node-fetch';
|
||||
import retry, { RetryFunction, Options as RetryOptions } from 'async-retry';
|
||||
import createOutput, { Output } from './output/create-output';
|
||||
import Agent, { AgentFetchOptions } from './agent';
|
||||
import responseError from './response-error';
|
||||
import ua from './ua';
|
||||
|
||||
@@ -17,7 +17,6 @@ export type FetchOptions = {
|
||||
};
|
||||
|
||||
export default class Client extends EventEmitter {
|
||||
_agent: Agent;
|
||||
_apiUrl: string;
|
||||
_debug: boolean;
|
||||
_forceNew: boolean;
|
||||
@@ -30,7 +29,7 @@ export default class Client extends EventEmitter {
|
||||
token,
|
||||
currentTeam,
|
||||
forceNew = false,
|
||||
debug = false
|
||||
debug = false,
|
||||
}: {
|
||||
apiUrl: string;
|
||||
token: string;
|
||||
@@ -44,30 +43,23 @@ export default class Client extends EventEmitter {
|
||||
this._forceNew = forceNew;
|
||||
this._output = createOutput({ debug });
|
||||
this._apiUrl = apiUrl;
|
||||
this._agent = new Agent(apiUrl, { debug });
|
||||
this._onRetry = this._onRetry.bind(this);
|
||||
this.currentTeam = currentTeam;
|
||||
|
||||
const closeAgent = () => {
|
||||
this._agent.close();
|
||||
process.removeListener('nowExit', closeAgent);
|
||||
};
|
||||
|
||||
// @ts-ignore
|
||||
process.on('nowExit', closeAgent);
|
||||
}
|
||||
|
||||
retry<T>(fn: RetryFunction<T>, { retries = 3, maxTimeout = Infinity } = {}) {
|
||||
return retry(fn, {
|
||||
retries,
|
||||
maxTimeout,
|
||||
onRetry: this._onRetry
|
||||
onRetry: this._onRetry,
|
||||
});
|
||||
}
|
||||
|
||||
_fetch(_url: string, opts: FetchOptions = {}) {
|
||||
const parsedUrl = parseUrl(_url, true);
|
||||
const apiUrl = parsedUrl.host ? `${parsedUrl.protocol}//${parsedUrl.host}` : '';
|
||||
const apiUrl = parsedUrl.host
|
||||
? `${parsedUrl.protocol}//${parsedUrl.host}`
|
||||
: '';
|
||||
|
||||
if (opts.useCurrentTeam !== false && this.currentTeam) {
|
||||
const query = parsedUrl.query;
|
||||
@@ -80,20 +72,19 @@ export default class Client extends EventEmitter {
|
||||
Object.assign(opts, {
|
||||
body: JSON.stringify(opts.body),
|
||||
headers: Object.assign({}, opts.headers, {
|
||||
'Content-Type': 'application/json'
|
||||
})
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
});
|
||||
}
|
||||
|
||||
opts.headers = opts.headers || {};
|
||||
opts.headers.authorization = `Bearer ${this._token}`;
|
||||
opts.headers.Authorization = `Bearer ${this._token}`;
|
||||
opts.headers['user-agent'] = ua;
|
||||
|
||||
const url = `${apiUrl ? '' : this._apiUrl}${_url}`;
|
||||
return this._output.time(
|
||||
`${opts.method || 'GET'} ${apiUrl ? '' : this._apiUrl}${_url} ${JSON.stringify(
|
||||
opts.body
|
||||
) || ''}`,
|
||||
this._agent.fetch(_url, opts as AgentFetchOptions)
|
||||
`${opts.method || 'GET'} ${url} ${JSON.stringify(opts.body) || ''}`,
|
||||
fetch(url, opts as RequestInit)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -126,7 +117,5 @@ export default class Client extends EventEmitter {
|
||||
this._output.debug(`Retrying: ${error}\n${error.stack}`);
|
||||
}
|
||||
|
||||
close() {
|
||||
this._agent.close();
|
||||
}
|
||||
close() {}
|
||||
}
|
||||
|
||||
@@ -15,40 +15,45 @@ export default async function createDeploy(
|
||||
return await now.create(paths, createArgs);
|
||||
} catch (error) {
|
||||
if (error.code === 'rate_limited') {
|
||||
return new ERRORS_TS.DeploymentsRateLimited(error.message);
|
||||
throw new ERRORS_TS.DeploymentsRateLimited(error.message);
|
||||
}
|
||||
|
||||
// Means that the domain used as a suffix no longer exists
|
||||
if (error.code === 'domain_missing') {
|
||||
return new ERRORS_TS.DomainNotFound(error.value);
|
||||
throw new ERRORS_TS.DomainNotFound(error.value);
|
||||
}
|
||||
|
||||
if (error.code === 'domain_not_found' && error.domain) {
|
||||
return new ERRORS_TS.DomainNotFound(error.domain);
|
||||
throw new ERRORS_TS.DomainNotFound(error.domain);
|
||||
}
|
||||
|
||||
// This error occurs when a domain used in the `alias`
|
||||
// is not yet verified
|
||||
if (error.code === 'domain_not_verified' && error.domain) {
|
||||
return new ERRORS_TS.DomainNotVerified(error.domain);
|
||||
throw new ERRORS_TS.DomainNotVerified(error.domain);
|
||||
}
|
||||
|
||||
// If the domain used as a suffix is not verified, we fail
|
||||
if (error.code === 'domain_not_verified' && error.value) {
|
||||
return new ERRORS_TS.DomainVerificationFailed(error.value);
|
||||
throw new ERRORS_TS.DomainVerificationFailed(error.value);
|
||||
}
|
||||
|
||||
// If the domain isn't owned by the user
|
||||
if (error.code === 'not_domain_owner') {
|
||||
throw new ERRORS_TS.NotDomainOwner(error.message);
|
||||
}
|
||||
|
||||
if (error.code === 'builds_rate_limited') {
|
||||
return new ERRORS_TS.BuildsRateLimited(error.message);
|
||||
throw new ERRORS_TS.BuildsRateLimited(error.message);
|
||||
}
|
||||
|
||||
// If the user doesn't have permissions over the domain used as a suffix we fail
|
||||
if (error.code === 'forbidden') {
|
||||
return new ERRORS_TS.DomainPermissionDenied(error.value, contextName);
|
||||
throw new ERRORS_TS.DomainPermissionDenied(error.value, contextName);
|
||||
}
|
||||
|
||||
if (error.code === 'bad_request' && error.keyword) {
|
||||
return new ERRORS.SchemaValidationFailed(
|
||||
throw new ERRORS.SchemaValidationFailed(
|
||||
error.message,
|
||||
error.keyword,
|
||||
error.dataPath,
|
||||
@@ -57,19 +62,19 @@ export default async function createDeploy(
|
||||
}
|
||||
|
||||
if (error.code === 'domain_configured') {
|
||||
return new ERRORS_TS.AliasDomainConfigured(error);
|
||||
throw new ERRORS_TS.AliasDomainConfigured(error);
|
||||
}
|
||||
|
||||
if (error.code === 'missing_build_script') {
|
||||
return new ERRORS_TS.MissingBuildScript(error);
|
||||
throw new ERRORS_TS.MissingBuildScript(error);
|
||||
}
|
||||
|
||||
if (error.code === 'conflicting_file_path') {
|
||||
return new ERRORS_TS.ConflictingFilePath(error);
|
||||
throw new ERRORS_TS.ConflictingFilePath(error);
|
||||
}
|
||||
|
||||
if (error.code === 'conflicting_path_segment') {
|
||||
return new ERRORS_TS.ConflictingPathSegment(error);
|
||||
throw new ERRORS_TS.ConflictingPathSegment(error);
|
||||
}
|
||||
|
||||
// If the cert is missing we try to generate a new one and the retry
|
||||
@@ -87,10 +92,10 @@ export default async function createDeploy(
|
||||
}
|
||||
|
||||
if (error.code === 'not_found') {
|
||||
return new ERRORS_TS.DeploymentNotFound({ context: contextName });
|
||||
throw new ERRORS_TS.DeploymentNotFound({ context: contextName });
|
||||
}
|
||||
|
||||
const certError = mapCertError(error)
|
||||
const certError = mapCertError(error);
|
||||
if (certError) {
|
||||
return certError;
|
||||
}
|
||||
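With these changes `createDeploy` throws its typed errors instead of returning them, so callers move from checking return values to try/catch. A hedged sketch of what a caller might look like after the change (argument shapes and the import path are assumptions, not code from this repository):

```ts
// Sketch only: argument types are loosened because the real signatures live
// elsewhere in the CLI; the error classes are the ones used in the hunk above
// (import path assumed).
import * as ERRORS_TS from '../errors-ts';

async function deployAndReport(
  createDeploy: (...args: any[]) => Promise<any>,
  output: { error: (msg: string) => void },
  args: any[]
): Promise<number> {
  try {
    await createDeploy(...args);
    return 0;
  } catch (err) {
    if (
      err instanceof ERRORS_TS.DeploymentsRateLimited ||
      err instanceof ERRORS_TS.DomainNotFound ||
      err instanceof ERRORS_TS.DomainNotVerified
    ) {
      output.error(err.message);
      return 1;
    }
    throw err; // anything unexpected still surfaces with a stack trace
  }
}
```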
@@ -1,35 +0,0 @@
|
||||
//
|
||||
import sleep from '../sleep';
|
||||
|
||||
import createPollingFn from '../create-polling-fn';
|
||||
|
||||
import getDeploymentByIdOrThrow from './get-deployment-by-id-or-throw';
|
||||
|
||||
const POLLING_INTERVAL = 5000;
|
||||
|
||||
async function* getStatusChangeFromPolling(
|
||||
now: any,
|
||||
contextName: string,
|
||||
idOrHost: string,
|
||||
initialState: string
|
||||
) {
|
||||
const pollDeployment = createPollingFn(
|
||||
getDeploymentByIdOrThrow,
|
||||
POLLING_INTERVAL
|
||||
);
|
||||
let prevState = initialState;
|
||||
for await (const deployment of pollDeployment(now, contextName, idOrHost)) {
|
||||
if (prevState !== deployment.state) {
|
||||
await sleep(5000);
|
||||
yield {
|
||||
type: 'state-change',
|
||||
created: Date.now(),
|
||||
payload: { value: deployment.state }
|
||||
};
|
||||
} else {
|
||||
prevState = deployment.state;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default getStatusChangeFromPolling;
|
222
packages/now-cli/src/util/deploy/process-deployment.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import bytes from 'bytes';
|
||||
import Progress from 'progress';
|
||||
import chalk from 'chalk';
|
||||
import pluralize from 'pluralize';
|
||||
import {
|
||||
createDeployment,
|
||||
createLegacyDeployment,
|
||||
DeploymentOptions,
|
||||
} from '../../../../now-client';
|
||||
import wait from '../output/wait';
|
||||
import { Output } from '../output';
|
||||
// @ts-ignore
|
||||
import Now from '../../util';
|
||||
import { NowConfig } from '../dev/types';
|
||||
|
||||
export default async function processDeployment({
|
||||
now,
|
||||
output,
|
||||
hashes,
|
||||
paths,
|
||||
requestBody,
|
||||
uploadStamp,
|
||||
deployStamp,
|
||||
legacy,
|
||||
env,
|
||||
quiet,
|
||||
nowConfig,
|
||||
}: {
|
||||
now: Now;
|
||||
output: Output;
|
||||
hashes: { [key: string]: any };
|
||||
paths: string[];
|
||||
requestBody: DeploymentOptions;
|
||||
uploadStamp: () => number;
|
||||
deployStamp: () => number;
|
||||
legacy: boolean;
|
||||
env: any;
|
||||
quiet: boolean;
|
||||
nowConfig?: NowConfig;
|
||||
}) {
|
||||
const { warn, log, debug, note } = output;
|
||||
let bar: Progress | null = null;
|
||||
|
||||
const path0 = paths[0];
|
||||
const opts: DeploymentOptions = {
|
||||
...requestBody,
|
||||
debug: now._debug,
|
||||
};
|
||||
|
||||
if (!legacy) {
|
||||
let buildSpinner = null;
|
||||
let deploySpinner = null;
|
||||
|
||||
for await (const event of createDeployment(path0, opts, nowConfig)) {
|
||||
if (event.type === 'hashes-calculated') {
|
||||
hashes = event.payload;
|
||||
}
|
||||
|
||||
if (event.type === 'warning') {
|
||||
warn(event.payload);
|
||||
}
|
||||
|
||||
if (event.type === 'notice') {
|
||||
note(event.payload);
|
||||
}
|
||||
|
||||
if (event.type === 'file_count') {
|
||||
debug(
|
||||
`Total files ${event.payload.total.size}, ${event.payload.missing.length} changed`
|
||||
);
|
||||
|
||||
if (!quiet) {
|
||||
log(
|
||||
`Synced ${pluralize(
|
||||
'file',
|
||||
event.payload.missing.length,
|
||||
true
|
||||
)} ${uploadStamp()}`
|
||||
);
|
||||
}
|
||||
|
||||
const missingSize = event.payload.missing
|
||||
.map((sha: string) => event.payload.total.get(sha).data.length)
|
||||
.reduce((a: number, b: number) => a + b, 0);
|
||||
|
||||
bar = new Progress(`${chalk.gray('>')} Upload [:bar] :percent :etas`, {
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: missingSize,
|
||||
clear: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (event.type === 'file-uploaded') {
|
||||
debug(
|
||||
`Uploaded: ${event.payload.file.names.join(' ')} (${bytes(
|
||||
event.payload.file.data.length
|
||||
)})`
|
||||
);
|
||||
|
||||
if (bar) {
|
||||
bar.tick(event.payload.file.data.length);
|
||||
}
|
||||
}
|
||||
|
||||
if (event.type === 'created') {
|
||||
now._host = event.payload.url;
|
||||
|
||||
if (!quiet) {
|
||||
const version = legacy ? `${chalk.grey('[v1]')} ` : '';
|
||||
log(`https://${event.payload.url} ${version}${deployStamp()}`);
|
||||
} else {
|
||||
process.stdout.write(`https://${event.payload.url}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (event.type === 'build-state-changed') {
|
||||
if (buildSpinner === null) {
|
||||
buildSpinner = wait('Building...');
|
||||
}
|
||||
}
|
||||
|
||||
if (event.type === 'all-builds-completed') {
|
||||
if (buildSpinner) {
|
||||
buildSpinner();
|
||||
}
|
||||
|
||||
deploySpinner = wait('Finalizing...');
|
||||
}
|
||||
|
||||
// Handle error events
|
||||
if (event.type === 'error') {
|
||||
if (buildSpinner) {
|
||||
buildSpinner();
|
||||
}
|
||||
|
||||
if (deploySpinner) {
|
||||
deploySpinner();
|
||||
}
|
||||
|
||||
throw await now.handleDeploymentError(event.payload, { hashes, env });
|
||||
}
|
||||
|
||||
// Handle ready event
|
||||
if (event.type === 'ready') {
|
||||
if (deploySpinner) {
|
||||
deploySpinner();
|
||||
}
|
||||
|
||||
return event.payload;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for await (const event of createLegacyDeployment(path0, opts, nowConfig)) {
|
||||
if (event.type === 'hashes-calculated') {
|
||||
hashes = event.payload;
|
||||
}
|
||||
|
||||
if (event.type === 'file_count') {
|
||||
debug(
|
||||
`Total files ${event.payload.total.size}, ${event.payload.missing.length} changed`
|
||||
);
|
||||
if (!quiet) {
|
||||
log(
|
||||
`Synced ${pluralize(
|
||||
'file',
|
||||
event.payload.missing.length,
|
||||
true
|
||||
)} ${uploadStamp()}`
|
||||
);
|
||||
}
|
||||
|
||||
const missingSize = event.payload.missing
|
||||
.map((sha: string) => event.payload.total.get(sha).data.length)
|
||||
.reduce((a: number, b: number) => a + b, 0);
|
||||
|
||||
bar = new Progress(`${chalk.gray('>')} Upload [:bar] :percent :etas`, {
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: missingSize,
|
||||
clear: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (event.type === 'file-uploaded') {
|
||||
debug(
|
||||
`Uploaded: ${event.payload.file.names.join(' ')} (${bytes(
|
||||
event.payload.file.data.length
|
||||
)})`
|
||||
);
|
||||
|
||||
if (bar) {
|
||||
bar.tick(event.payload.file.data.length);
|
||||
}
|
||||
}
|
||||
|
||||
if (event.type === 'created') {
|
||||
now._host = event.payload.url;
|
||||
|
||||
if (!quiet) {
|
||||
const version = legacy ? `${chalk.grey('[v1]')} ` : '';
|
||||
log(`${event.payload.url} ${version}${deployStamp()}`);
|
||||
} else {
|
||||
process.stdout.write(`https://${event.payload.url}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle error events
|
||||
if (event.type === 'error') {
|
||||
throw await now.handleDeploymentError(event.payload, { hashes, env });
|
||||
}
|
||||
|
||||
// Handle ready event
|
||||
if (event.type === 'ready') {
|
||||
log(`Build completed`);
|
||||
return event.payload;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
18
packages/now-cli/src/util/deploy/should-deploy-dir.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { homedir } from 'os';
|
||||
import promptBool from '../input/prompt-bool';
|
||||
import { Output } from '../output';
|
||||
|
||||
export default async function shouldDeployDir(argv0: string, output: Output) {
|
||||
let yes = true;
|
||||
if (argv0 === homedir()) {
|
||||
if (
|
||||
!(await promptBool(
|
||||
'You are deploying your home directory. Do you want to continue?'
|
||||
))
|
||||
) {
|
||||
output.log('Aborted');
|
||||
yes = false;
|
||||
}
|
||||
}
|
||||
return yes;
|
||||
}
|
||||
@@ -8,6 +8,7 @@ import { createHash } from 'crypto';
|
||||
import { createGunzip } from 'zlib';
|
||||
import { join, resolve } from 'path';
|
||||
import { funCacheDir } from '@zeit/fun';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
import XDGAppPaths from 'xdg-app-paths';
|
||||
import {
|
||||
createReadStream,
|
||||
@@ -15,7 +16,7 @@ import {
|
||||
readFile,
|
||||
readJSON,
|
||||
writeFile,
|
||||
remove
|
||||
remove,
|
||||
} from 'fs-extra';
|
||||
import pkg from '../../../package.json';
|
||||
|
||||
@@ -23,10 +24,10 @@ import { NoBuilderCacheError, BuilderCacheCleanError } from '../errors-ts';
|
||||
import wait from '../output/wait';
|
||||
import { Output } from '../output';
|
||||
import { getDistTag } from '../get-dist-tag';
|
||||
import { devDependencies } from '../../../package.json';
|
||||
|
||||
import * as staticBuilder from './static-builder';
|
||||
import { BuilderWithPackage, Package } from './types';
|
||||
import { BuilderWithPackage } from './types';
|
||||
import { getBundledBuilders } from './get-bundled-builders';
|
||||
|
||||
const registryTypes = new Set(['version', 'tag', 'range']);
|
||||
|
||||
@@ -34,14 +35,10 @@ const localBuilders: { [key: string]: BuilderWithPackage } = {
|
||||
'@now/static': {
|
||||
runInProcess: true,
|
||||
builder: Object.freeze(staticBuilder),
|
||||
package: Object.freeze({ name: '@now/static', version: '' })
|
||||
}
|
||||
package: Object.freeze({ name: '@now/static', version: '' }),
|
||||
},
|
||||
};
|
||||
|
||||
const bundledBuilders = Object.keys(devDependencies).filter(d =>
|
||||
d.startsWith('@now/')
|
||||
);
|
||||
|
||||
const distTag = getDistTag(pkg.version);
|
||||
|
||||
export const cacheDirPromise = prepareCacheDir();
|
||||
@@ -117,7 +114,7 @@ export async function prepareBuilderDir() {
|
||||
export async function prepareBuilderModulePath() {
|
||||
const [builderDir, builderContents] = await Promise.all([
|
||||
builderDirPromise,
|
||||
readFile(join(__dirname, 'builder-worker.js'))
|
||||
readFile(join(__dirname, 'builder-worker.js')),
|
||||
]);
|
||||
let needsWrite = false;
|
||||
const builderSha = getSha(builderContents);
|
||||
@@ -179,7 +176,7 @@ export function getBuildUtils(packages: string[]): string {
|
||||
export function filterPackage(
|
||||
builderSpec: string,
|
||||
distTag: string,
|
||||
buildersPkg: Package
|
||||
buildersPkg: PackageJson
|
||||
) {
|
||||
if (builderSpec in localBuilders) return false;
|
||||
const parsed = npa(builderSpec);
|
||||
@@ -187,7 +184,7 @@ export function filterPackage(
|
||||
parsed.name &&
|
||||
parsed.type === 'tag' &&
|
||||
parsed.fetchSpec === distTag &&
|
||||
bundledBuilders.includes(parsed.name) &&
|
||||
getBundledBuilders().includes(parsed.name) &&
|
||||
buildersPkg.dependencies
|
||||
) {
|
||||
const parsedInstalled = npa(
|
||||
@@ -259,10 +256,10 @@ export async function installBuilders(
|
||||
'--exact',
|
||||
'--no-lockfile',
|
||||
'--non-interactive',
|
||||
...packagesToInstall
|
||||
...packagesToInstall,
|
||||
],
|
||||
{
|
||||
cwd: builderDir
|
||||
cwd: builderDir,
|
||||
}
|
||||
);
|
||||
} finally {
|
||||
@@ -294,10 +291,10 @@ export async function updateBuilders(
|
||||
'--exact',
|
||||
'--no-lockfile',
|
||||
'--non-interactive',
|
||||
...packages.filter(p => p !== '@now/static')
|
||||
...packages.filter(p => p !== '@now/static'),
|
||||
],
|
||||
{
|
||||
cwd: builderDir
|
||||
cwd: builderDir,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -336,7 +333,7 @@ export async function getBuilder(
|
||||
const pkg = require(join(dest, 'package.json'));
|
||||
builderWithPkg = {
|
||||
builder: Object.freeze(mod),
|
||||
package: Object.freeze(pkg)
|
||||
package: Object.freeze(pkg),
|
||||
};
|
||||
} catch (err) {
|
||||
if (err.code === 'MODULE_NOT_FOUND') {
|
||||
@@ -357,7 +354,7 @@ export async function getBuilder(
|
||||
|
||||
function getPackageName(
|
||||
parsed: npa.Result,
|
||||
buildersPkg: Package
|
||||
buildersPkg: PackageJson
|
||||
): string | null {
|
||||
if (registryTypes.has(parsed.type)) {
|
||||
return parsed.name;
|
||||
@@ -378,7 +375,7 @@ function getSha(buffer: Buffer): string {
|
||||
}
|
||||
|
||||
function hasBundledBuilders(dependencies: { [name: string]: string }): boolean {
|
||||
for (const name of bundledBuilders) {
|
||||
for (const name of getBundledBuilders()) {
|
||||
if (!(name in dependencies)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -2,16 +2,17 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion */
|
||||
import ms from 'ms';
|
||||
import bytes from 'bytes';
|
||||
import { promisify } from 'util';
|
||||
import { delimiter, dirname, join } from 'path';
|
||||
import { fork, ChildProcess } from 'child_process';
|
||||
import { createFunction } from '@zeit/fun';
|
||||
import { File, Lambda, FileBlob, FileFsRef } from '@now/build-utils';
|
||||
import { Builder, File, Lambda, FileBlob, FileFsRef } from '@now/build-utils';
|
||||
import stripAnsi from 'strip-ansi';
|
||||
import chalk from 'chalk';
|
||||
import which from 'which';
|
||||
import plural from 'pluralize';
|
||||
import ora, { Ora } from 'ora';
|
||||
import minimatch from 'minimatch';
|
||||
import _treeKill from 'tree-kill';
|
||||
|
||||
import { Output } from '../output';
|
||||
import highlight from '../output/highlight';
|
||||
@@ -23,12 +24,11 @@ import { builderModulePathPromise, getBuilder } from './builder-cache';
|
||||
import {
|
||||
EnvConfig,
|
||||
NowConfig,
|
||||
BuildConfig,
|
||||
BuildMatch,
|
||||
BuildResult,
|
||||
BuilderInputs,
|
||||
BuilderOutput,
|
||||
BuilderOutputs
|
||||
BuilderOutputs,
|
||||
} from './types';
|
||||
|
||||
interface BuildMessage {
|
||||
@@ -41,7 +41,7 @@ interface BuildMessageResult extends BuildMessage {
|
||||
error?: object;
|
||||
}
|
||||
|
||||
const isLogging = new WeakSet<ChildProcess>();
|
||||
const treeKill = promisify(_treeKill);
|
||||
|
||||
let nodeBinPromise: Promise<string>;
|
||||
|
||||
@@ -49,43 +49,48 @@ async function getNodeBin(): Promise<string> {
|
||||
return which.sync('node', { nothrow: true }) || process.execPath;
|
||||
}
|
||||
|
||||
function pipeChildLogging(child: ChildProcess): void {
|
||||
if (!isLogging.has(child)) {
|
||||
child.stdout!.pipe(process.stdout);
|
||||
child.stderr!.pipe(process.stderr);
|
||||
isLogging.add(child);
|
||||
}
|
||||
}
|
||||
|
||||
async function createBuildProcess(
|
||||
match: BuildMatch,
|
||||
buildEnv: EnvConfig,
|
||||
workPath: string,
|
||||
output: Output,
|
||||
yarnPath?: string
|
||||
yarnPath?: string,
|
||||
debugEnabled: boolean = false
|
||||
): Promise<ChildProcess> {
|
||||
if (!nodeBinPromise) {
|
||||
nodeBinPromise = getNodeBin();
|
||||
}
|
||||
const [execPath, modulePath] = await Promise.all([
|
||||
nodeBinPromise,
|
||||
builderModulePathPromise
|
||||
builderModulePathPromise,
|
||||
]);
|
||||
|
||||
// Ensure that `node` is in the builder's `PATH`
|
||||
let PATH = `${dirname(execPath)}${delimiter}${process.env.PATH}`;
|
||||
|
||||
// Ensure that `yarn` is in the builder's `PATH`
|
||||
if (yarnPath) {
|
||||
PATH = `${yarnPath}${delimiter}${PATH}`;
|
||||
}
|
||||
|
||||
const env: EnvConfig = {
|
||||
...process.env,
|
||||
PATH,
|
||||
...buildEnv,
|
||||
NOW_REGION: 'dev1',
|
||||
};
|
||||
|
||||
// Builders won't show debug logs by default.
|
||||
// The `NOW_BUILDER_DEBUG` env variable enables them.
|
||||
if (debugEnabled) {
|
||||
env.NOW_BUILDER_DEBUG = '1';
|
||||
}
|
||||
|
||||
const buildProcess = fork(modulePath, [], {
|
||||
cwd: workPath,
|
||||
env: {
|
||||
...process.env,
|
||||
PATH,
|
||||
...buildEnv,
|
||||
NOW_REGION: 'dev1'
|
||||
},
|
||||
env,
|
||||
execPath,
|
||||
execArgv: [],
|
||||
stdio: ['ignore', 'pipe', 'pipe', 'ipc']
|
||||
});
|
||||
match.buildProcess = buildProcess;
|
||||
|
||||
@@ -96,9 +101,6 @@ async function createBuildProcess(
|
||||
match.buildProcess = undefined;
|
||||
});
|
||||
|
||||
buildProcess.stdout!.setEncoding('utf8');
|
||||
buildProcess.stderr!.setEncoding('utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
// The first message that the builder process sends is the `ready` event
|
||||
buildProcess.once('message', ({ type }) => {
|
||||
@@ -122,7 +124,7 @@ export async function executeBuild(
|
||||
filesRemoved?: string[]
|
||||
): Promise<void> {
|
||||
const {
|
||||
builderWithPkg: { runInProcess, builder, package: pkg }
|
||||
builderWithPkg: { runInProcess, builder, package: pkg },
|
||||
} = match;
|
||||
const { src: entrypoint } = match;
|
||||
const { env, debug, buildEnv, yarnPath, cwd: workPath } = devServer;
|
||||
@@ -150,7 +152,8 @@ export async function executeBuild(
|
||||
buildEnv,
|
||||
workPath,
|
||||
devServer.output,
|
||||
yarnPath
|
||||
yarnPath,
|
||||
debug
|
||||
);
|
||||
}
|
||||
|
||||
@@ -165,91 +168,45 @@ export async function executeBuild(
|
||||
filesChanged,
|
||||
filesRemoved,
|
||||
env,
|
||||
buildEnv
|
||||
}
|
||||
buildEnv,
|
||||
},
|
||||
};
|
||||
|
||||
let buildResultOrOutputs: BuilderOutputs | BuildResult;
|
||||
if (buildProcess) {
|
||||
let spinLogger;
|
||||
let spinner: Ora | undefined;
|
||||
const fullLogs: string[] = [];
|
||||
buildProcess.send({
|
||||
type: 'build',
|
||||
builderName: pkg.name,
|
||||
buildParams,
|
||||
});
|
||||
|
||||
if (isInitialBuild && !debug && process.stdout.isTTY) {
|
||||
const logTitle = `${chalk.bold(
|
||||
`Preparing ${chalk.underline(entrypoint)} for build`
|
||||
)}:`;
|
||||
spinner = ora(logTitle).start();
|
||||
|
||||
spinLogger = (data: Buffer) => {
|
||||
const rawLog = stripAnsi(data.toString());
|
||||
fullLogs.push(rawLog);
|
||||
|
||||
const lines = rawLog.replace(/\s+$/, '').split('\n');
|
||||
const spinText = `${logTitle} ${lines[lines.length - 1]}`;
|
||||
const maxCols = process.stdout.columns || 80;
|
||||
const overflow = stripAnsi(spinText).length + 2 - maxCols;
|
||||
spinner!.text =
|
||||
overflow > 0 ? `${spinText.slice(0, -overflow - 3)}...` : spinText;
|
||||
};
|
||||
|
||||
buildProcess!.stdout!.on('data', spinLogger);
|
||||
buildProcess!.stderr!.on('data', spinLogger);
|
||||
} else {
|
||||
pipeChildLogging(buildProcess!);
|
||||
}
|
||||
|
||||
try {
|
||||
buildProcess.send({
|
||||
type: 'build',
|
||||
builderName: pkg.name,
|
||||
buildParams
|
||||
});
|
||||
|
||||
buildResultOrOutputs = await new Promise((resolve, reject) => {
|
||||
function onMessage({ type, result, error }: BuildMessageResult) {
|
||||
cleanup();
|
||||
if (type === 'buildResult') {
|
||||
if (result) {
|
||||
resolve(result);
|
||||
} else if (error) {
|
||||
reject(Object.assign(new Error(), error));
|
||||
}
|
||||
} else {
|
||||
reject(new Error(`Got unexpected message type: ${type}`));
|
||||
buildResultOrOutputs = await new Promise((resolve, reject) => {
|
||||
function onMessage({ type, result, error }: BuildMessageResult) {
|
||||
cleanup();
|
||||
if (type === 'buildResult') {
|
||||
if (result) {
|
||||
resolve(result);
|
||||
} else if (error) {
|
||||
reject(Object.assign(new Error(), error));
|
||||
}
|
||||
} else {
|
||||
reject(new Error(`Got unexpected message type: ${type}`));
|
||||
}
|
||||
function onExit(code: number | null, signal: string | null) {
|
||||
cleanup();
|
||||
const err = new Error(
|
||||
`Builder exited with ${signal || code} before sending build result`
|
||||
);
|
||||
reject(err);
|
||||
}
|
||||
function cleanup() {
|
||||
buildProcess!.removeListener('exit', onExit);
|
||||
buildProcess!.removeListener('message', onMessage);
|
||||
}
|
||||
buildProcess!.on('exit', onExit);
|
||||
buildProcess!.on('message', onMessage);
|
||||
});
|
||||
} catch (err) {
|
||||
if (spinner) {
|
||||
spinner.stop();
|
||||
spinner = undefined;
|
||||
console.log(fullLogs.join(''));
|
||||
}
|
||||
throw err;
|
||||
} finally {
|
||||
if (spinLogger) {
|
||||
buildProcess.stdout!.removeListener('data', spinLogger);
|
||||
buildProcess.stderr!.removeListener('data', spinLogger);
|
||||
function onExit(code: number | null, signal: string | null) {
|
||||
cleanup();
|
||||
const err = new Error(
|
||||
`Builder exited with ${signal || code} before sending build result`
|
||||
);
|
||||
reject(err);
|
||||
}
|
||||
if (spinner) {
|
||||
spinner.stop();
|
||||
function cleanup() {
|
||||
buildProcess!.removeListener('exit', onExit);
|
||||
buildProcess!.removeListener('message', onMessage);
|
||||
}
|
||||
pipeChildLogging(buildProcess!);
|
||||
}
|
||||
buildProcess!.on('exit', onExit);
|
||||
buildProcess!.on('message', onMessage);
|
||||
});
|
||||
} else {
|
||||
buildResultOrOutputs = await builder.build(buildParams);
|
||||
}
|
||||
@@ -260,7 +217,11 @@ export async function executeBuild(
|
||||
result = {
|
||||
output: buildResultOrOutputs as BuilderOutputs,
|
||||
routes: [],
|
||||
watch: []
|
||||
watch: [],
|
||||
distPath:
|
||||
typeof buildResultOrOutputs.distPath === 'string'
|
||||
? buildResultOrOutputs.distPath
|
||||
: undefined,
|
||||
};
|
||||
} else {
|
||||
result = buildResultOrOutputs as BuildResult;
|
||||
@@ -346,9 +307,9 @@ export async function executeBuild(
|
||||
...nowConfig.env,
|
||||
...asset.environment,
|
||||
...env,
|
||||
NOW_REGION: 'dev1'
|
||||
}
|
||||
}
|
||||
NOW_REGION: 'dev1',
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -382,7 +343,7 @@ export async function getBuildMatches(
|
||||
return matches;
|
||||
}
|
||||
|
||||
const noMatches: BuildConfig[] = [];
|
||||
const noMatches: Builder[] = [];
|
||||
const builds = nowConfig.builds || [{ src: '**', use: '@now/static' }];
|
||||
|
||||
for (const buildConfig of builds) {
|
||||
@@ -420,7 +381,7 @@ export async function getBuildMatches(
|
||||
builderWithPkg,
|
||||
buildOutput: {},
|
||||
buildResults: new Map(),
|
||||
buildTimestamp: 0
|
||||
buildTimestamp: 0,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -442,3 +403,27 @@ export async function getBuildMatches(
|
||||
|
||||
return matches;
|
||||
}
|
||||
|
||||
export async function shutdownBuilder(
|
||||
match: BuildMatch,
|
||||
{ debug }: Output
|
||||
): Promise<void> {
|
||||
const ops: Promise<void>[] = [];
|
||||
|
||||
if (match.buildProcess) {
|
||||
debug(`Killing builder sub-process with PID ${match.buildProcess.pid}`);
|
||||
ops.push(treeKill(match.buildProcess.pid));
|
||||
delete match.buildProcess;
|
||||
}
|
||||
|
||||
if (match.buildOutput) {
|
||||
for (const asset of Object.values(match.buildOutput)) {
|
||||
if (asset.type === 'Lambda' && asset.fn) {
|
||||
debug(`Shutting down Lambda function`);
|
||||
ops.push(asset.fn.destroy());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(ops);
|
||||
}
|
||||
|
||||
@@ -12,13 +12,13 @@ export const httpStatusDescriptionMap = new Map([
|
||||
[502, 'BAD_GATEWAY'],
|
||||
[503, 'SERVICE_UNAVAILABLE'],
|
||||
[504, 'GATEWAY_TIMEOUT'],
|
||||
[508, 'INFINITE_LOOP']
|
||||
[508, 'INFINITE_LOOP'],
|
||||
]);
|
||||
|
||||
export const errorMessageMap = new Map([
|
||||
[400, 'Bad request'],
|
||||
[402, 'Payment required'],
|
||||
[403, 'You don\'t have the required permissions'],
|
||||
[403, "You don't have the required permissions"],
|
||||
[404, 'The page could not be found'],
|
||||
[405, 'Method not allowed'],
|
||||
[410, 'The deployment has been removed'],
|
||||
@@ -28,7 +28,7 @@ export const errorMessageMap = new Map([
|
||||
[501, 'Not implemented'],
|
||||
[503, 'The deployment is currently unavailable'],
|
||||
[504, 'An error occurred with your deployment'],
|
||||
[508, 'Infinite loop detected']
|
||||
[508, 'Infinite loop detected'],
|
||||
]);
|
||||
|
||||
interface ErrorMessage {
|
||||
@@ -40,20 +40,20 @@ interface ErrorMessage {
|
||||
const appError = {
|
||||
title: 'An error occurred with this application.',
|
||||
subtitle: 'This is an error with the application itself, not the platform.',
|
||||
app_error: true
|
||||
app_error: true,
|
||||
};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const infrastructureError = {
|
||||
title: 'An internal error occurred with ZEIT Now.',
|
||||
subtitle: 'This is an error with the platform itself, not the application.',
|
||||
app_error: false
|
||||
app_error: false,
|
||||
};
|
||||
|
||||
const pageNotFoundError = {
|
||||
title: 'The page could not be found.',
|
||||
subtitle: 'The page could not be found in the application.',
|
||||
app_error: true
|
||||
app_error: true,
|
||||
};
|
||||
|
||||
export function generateErrorMessage(
|
||||
@@ -68,7 +68,7 @@ export function generateErrorMessage(
|
||||
}
|
||||
return {
|
||||
title: errorMessageMap.get(statusCode) || 'Error occurred',
|
||||
app_error: false
|
||||
app_error: false,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
11
packages/now-cli/src/util/dev/get-bundled-builders.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export function getBundledBuilders() {
|
||||
return [
|
||||
'@now/go',
|
||||
'@now/next',
|
||||
'@now/node',
|
||||
'@now/ruby',
|
||||
'@now/python',
|
||||
'@now/static-build',
|
||||
'@now/build-utils',
|
||||
];
|
||||
}
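`getBundledBuilders` replaces the old list derived from `devDependencies`; builder-cache uses it to decide which builder specs ship bundled with the CLI. A small usage sketch (standalone, for illustration only):

```ts
import { getBundledBuilders } from './get-bundled-builders';

// Sketch of the membership check used in builder-cache: a builder spec only
// skips installation when it is one of the bundled packages.
function isBundled(name: string): boolean {
  return getBundledBuilders().includes(name);
}

console.log(isBundled('@now/node')); // true
console.log(isBundled('@now/rust')); // false
```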
@@ -98,7 +98,7 @@ export default async function(
|
||||
headers: combinedHeaders,
|
||||
uri_args: query,
|
||||
matched_route: routeConfig,
|
||||
matched_route_idx: idx
|
||||
matched_route_idx: idx,
|
||||
};
|
||||
break;
|
||||
} else {
|
||||
@@ -114,7 +114,7 @@ export default async function(
|
||||
headers: combinedHeaders,
|
||||
uri_args: query,
|
||||
matched_route: routeConfig,
|
||||
matched_route_idx: idx
|
||||
matched_route_idx: idx,
|
||||
};
|
||||
break;
|
||||
}
|
||||
@@ -127,7 +127,7 @@ export default async function(
|
||||
found: false,
|
||||
dest: reqPathname,
|
||||
uri_args: query,
|
||||
headers: combinedHeaders
|
||||
headers: combinedHeaders,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -16,10 +16,11 @@ import { basename, dirname, extname, join } from 'path';
|
||||
import directoryTemplate from 'serve-handler/src/directory';
|
||||
|
||||
import {
|
||||
Builder,
|
||||
FileFsRef,
|
||||
PackageJson,
|
||||
detectBuilders,
|
||||
detectRoutes
|
||||
detectRoutes,
|
||||
} from '@now/build-utils';
|
||||
|
||||
import { once } from '../once';
|
||||
@@ -33,7 +34,7 @@ import { version as cliVersion } from '../../../package.json';
|
||||
import {
|
||||
createIgnore,
|
||||
staticFiles as getFiles,
|
||||
getAllProjectFiles
|
||||
getAllProjectFiles,
|
||||
} from '../get-files';
|
||||
import { validateNowConfigBuilds, validateNowConfigRoutes } from './validate';
|
||||
|
||||
@@ -41,12 +42,12 @@ import isURL from './is-url';
|
||||
import devRouter from './router';
|
||||
import getMimeType from './mime-type';
|
||||
import { getYarnPath } from './yarn-installer';
|
||||
import { executeBuild, getBuildMatches } from './builder';
|
||||
import { executeBuild, getBuildMatches, shutdownBuilder } from './builder';
|
||||
import { generateErrorMessage, generateHttpStatusDescription } from './errors';
|
||||
import {
|
||||
builderDirPromise,
|
||||
installBuilders,
|
||||
updateBuilders
|
||||
updateBuilders,
|
||||
} from './builder-cache';
|
||||
|
||||
// HTML templates
|
||||
@@ -60,7 +61,6 @@ import {
|
||||
EnvConfig,
|
||||
NowConfig,
|
||||
DevServerOptions,
|
||||
BuildConfig,
|
||||
BuildMatch,
|
||||
BuildResult,
|
||||
BuilderInputs,
|
||||
@@ -70,7 +70,7 @@ import {
|
||||
InvokeResult,
|
||||
ListenSpec,
|
||||
RouteConfig,
|
||||
RouteResult
|
||||
RouteResult,
|
||||
} from './types';
|
||||
|
||||
interface FSEvent {
|
||||
@@ -87,7 +87,7 @@ interface NodeRequire {
|
||||
|
||||
declare const __non_webpack_require__: NodeRequire;
|
||||
|
||||
function sortBuilders(buildA: BuildConfig, buildB: BuildConfig) {
|
||||
function sortBuilders(buildA: Builder, buildB: Builder) {
|
||||
if (buildA && buildA.use && buildA.use.startsWith('@now/static-build')) {
|
||||
return 1;
|
||||
}
|
||||
@@ -182,6 +182,20 @@ export default class DevServer {
|
||||
const filesChanged: Set<string> = new Set();
|
||||
const filesRemoved: Set<string> = new Set();
|
||||
|
||||
const distPaths: string[] = [];
|
||||
|
||||
for (const buildMatch of this.buildMatches.values()) {
|
||||
for (const buildResult of buildMatch.buildResults.values()) {
|
||||
if (buildResult.distPath) {
|
||||
distPaths.push(buildResult.distPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
events = events.filter(event =>
|
||||
distPaths.every(distPath => !event.path.startsWith(distPath))
|
||||
);
|
||||
|
||||
// First, update the `files` mapping of source files
|
||||
for (const event of events) {
|
||||
if (event.type === 'add') {
|
||||
@@ -255,9 +269,7 @@ export default class DevServer {
|
||||
});
|
||||
} else {
|
||||
this.output.debug(
|
||||
`Not rebuilding because \`shouldServe()\` returned \`false\` for "${
|
||||
match.use
|
||||
}" request path "${requestPath}"`
|
||||
`Not rebuilding because \`shouldServe()\` returned \`false\` for "${match.use}" request path "${requestPath}"`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -335,13 +347,18 @@ export default class DevServer {
|
||||
}
|
||||
|
||||
// Delete build matches that no longer exist
|
||||
const ops: Promise<void>[] = [];
|
||||
for (const src of this.buildMatches.keys()) {
|
||||
if (!sources.includes(src)) {
|
||||
this.output.debug(`Removing build match for "${src}"`);
|
||||
// TODO: shutdown lambda functions
|
||||
const match = this.buildMatches.get(src);
|
||||
if (match) {
|
||||
ops.push(shutdownBuilder(match, this.output));
|
||||
}
|
||||
this.buildMatches.delete(src);
|
||||
}
|
||||
}
|
||||
await Promise.all(ops);
|
||||
|
||||
// Add the new matches to the `buildMatches` map
|
||||
const blockingBuilds: Promise<void>[] = [];
|
||||
@@ -376,7 +393,7 @@ export default class DevServer {
|
||||
// Sort build matches to make sure `@now/static-build` is always last
|
||||
this.buildMatches = new Map(
|
||||
[...this.buildMatches.entries()].sort((matchA, matchB) => {
|
||||
return sortBuilders(matchA[1] as BuildConfig, matchB[1] as BuildConfig);
|
||||
return sortBuilders(matchA[1] as Builder, matchB[1] as Builder);
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -413,10 +430,11 @@ export default class DevServer {
|
||||
for (const buildMatch of this.buildMatches.values()) {
|
||||
const {
|
||||
src,
|
||||
builderWithPkg: { package: pkg }
|
||||
builderWithPkg: { package: pkg },
|
||||
} = buildMatch;
|
||||
if (pkg.name === '@now/static') continue;
|
||||
if (updatedBuilders.includes(pkg.name)) {
|
||||
if (pkg.name && updatedBuilders.includes(pkg.name)) {
|
||||
shutdownBuilder(buildMatch, this.output);
|
||||
this.buildMatches.delete(src);
|
||||
this.output.debug(`Invalidated build match for "${src}"`);
|
||||
}
|
||||
@@ -441,7 +459,7 @@ export default class DevServer {
|
||||
}
|
||||
}
|
||||
try {
|
||||
this.validateEnvConfig(fileName, base || {}, env);
|
||||
return this.validateEnvConfig(fileName, base || {}, env);
|
||||
} catch (err) {
|
||||
if (err instanceof MissingDotenvVarsError) {
|
||||
this.output.error(err.message);
|
||||
@@ -450,7 +468,7 @@ export default class DevServer {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
return { ...base, ...env };
|
||||
return {};
|
||||
}
|
||||
|
||||
async getNowConfig(
|
||||
@@ -516,8 +534,8 @@ export default class DevServer {
|
||||
`filtered out ${allFiles.length - files.length} files`
|
||||
);
|
||||
|
||||
const { builders, errors } = await detectBuilders(files, pkg, {
|
||||
tag: getDistTag(cliVersion) === 'canary' ? 'canary' : 'latest'
|
||||
const { builders, warnings, errors } = await detectBuilders(files, pkg, {
|
||||
tag: getDistTag(cliVersion) === 'canary' ? 'canary' : 'latest',
|
||||
});
|
||||
|
||||
if (errors) {
|
||||
@@ -525,6 +543,10 @@ export default class DevServer {
|
||||
await this.exit();
|
||||
}
|
||||
|
||||
if (warnings && warnings.length > 0) {
|
||||
warnings.forEach(warning => this.output.warn(warning.message));
|
||||
}
|
||||
|
||||
if (builders) {
|
||||
const { defaultRoutes, error: routesError } = await detectRoutes(
|
||||
files,
|
||||
@@ -608,7 +630,9 @@ export default class DevServer {
|
||||
type: string,
|
||||
env: EnvConfig = {},
|
||||
localEnv: EnvConfig = {}
|
||||
): void {
|
||||
): EnvConfig {
|
||||
// Validate if there are any missing env vars defined in `now.json`,
|
||||
// but not in the `.env` / `.build.env` file
|
||||
const missing: string[] = Object.entries(env)
|
||||
.filter(
|
||||
([name, value]) =>
|
||||
@@ -617,9 +641,36 @@ export default class DevServer {
|
||||
!hasOwnProperty(localEnv, name)
|
||||
)
|
||||
.map(([name]) => name);
|
||||
if (missing.length >= 1) {
|
||||
|
||||
if (missing.length > 0) {
|
||||
throw new MissingDotenvVarsError(type, missing);
|
||||
}
|
||||
|
||||
const merged: EnvConfig = { ...env, ...localEnv };
|
||||
|
||||
// Validate that the env var name matches what AWS Lambda allows:
|
||||
// - https://docs.aws.amazon.com/lambda/latest/dg/env_variables.html
|
||||
let hasInvalidName = false;
|
||||
for (const key of Object.keys(merged)) {
|
||||
if (!/^[a-zA-Z][a-zA-Z0-9_]*$/.test(key)) {
|
||||
this.output.warn(
|
||||
`Ignoring ${type
|
||||
.split('.')
|
||||
.slice(1)
|
||||
.reverse()
|
||||
.join(' ')} var ${JSON.stringify(key)} because name is invalid`
|
||||
);
|
||||
hasInvalidName = true;
|
||||
delete merged[key];
|
||||
}
|
||||
}
|
||||
if (hasInvalidName) {
|
||||
this.output.log(
|
||||
'Env var names must start with letters, and can only contain alphanumeric characters and underscores'
|
||||
);
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
/**
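The reworked `validateEnvConfig` above now merges the `now.json` env with the local `.env` values and drops any name AWS Lambda would reject. The same check isolated as a standalone helper, a sketch rather than the `DevServer` method itself:

```ts
// Sketch of the name filter applied above; AWS Lambda only accepts env names
// matching /^[a-zA-Z][a-zA-Z0-9_]*$/.
const LAMBDA_ENV_NAME = /^[a-zA-Z][a-zA-Z0-9_]*$/;

function mergeEnv(
  env: Record<string, string | undefined>,
  localEnv: Record<string, string | undefined>
): Record<string, string | undefined> {
  const merged = { ...env, ...localEnv };
  for (const key of Object.keys(merged)) {
    if (!LAMBDA_ENV_NAME.test(key)) {
      // Mirrors the DevServer warning: invalid names are dropped.
      delete merged[key];
    }
  }
  return merged;
}

// e.g. mergeEnv({ API_URL: 'x' }, { 'BAD-NAME': 'y' }) -> { API_URL: 'x' }
```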
@@ -652,7 +703,7 @@ export default class DevServer {
const nowConfigBuild = nowConfig.build || {};
const [env, buildEnv] = await Promise.all([
this.getLocalEnv('.env', nowConfig.env),
this.getLocalEnv('.env.build', nowConfigBuild.env)
this.getLocalEnv('.env.build', nowConfigBuild.env),
]);
Object.assign(process.env, buildEnv);
this.env = env;
@@ -670,8 +721,8 @@ export default class DevServer {

const builders: Set<string> = new Set(
(nowConfig.builds || [])
.filter((b: BuildConfig) => b.use)
.map((b: BuildConfig) => b.use as string)
.filter((b: Builder) => b.use)
.map((b: Builder) => b.use as string)
);

await installBuilders(builders, this.yarnPath, this.output);
@@ -684,10 +735,12 @@ export default class DevServer {
this.yarnPath,
this.output
)
.then(updatedBuilders =>
this.invalidateBuildMatches(nowConfig, updatedBuilders)
)
.then(updatedBuilders => {
this.updateBuildersPromise = null;
this.invalidateBuildMatches(nowConfig, updatedBuilders);
})
.catch(err => {
this.updateBuildersPromise = null;
this.output.error(`Failed to update builders: ${err.message}`);
this.output.debug(err.stack);
});
@@ -716,7 +769,7 @@ export default class DevServer {
ignoreInitial: true,
useFsEvents: false,
usePolling: false,
persistent: true
persistent: true,
});
this.watcher.on('add', (path: string) => {
this.enqueueFsEvent('add', path);
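Aside: the watcher hunk above only changes a trailing comma, but for context, this is roughly how the chokidar options it touches fit together. The watched path and the handler body here are placeholders; only the option names come from the diff.

```ts
// Rough sketch of the chokidar usage the hunk above is editing; the path and
// handler are placeholders, and the options mirror the ones visible in the diff.
import chokidar from 'chokidar';

const watcher = chokidar.watch('.', {
  ignoreInitial: true, // do not fire events for files that already exist
  useFsEvents: false,
  usePolling: false,
  persistent: true,
});

watcher.on('add', (path: string) => {
  console.log(`file added: ${path}`);
});
```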
@@ -786,22 +839,18 @@ export default class DevServer {
|
||||
const ops: Promise<void>[] = [];
|
||||
|
||||
for (const match of this.buildMatches.values()) {
|
||||
if (!match.buildOutput) continue;
|
||||
|
||||
for (const asset of Object.values(match.buildOutput)) {
|
||||
if (asset.type === 'Lambda' && asset.fn) {
|
||||
ops.push(asset.fn.destroy());
|
||||
}
|
||||
}
|
||||
ops.push(shutdownBuilder(match, this.output));
|
||||
}
|
||||
|
||||
ops.push(close(this.server));
|
||||
|
||||
if (this.watcher) {
|
||||
this.output.debug(`Closing file watcher`);
|
||||
this.watcher.close();
|
||||
}
|
||||
|
||||
if (this.updateBuildersPromise) {
|
||||
this.output.debug(`Waiting for builders update to complete`);
|
||||
ops.push(this.updateBuildersPromise);
|
||||
}
|
||||
|
||||
@@ -856,8 +905,8 @@ export default class DevServer {
|
||||
const json = JSON.stringify({
|
||||
error: {
|
||||
code: statusCode,
|
||||
message: errorMessage.title
|
||||
}
|
||||
message: errorMessage.title,
|
||||
},
|
||||
});
|
||||
body = `${json}\n`;
|
||||
} else if (accept.includes('html')) {
|
||||
@@ -870,7 +919,7 @@ export default class DevServer {
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
error_code,
|
||||
now_id: nowRequestId
|
||||
now_id: nowRequestId,
|
||||
});
|
||||
} else if (statusCode === 502) {
|
||||
view = errorTemplate502({
|
||||
@@ -878,19 +927,19 @@ export default class DevServer {
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
error_code,
|
||||
now_id: nowRequestId
|
||||
now_id: nowRequestId,
|
||||
});
|
||||
} else {
|
||||
view = errorTemplate({
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
now_id: nowRequestId
|
||||
now_id: nowRequestId,
|
||||
});
|
||||
}
|
||||
body = errorTemplateBase({
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
view
|
||||
view,
|
||||
});
|
||||
} else {
|
||||
res.setHeader('content-type', 'text/plain; charset=utf-8');
|
||||
@@ -917,7 +966,7 @@ export default class DevServer {
|
||||
res.setHeader('content-type', 'application/json');
|
||||
const json = JSON.stringify({
|
||||
redirect: location,
|
||||
status: String(statusCode)
|
||||
status: String(statusCode),
|
||||
});
|
||||
body = `${json}\n`;
|
||||
} else if (accept.includes('html')) {
|
||||
@@ -949,7 +998,7 @@ export default class DevServer {
|
||||
server: 'now',
|
||||
'x-now-trace': 'dev1',
|
||||
'x-now-id': nowRequestId,
|
||||
'x-now-cache': 'MISS'
|
||||
'x-now-cache': 'MISS',
|
||||
};
|
||||
for (const [name, value] of Object.entries(allHeaders)) {
|
||||
res.setHeader(name, value);
|
||||
@@ -976,7 +1025,7 @@ export default class DevServer {
|
||||
'x-now-deployment-url': host,
|
||||
'x-now-id': nowRequestId,
|
||||
'x-now-log-id': nowRequestId.split('-')[2],
|
||||
'x-zeit-co-forwarded-for': ip
|
||||
'x-zeit-co-forwarded-for': ip,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1057,7 +1106,7 @@ export default class DevServer {
|
||||
}
|
||||
|
||||
const method = req.method || 'GET';
|
||||
this.output.log(`${chalk.bold(method)} ${req.url}`);
|
||||
this.output.debug(`${chalk.bold(method)} ${req.url}`);
|
||||
|
||||
try {
|
||||
const nowConfig = await this.getNowConfig();
|
||||
@@ -1183,9 +1232,7 @@ export default class DevServer {
|
||||
Object.assign(origUrl.query, uri_args);
|
||||
const newUrl = url.format(origUrl);
|
||||
this.output.debug(
|
||||
`Checking build result's ${
|
||||
buildResult.routes.length
|
||||
} \`routes\` to match ${newUrl}`
|
||||
`Checking build result's ${buildResult.routes.length} \`routes\` to match ${newUrl}`
|
||||
);
|
||||
const matchedRoute = await devRouter(
|
||||
newUrl,
|
||||
@@ -1238,17 +1285,17 @@ export default class DevServer {
|
||||
headers: [
|
||||
{
|
||||
key: 'Content-Type',
|
||||
value: getMimeType(assetKey)
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
value: getMimeType(assetKey),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
case 'FileBlob':
|
||||
const headers: http.OutgoingHttpHeaders = {
|
||||
'Content-Length': asset.data.length,
|
||||
'Content-Type': getMimeType(assetKey)
|
||||
'Content-Type': getMimeType(assetKey),
|
||||
};
|
||||
this.setResponseHeaders(res, nowRequestId, headers);
|
||||
res.end(asset.data);
|
||||
@@ -1273,7 +1320,7 @@ export default class DevServer {
|
||||
Object.assign(parsed.query, uri_args);
|
||||
const path = url.format({
|
||||
pathname: parsed.pathname,
|
||||
query: parsed.query
|
||||
query: parsed.query,
|
||||
});
|
||||
|
||||
const body = await rawBody(req);
|
||||
@@ -1283,7 +1330,7 @@ export default class DevServer {
|
||||
path,
|
||||
headers: this.getNowProxyHeaders(req, nowRequestId),
|
||||
encoding: 'base64',
|
||||
body: body.toString('base64')
|
||||
body: body.toString('base64'),
|
||||
};
|
||||
|
||||
this.output.debug(`Invoking lambda: "${assetKey}" with ${path}`);
|
||||
@@ -1292,7 +1339,7 @@ export default class DevServer {
|
||||
try {
|
||||
result = await asset.fn<InvokeResult>({
|
||||
Action: 'Invoke',
|
||||
body: JSON.stringify(payload)
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
@@ -1379,7 +1426,7 @@ export default class DevServer {
|
||||
relative: href,
|
||||
ext,
|
||||
title: href,
|
||||
base
|
||||
base,
|
||||
};
|
||||
});
|
||||
|
||||
@@ -1391,13 +1438,13 @@ export default class DevServer {
|
||||
const paths = [
|
||||
{
|
||||
name: directory,
|
||||
url: requestPath
|
||||
}
|
||||
url: requestPath,
|
||||
},
|
||||
];
|
||||
const directoryHtml = directoryTemplate({
|
||||
files,
|
||||
paths,
|
||||
directory
|
||||
directory,
|
||||
});
|
||||
this.setResponseHeaders(res, nowRequestId);
|
||||
res.setHeader('Content-Type', 'text/html; charset=utf-8');
|
||||
@@ -1409,25 +1456,6 @@ export default class DevServer {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Serve project directory as a static deployment.
|
||||
*/
|
||||
serveProjectAsStatic = async (
|
||||
req: http.IncomingMessage,
|
||||
res: http.ServerResponse,
|
||||
nowRequestId: string
|
||||
) => {
|
||||
const filePath = req.url ? req.url.replace(/^\//, '') : '';
|
||||
|
||||
if (filePath && typeof this.files[filePath] === 'undefined') {
|
||||
await this.send404(req, res, nowRequestId);
|
||||
return;
|
||||
}
|
||||
|
||||
this.setResponseHeaders(res, nowRequestId);
|
||||
return serveStaticFile(req, res, this.cwd, { cleanUrls: true });
|
||||
};
|
||||
|
||||
async hasFilesystem(dest: string): Promise<boolean> {
|
||||
const requestPath = dest.replace(/^\//, '');
|
||||
if (
|
||||
@@ -1459,7 +1487,7 @@ function proxyPass(
|
||||
ws: true,
|
||||
xfwd: true,
|
||||
ignorePath: true,
|
||||
target: dest
|
||||
target: dest,
|
||||
});
|
||||
|
||||
proxy.on('error', (error: NodeJS.ErrnoException) => {
|
||||
@@ -1490,7 +1518,7 @@ function serveStaticFile(
|
||||
public: cwd,
|
||||
cleanUrls: false,
|
||||
etag: true,
|
||||
...opts
|
||||
...opts,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1546,7 +1574,7 @@ async function shouldServe(
|
||||
const {
|
||||
src: entrypoint,
|
||||
config,
|
||||
builderWithPkg: { builder }
|
||||
builderWithPkg: { builder },
|
||||
} = match;
|
||||
if (typeof builder.shouldServe === 'function') {
|
||||
const shouldServe = await builder.shouldServe({
|
||||
@@ -1554,7 +1582,7 @@ async function shouldServe(
|
||||
files,
|
||||
config,
|
||||
requestPath,
|
||||
workPath: devServer.cwd
|
||||
workPath: devServer.cwd,
|
||||
});
|
||||
if (shouldServe) {
|
||||
return true;
|
||||
|
||||
@@ -5,7 +5,7 @@ export const version = 2;

export function build({ files, entrypoint }: BuilderParams): BuildResult {
const output = {
[entrypoint]: files[entrypoint]
[entrypoint]: files[entrypoint],
};
const watch = [entrypoint];

@@ -15,7 +15,7 @@ export function build({ files, entrypoint }: BuilderParams): BuildResult {
export function shouldServe({
entrypoint,
files,
requestPath
requestPath,
}: ShouldServeParams) {
if (isIndex(entrypoint)) {
const indexPath = join(requestPath, basename(entrypoint));

@@ -1,7 +1,13 @@
import http from 'http';
import { ChildProcess } from 'child_process';
import { Lambda as FunLambda } from '@zeit/fun';
import { FileBlob, FileFsRef, Lambda } from '@now/build-utils';
import {
Builder as BuildConfig,
FileBlob,
FileFsRef,
Lambda,
PackageJson,
} from '@now/build-utils';
import { Output } from '../output';

export interface DevServerOptions {
@@ -13,12 +19,6 @@ export interface EnvConfig {
|
||||
[name: string]: string | undefined;
|
||||
}
|
||||
|
||||
export interface BuildConfig {
|
||||
src: string;
|
||||
use?: string;
|
||||
config?: object;
|
||||
}
|
||||
|
||||
export interface BuildMatch extends BuildConfig {
|
||||
builderWithPkg: BuilderWithPackage;
|
||||
buildOutput: BuilderOutputs;
|
||||
@@ -119,6 +119,7 @@ export interface BuildResult {
|
||||
output: BuilderOutputs;
|
||||
routes: RouteConfig[];
|
||||
watch: string[];
|
||||
distPath?: string;
|
||||
}
|
||||
|
||||
export interface ShouldServeParams {
|
||||
@@ -129,18 +130,10 @@ export interface ShouldServeParams {
|
||||
workPath: string;
|
||||
}
|
||||
|
||||
export interface Package {
|
||||
name: string;
|
||||
version: string;
|
||||
scripts?: { [key: string]: string };
|
||||
dependencies?: { [name: string]: string };
|
||||
devDependencies?: { [name: string]: string };
|
||||
}
|
||||
|
||||
export interface BuilderWithPackage {
|
||||
runInProcess?: boolean;
|
||||
builder: Readonly<Builder>;
|
||||
package: Readonly<Package>;
|
||||
package: Readonly<PackageJson>;
|
||||
}
|
||||
|
||||
export interface HttpHeadersConfig {
|
||||
|
||||
@@ -1,5 +1,5 @@
import Ajv from 'ajv';
import { schema as routesSchema } from '@now/routing-utils';
import { routesSchema } from '@now/routing-utils';
import { NowConfig } from './types';

const ajv = new Ajv();
@@ -16,16 +16,16 @@ const buildsSchema = {
src: {
type: 'string',
minLength: 1,
maxLength: 4096
maxLength: 4096,
},
use: {
type: 'string',
minLength: 3,
maxLength: 256
maxLength: 256,
},
config: { type: 'object' }
}
}
config: { type: 'object' },
},
},
};

const validateBuilds = ajv.compile(buildsSchema);

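Aside: the schema hunk above is again mostly trailing commas; as a reminder of how these compiled schemas get used, here is a hedged sketch of validating a `builds` array with Ajv. The `type: 'array'` / `items` wrapper is assumed from context, since the hunk only shows the `properties` block, and the sample build entry is made up.

```ts
// Sketch of compiling and running an Ajv validator like the one in the hunk above.
// The outer array/items wrapper and the sample data are assumptions.
import Ajv from 'ajv';

const ajv = new Ajv();

const buildsSchema = {
  type: 'array',
  items: {
    type: 'object',
    properties: {
      src: { type: 'string', minLength: 1, maxLength: 4096 },
      use: { type: 'string', minLength: 3, maxLength: 256 },
      config: { type: 'object' },
    },
  },
};

const validateBuilds = ajv.compile(buildsSchema);

// `validateBuilds` returns a boolean and exposes details on `.errors`
if (!validateBuilds([{ src: 'api/index.js', use: '@now/node' }])) {
  console.error(validateBuilds.errors);
}
```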
@@ -5,7 +5,7 @@ import {
|
||||
writeFile,
|
||||
statSync,
|
||||
chmodSync,
|
||||
createReadStream
|
||||
createReadStream,
|
||||
} from 'fs-extra';
|
||||
import pipe from 'promisepipe';
|
||||
import { join } from 'path';
|
||||
@@ -63,7 +63,7 @@ async function installYarn(output: Output): Promise<string> {
|
||||
output.debug(`Downloading ${YARN_URL}`);
|
||||
const response = await fetch(YARN_URL, {
|
||||
compress: false,
|
||||
redirect: 'follow'
|
||||
redirect: 'follow',
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
@@ -90,7 +90,7 @@ async function installYarn(output: Output): Promise<string> {
|
||||
'@echo off',
|
||||
'@SETLOCAL',
|
||||
'@SET PATHEXT=%PATHEXT:;.JS;=;%',
|
||||
'node "%~dp0\\yarn" %*'
|
||||
'node "%~dp0\\yarn" %*',
|
||||
].join('\r\n')
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,22 +1,24 @@
import chalk from 'chalk';
import { readFileSync } from 'fs';
import { resolve } from 'path';
import { Response } from 'fetch-h2'
import { Response } from 'node-fetch';
import { DomainNotFound, InvalidDomain } from '../errors-ts';
import Client from '../client';
import wait from '../output/wait';

type JSONResponse = {
recordIds: string[]
}
recordIds: string[];
};

export default async function importZonefile(
client: Client,
contextName: string,
domain: string,
zonefilePath: string,
zonefilePath: string
) {
const cancelWait = wait(`Importing Zone file for domain ${domain} under ${chalk.bold(contextName)}`);
const cancelWait = wait(
`Importing Zone file for domain ${domain} under ${chalk.bold(contextName)}`
);
const zonefile = readFileSync(resolve(zonefilePath), 'utf8');

try {
@@ -27,7 +29,7 @@ export default async function importZonefile(
json: false,
});

const { recordIds } = await res.json() as JSONResponse;
const { recordIds } = (await res.json()) as JSONResponse;
cancelWait();
return recordIds;
} catch (error) {

@@ -1,5 +1,5 @@
import bytes from 'bytes';
import { Response } from 'fetch-h2';
import { Response } from 'node-fetch';
import { NowError } from './now-error';
import param from './output/param';
import cmd from './output/cmd';
@@ -53,7 +53,7 @@ export class TeamDeleted extends NowError<'TEAM_DELETED', {}> {
message: `Your team was deleted. You can switch to a different one using ${param(
'now switch'
)}.`,
meta: {}
meta: {},
});
}
}
@@ -67,7 +67,7 @@ export class InvalidToken extends NowError<'NOT_AUTHORIZED', {}> {
super({
code: `NOT_AUTHORIZED`,
message: `The specified token is not valid`,
meta: {}
meta: {},
});
}
}
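Aside: most of the remaining hunks in this file apply the same trailing-comma change to `NowError` subclasses. For orientation, this is roughly the shape of those classes; the `NowError` base sketched here is a stand-in inferred from the `super({ code, meta, message })` call sites, not the CLI's actual implementation in `./now-error`.

```ts
// Stand-in base class inferred from the call sites in this diff; the real
// NowError may differ in details.
class NowError<Code extends string, Meta> extends Error {
  code: Code;
  meta: Meta;

  constructor({ code, meta, message }: { code: Code; meta: Meta; message: string }) {
    super(message);
    this.code = code;
    this.meta = meta;
  }
}

// One of the classes touched by this diff, reconstructed from its hunk.
class DomainNotFound extends NowError<'DOMAIN_NOT_FOUND', { domain: string }> {
  constructor(domain: string) {
    super({
      code: 'DOMAIN_NOT_FOUND',
      meta: { domain },
      message: `The domain ${domain} can't be found.`,
    });
  }
}
```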
@@ -81,7 +81,7 @@ export class MissingUser extends NowError<'MISSING_USER', {}> {
|
||||
super({
|
||||
code: 'MISSING_USER',
|
||||
message: `Not able to load user, missing from response`,
|
||||
meta: {}
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -98,7 +98,7 @@ export class DomainAlreadyExists extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_ALREADY_EXISTS',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} already exists under a different context.`
|
||||
message: `The domain ${domain} already exists under a different context.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -115,7 +115,7 @@ export class DomainPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_PERMISSION_DENIED',
|
||||
meta: { domain, context },
|
||||
message: `You don't have access to the domain ${domain} under ${context}.`
|
||||
message: `You don't have access to the domain ${domain} under ${context}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -128,7 +128,7 @@ export class DomainExternal extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_EXTERNAL',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} must point to zeit.world.`
|
||||
message: `The domain ${domain} must point to zeit.world.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -143,7 +143,7 @@ export class SourceNotFound extends NowError<'SOURCE_NOT_FOUND', {}> {
|
||||
meta: {},
|
||||
message: `Not able to purchase. Please add a payment method using ${cmd(
|
||||
'now billing add'
|
||||
)}.`
|
||||
)}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -156,7 +156,7 @@ export class InvalidTransferAuthCode extends NowError<
|
||||
super({
|
||||
code: 'INVALID_TRANSFER_AUTH_CODE',
|
||||
meta: { domain, authCode },
|
||||
message: `The provided auth code does not match with the one expected by the current registar`
|
||||
message: `The provided auth code does not match with the one expected by the current registar`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -169,7 +169,7 @@ export class DomainRegistrationFailed extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_REGISTRATION_FAILED',
|
||||
meta: { domain },
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -185,7 +185,7 @@ export class DomainNotFound extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_FOUND',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} can't be found.`
|
||||
message: `The domain ${domain} can't be found.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -198,7 +198,7 @@ export class DomainNotVerified extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_VERIFIED',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} is not verified.`
|
||||
message: `The domain ${domain} is not verified.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -221,7 +221,7 @@ export class DomainVerificationFailed extends NowError<
|
||||
domain,
|
||||
nsVerification,
|
||||
txtVerification,
|
||||
purchased = false
|
||||
purchased = false,
|
||||
}: {
|
||||
domain: string;
|
||||
nsVerification: NSVerificationError;
|
||||
@@ -231,7 +231,7 @@ export class DomainVerificationFailed extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_VERIFICATION_FAILED',
|
||||
meta: { domain, nsVerification, txtVerification, purchased },
|
||||
message: `We can't verify the domain ${domain}. Both Name Servers and DNS TXT verifications failed.`
|
||||
message: `We can't verify the domain ${domain}. Both Name Servers and DNS TXT verifications failed.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -264,7 +264,7 @@ export class DomainNsNotVerifiedForWildcard extends NowError<
|
||||
> {
|
||||
constructor({
|
||||
domain,
|
||||
nsVerification
|
||||
nsVerification,
|
||||
}: {
|
||||
domain: string;
|
||||
nsVerification: NSVerificationError;
|
||||
@@ -272,7 +272,7 @@ export class DomainNsNotVerifiedForWildcard extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NS_NOT_VERIFIED_FOR_WILDCARD',
|
||||
meta: { domain, nsVerification },
|
||||
message: `The domain ${domain} is not verified by nameservers for wildcard alias.`
|
||||
message: `The domain ${domain} is not verified by nameservers for wildcard alias.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -289,7 +289,17 @@ export class InvalidDomain extends NowError<
|
||||
super({
|
||||
code: 'INVALID_DOMAIN',
|
||||
meta: { domain },
|
||||
message: message || `The domain ${domain} is not valid.`
|
||||
message: message || `The domain ${domain} is not valid.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class NotDomainOwner extends NowError<'NOT_DOMAIN_OWNER', {}> {
|
||||
constructor(message: string) {
|
||||
super({
|
||||
code: 'NOT_DOMAIN_OWNER',
|
||||
meta: {},
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -302,7 +312,7 @@ export class InvalidDeploymentId extends NowError<
|
||||
super({
|
||||
code: 'INVALID_DEPLOYMENT_ID',
|
||||
meta: { id },
|
||||
message: `The deployment id "${id}" is not valid.`
|
||||
message: `The deployment id "${id}" is not valid.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -319,7 +329,7 @@ export class UnsupportedTLD extends NowError<
|
||||
super({
|
||||
code: 'UNSUPPORTED_TLD',
|
||||
meta: { domain },
|
||||
message: `The TLD for domain name ${domain} is not supported.`
|
||||
message: `The TLD for domain name ${domain} is not supported.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -336,7 +346,7 @@ export class DomainNotAvailable extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_AVAILABLE',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} is not available to be purchased.`
|
||||
message: `The domain ${domain} is not available to be purchased.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -353,7 +363,7 @@ export class DomainServiceNotAvailable extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_SERVICE_NOT_AVAILABLE',
|
||||
meta: { domain },
|
||||
message: `The domain purchase is unavailable, try again later.`
|
||||
message: `The domain purchase is unavailable, try again later.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -370,7 +380,7 @@ export class DomainNotTransferable extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_TRANSFERABLE',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} is not available to be transferred.`
|
||||
message: `The domain ${domain} is not available to be transferred.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -386,7 +396,7 @@ export class UnexpectedDomainPurchaseError extends NowError<
|
||||
super({
|
||||
code: 'UNEXPECTED_DOMAIN_PURCHASE_ERROR',
|
||||
meta: { domain },
|
||||
message: `An unexpected error happened while purchasing.`
|
||||
message: `An unexpected error happened while purchasing.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -399,7 +409,7 @@ export class DomainPaymentError extends NowError<'DOMAIN_PAYMENT_ERROR', {}> {
|
||||
super({
|
||||
code: 'DOMAIN_PAYMENT_ERROR',
|
||||
meta: {},
|
||||
message: `Your card was declined.`
|
||||
message: `Your card was declined.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -416,7 +426,7 @@ export class DomainPurchasePending extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_PURCHASE_PENDING',
|
||||
meta: { domain },
|
||||
message: `The domain purchase for ${domain} is pending.`
|
||||
message: `The domain purchase for ${domain} is pending.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -430,7 +440,7 @@ export class UserAborted extends NowError<'USER_ABORTED', {}> {
|
||||
super({
|
||||
code: 'USER_ABORTED',
|
||||
meta: {},
|
||||
message: `The user aborted the operation.`
|
||||
message: `The user aborted the operation.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -440,7 +450,7 @@ export class CertNotFound extends NowError<'CERT_NOT_FOUND', { id: string }> {
|
||||
super({
|
||||
code: 'CERT_NOT_FOUND',
|
||||
meta: { id },
|
||||
message: `The cert ${id} can't be found.`
|
||||
message: `The cert ${id} can't be found.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -453,7 +463,7 @@ export class CertsPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'CERTS_PERMISSION_DENIED',
|
||||
meta: { domain },
|
||||
message: `You don't have access to ${domain}'s certs under ${context}.`
|
||||
message: `You don't have access to ${domain}'s certs under ${context}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -466,7 +476,7 @@ export class CertOrderNotFound extends NowError<
|
||||
super({
|
||||
code: 'CERT_ORDER_NOT_FOUND',
|
||||
meta: { cns },
|
||||
message: `No cert order could be found for cns ${cns.join(' ,')}`
|
||||
message: `No cert order could be found for cns ${cns.join(' ,')}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -484,7 +494,7 @@ export class TooManyRequests extends NowError<
|
||||
super({
|
||||
code: 'TOO_MANY_REQUESTS',
|
||||
meta: { api, retryAfter },
|
||||
message: `Rate limited. Too many requests to the same endpoint.`
|
||||
message: `Rate limited. Too many requests to the same endpoint.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -518,7 +528,7 @@ export class CertError extends NowError<
|
||||
cns,
|
||||
code,
|
||||
message,
|
||||
helpUrl
|
||||
helpUrl,
|
||||
}: {
|
||||
cns: string[];
|
||||
code: CertErrorCode;
|
||||
@@ -528,7 +538,7 @@ export class CertError extends NowError<
|
||||
super({
|
||||
code: `CERT_ERROR`,
|
||||
meta: { cns, code, helpUrl },
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -547,7 +557,7 @@ export class CertConfigurationError extends NowError<
|
||||
message,
|
||||
external,
|
||||
type,
|
||||
helpUrl
|
||||
helpUrl,
|
||||
}: {
|
||||
cns: string[];
|
||||
message: string;
|
||||
@@ -558,7 +568,7 @@ export class CertConfigurationError extends NowError<
|
||||
super({
|
||||
code: `CERT_CONFIGURATION_ERROR`,
|
||||
meta: { cns, helpUrl, external, type },
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -575,7 +585,7 @@ export class DeploymentNotFound extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_NOT_FOUND',
|
||||
meta: { id, context },
|
||||
message: `Can't find the deployment ${id} under the context ${context}`
|
||||
message: `Can't find the deployment ${id} under the context ${context}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -592,7 +602,7 @@ export class DeploymentNotReady extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_NOT_READY',
|
||||
meta: { url },
|
||||
message: `The deployment https://${url} is not ready.`
|
||||
message: `The deployment https://${url} is not ready.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -605,7 +615,7 @@ export class DeploymentFailedAliasImpossible extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_FAILED_ALIAS_IMPOSSIBLE',
|
||||
meta: {},
|
||||
message: `The deployment build has failed and cannot be aliased`
|
||||
message: `The deployment build has failed and cannot be aliased`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -622,7 +632,7 @@ export class DeploymentPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_PERMISSION_DENIED',
|
||||
meta: { id, context },
|
||||
message: `You don't have access to the deployment ${id} under ${context}.`
|
||||
message: `You don't have access to the deployment ${id} under ${context}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -635,7 +645,7 @@ export class DeploymentTypeUnsupported extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_TYPE_UNSUPPORTED',
|
||||
meta: {},
|
||||
message: `This region only accepts Serverless Docker Deployments`
|
||||
message: `This region only accepts Serverless Docker Deployments`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -649,7 +659,7 @@ export class InvalidAlias extends NowError<'INVALID_ALIAS', { alias: string }> {
|
||||
super({
|
||||
code: 'INVALID_ALIAS',
|
||||
meta: { alias },
|
||||
message: `The given alias ${alias} is not valid`
|
||||
message: `The given alias ${alias} is not valid`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -663,7 +673,7 @@ export class AliasInUse extends NowError<'ALIAS_IN_USE', { alias: string }> {
|
||||
super({
|
||||
code: 'ALIAS_IN_USE',
|
||||
meta: { alias },
|
||||
message: `The alias is already in use`
|
||||
message: `The alias is already in use`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -678,7 +688,7 @@ export class CertMissing extends NowError<'ALIAS_IN_USE', { domain: string }> {
|
||||
super({
|
||||
code: 'ALIAS_IN_USE',
|
||||
meta: { domain },
|
||||
message: `The alias is already in use`
|
||||
message: `The alias is already in use`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -691,7 +701,7 @@ export class ForbiddenScaleMinInstances extends NowError<
|
||||
super({
|
||||
code: 'FORBIDDEN_SCALE_MIN_INSTANCES',
|
||||
meta: { url, max },
|
||||
message: `You can't scale to more than ${max} min instances with your current plan.`
|
||||
message: `You can't scale to more than ${max} min instances with your current plan.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -704,7 +714,7 @@ export class ForbiddenScaleMaxInstances extends NowError<
|
||||
super({
|
||||
code: 'FORBIDDEN_SCALE_MAX_INSTANCES',
|
||||
meta: { url, max },
|
||||
message: `You can't scale to more than ${max} max instances with your current plan.`
|
||||
message: `You can't scale to more than ${max} max instances with your current plan.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -717,7 +727,7 @@ export class InvalidScaleMinMaxRelation extends NowError<
|
||||
super({
|
||||
code: 'INVALID_SCALE_MIN_MAX_RELATION',
|
||||
meta: { url },
|
||||
message: `Min number of instances can't be higher than max.`
|
||||
message: `Min number of instances can't be higher than max.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -730,7 +740,7 @@ export class NotSupportedMinScaleSlots extends NowError<
|
||||
super({
|
||||
code: 'NOT_SUPPORTED_MIN_SCALE_SLOTS',
|
||||
meta: { url },
|
||||
message: `Cloud v2 does not yet support setting a non-zero min scale setting.`
|
||||
message: `Cloud v2 does not yet support setting a non-zero min scale setting.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -743,7 +753,7 @@ export class VerifyScaleTimeout extends NowError<
|
||||
super({
|
||||
code: 'VERIFY_SCALE_TIMEOUT',
|
||||
meta: { timeout },
|
||||
message: `Instance verification timed out (${timeout}ms)`
|
||||
message: `Instance verification timed out (${timeout}ms)`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -756,7 +766,7 @@ export class CantParseJSONFile extends NowError<
|
||||
super({
|
||||
code: 'CANT_PARSE_JSON_FILE',
|
||||
meta: { file },
|
||||
message: `Can't parse json file`
|
||||
message: `Can't parse json file`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -769,7 +779,7 @@ export class CantFindConfig extends NowError<
|
||||
super({
|
||||
code: 'CANT_FIND_CONFIG',
|
||||
meta: { paths },
|
||||
message: `Can't find a configuration file in the given locations.`
|
||||
message: `Can't find a configuration file in the given locations.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -779,7 +789,7 @@ export class FileNotFound extends NowError<'FILE_NOT_FOUND', { file: string }> {
|
||||
super({
|
||||
code: 'FILE_NOT_FOUND',
|
||||
meta: { file },
|
||||
message: `Can't find a file in provided location '${file}'.`
|
||||
message: `Can't find a file in provided location '${file}'.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -792,7 +802,7 @@ export class RulesFileValidationError extends NowError<
|
||||
super({
|
||||
code: 'PATH_ALIAS_VALIDATION_ERROR',
|
||||
meta: { location, message },
|
||||
message: `The provided rules format in file for path alias are invalid`
|
||||
message: `The provided rules format in file for path alias are invalid`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -802,7 +812,7 @@ export class NoAliasInConfig extends NowError<'NO_ALIAS_IN_CONFIG', {}> {
|
||||
super({
|
||||
code: 'NO_ALIAS_IN_CONFIG',
|
||||
meta: {},
|
||||
message: `There is no alias set up in config file.`
|
||||
message: `There is no alias set up in config file.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -815,7 +825,7 @@ export class InvalidAliasInConfig extends NowError<
|
||||
super({
|
||||
code: 'INVALID_ALIAS_IN_CONFIG',
|
||||
meta: { value },
|
||||
message: `Invalid alias option in configuration.`
|
||||
message: `Invalid alias option in configuration.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -828,7 +838,7 @@ export class RuleValidationFailed extends NowError<
|
||||
super({
|
||||
code: 'RULE_VALIDATION_FAILED',
|
||||
meta: { message },
|
||||
message: `The server validation for rules failed`
|
||||
message: `The server validation for rules failed`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -841,7 +851,7 @@ export class InvalidMinForScale extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MIN_FOR_SCALE',
|
||||
meta: { value },
|
||||
message: `Invalid <min> parameter "${value}". A number or "auto" were expected`
|
||||
message: `Invalid <min> parameter "${value}". A number or "auto" were expected`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -854,7 +864,7 @@ export class InvalidArgsForMinMaxScale extends NowError<
|
||||
super({
|
||||
code: 'INVALID_ARGS_FOR_MIN_MAX_SCALE',
|
||||
meta: { min },
|
||||
message: `Invalid number of arguments: expected <min> ("${min}") and [max]`
|
||||
message: `Invalid number of arguments: expected <min> ("${min}") and [max]`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -867,7 +877,7 @@ export class InvalidMaxForScale extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MAX_FOR_SCALE',
|
||||
meta: { value },
|
||||
message: `Invalid <max> parameter "${value}". A number or "auto" were expected`
|
||||
message: `Invalid <max> parameter "${value}". A number or "auto" were expected`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -877,7 +887,7 @@ export class InvalidCert extends NowError<'INVALID_CERT', {}> {
|
||||
super({
|
||||
code: 'INVALID_CERT',
|
||||
meta: {},
|
||||
message: `The provided custom certificate is invalid and couldn't be added`
|
||||
message: `The provided custom certificate is invalid and couldn't be added`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -890,7 +900,7 @@ export class DNSPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'DNS_PERMISSION_DENIED',
|
||||
meta: { domain },
|
||||
message: `You don't have access to the DNS records of ${domain}.`
|
||||
message: `You don't have access to the DNS records of ${domain}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -900,7 +910,7 @@ export class DNSInvalidPort extends NowError<'DNS_INVALID_PORT', {}> {
|
||||
super({
|
||||
code: 'DNS_INVALID_PORT',
|
||||
meta: {},
|
||||
message: `Invalid <port> parameter. A number was expected`
|
||||
message: `Invalid <port> parameter. A number was expected`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -913,7 +923,7 @@ export class DNSInvalidType extends NowError<
|
||||
super({
|
||||
code: 'DNS_INVALID_TYPE',
|
||||
meta: { type },
|
||||
message: `Invalid <type> parameter "${type}". Expected one of A, AAAA, ALIAS, CAA, CNAME, MX, SRV, TXT`
|
||||
message: `Invalid <type> parameter "${type}". Expected one of A, AAAA, ALIAS, CAA, CNAME, MX, SRV, TXT`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -926,7 +936,7 @@ export class DNSConflictingRecord extends NowError<
|
||||
super({
|
||||
code: 'DNS_CONFLICTING_RECORD',
|
||||
meta: { record },
|
||||
message: ` A conflicting record exists "${record}".`
|
||||
message: ` A conflicting record exists "${record}".`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -949,7 +959,7 @@ export class DomainRemovalConflict extends NowError<
|
||||
pendingAsyncPurchase,
|
||||
resolvable,
|
||||
suffix,
|
||||
transferring
|
||||
transferring,
|
||||
}: {
|
||||
aliases: string[];
|
||||
certs: string[];
|
||||
@@ -967,9 +977,9 @@ export class DomainRemovalConflict extends NowError<
|
||||
pendingAsyncPurchase,
|
||||
suffix,
|
||||
transferring,
|
||||
resolvable
|
||||
resolvable,
|
||||
},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -982,7 +992,7 @@ export class DomainMoveConflict extends NowError<
|
||||
message,
|
||||
pendingAsyncPurchase,
|
||||
resolvable,
|
||||
suffix
|
||||
suffix,
|
||||
}: {
|
||||
message: string;
|
||||
pendingAsyncPurchase: boolean;
|
||||
@@ -994,9 +1004,9 @@ export class DomainMoveConflict extends NowError<
|
||||
meta: {
|
||||
pendingAsyncPurchase,
|
||||
resolvable,
|
||||
suffix
|
||||
suffix,
|
||||
},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1006,7 +1016,7 @@ export class InvalidEmail extends NowError<'INVALID_EMAIL', { email: string }> {
|
||||
super({
|
||||
code: 'INVALID_EMAIL',
|
||||
message,
|
||||
meta: { email }
|
||||
meta: { email },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1022,7 +1032,7 @@ export class AccountNotFound extends NowError<
|
||||
super({
|
||||
code: 'ACCOUNT_NOT_FOUND',
|
||||
message,
|
||||
meta: { email }
|
||||
meta: { email },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1035,7 +1045,7 @@ export class InvalidMoveDestination extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MOVE_DESTINATION',
|
||||
message: `Invalid move destination "${destination}"`,
|
||||
meta: { destination }
|
||||
meta: { destination },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1048,7 +1058,7 @@ export class InvalidMoveToken extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MOVE_TOKEN',
|
||||
message: `Invalid move token "${token}"`,
|
||||
meta: { token }
|
||||
meta: { token },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1058,7 +1068,7 @@ export class NoBuilderCacheError extends NowError<'NO_BUILDER_CACHE', {}> {
|
||||
super({
|
||||
code: 'NO_BUILDER_CACHE',
|
||||
message: 'Could not find cache directory for now-builders.',
|
||||
meta: {}
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1071,7 +1081,7 @@ export class BuilderCacheCleanError extends NowError<
|
||||
super({
|
||||
code: 'BUILDER_CACHE_CLEAN_FAILED',
|
||||
message: `Error cleaning builder cache: ${message}`,
|
||||
meta: { path }
|
||||
meta: { path },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1088,7 +1098,7 @@ export class LambdaSizeExceededError extends NowError<
|
||||
).toLowerCase()}) exceeds the maximum size limit (${bytes(
|
||||
maxLambdaSize
|
||||
).toLowerCase()}). Learn more: https://zeit.co/docs/v2/deployments/concepts/lambdas/#maximum-bundle-size`,
|
||||
meta: { size, maxLambdaSize }
|
||||
meta: { size, maxLambdaSize },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1107,7 +1117,7 @@ export class MissingDotenvVarsError extends NowError<
|
||||
} else {
|
||||
message = [
|
||||
`The following env vars are not defined in ${code(type)} file:`,
|
||||
...missing.map(name => ` - ${JSON.stringify(name)}`)
|
||||
...missing.map(name => ` - ${JSON.stringify(name)}`),
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
@@ -1116,7 +1126,7 @@ export class MissingDotenvVarsError extends NowError<
|
||||
super({
|
||||
code: 'MISSING_DOTENV_VARS',
|
||||
message,
|
||||
meta: { type, missing }
|
||||
meta: { type, missing },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1129,7 +1139,7 @@ export class DeploymentsRateLimited extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENTS_RATE_LIMITED',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1139,7 +1149,7 @@ export class BuildsRateLimited extends NowError<'BUILDS_RATE_LIMITED', {}> {
|
||||
super({
|
||||
code: 'BUILDS_RATE_LIMITED',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1149,7 +1159,7 @@ export class ProjectNotFound extends NowError<'PROJECT_NOT_FOUND', {}> {
|
||||
super({
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
meta: {},
|
||||
message: `There is no project for "${nameOrId}"`
|
||||
message: `There is no project for "${nameOrId}"`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1159,7 +1169,7 @@ export class AliasDomainConfigured extends NowError<'DOMAIN_CONFIGURED', {}> {
|
||||
super({
|
||||
code: 'DOMAIN_CONFIGURED',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1169,7 +1179,7 @@ export class MissingBuildScript extends NowError<'MISSING_BUILD_SCRIPT', {}> {
|
||||
super({
|
||||
code: 'MISSING_BUILD_SCRIPT',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1179,7 +1189,7 @@ export class ConflictingFilePath extends NowError<'CONFLICTING_FILE_PATH', {}> {
|
||||
super({
|
||||
code: 'CONFLICTING_FILE_PATH',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1192,7 +1202,23 @@ export class ConflictingPathSegment extends NowError<
|
||||
super({
|
||||
code: 'CONFLICTING_PATH_SEGMENT',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class BuildError extends NowError<'BUILD_ERROR', {}> {
|
||||
constructor({
|
||||
message,
|
||||
meta,
|
||||
}: {
|
||||
message: string;
|
||||
meta: { entrypoint: string };
|
||||
}) {
|
||||
super({
|
||||
code: 'BUILD_ERROR',
|
||||
meta,
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ export default async function getConfig(output: Output, configFile?: string) {
|
||||
if (config) {
|
||||
return config;
|
||||
}
|
||||
|
||||
// First try with the config supplied by the user via --local-config
|
||||
if (configFile) {
|
||||
const localFilePath = path.resolve(localPath, configFile);
|
||||
@@ -27,8 +26,7 @@ export default async function getConfig(output: Output, configFile?: string) {
|
||||
return localConfig;
|
||||
}
|
||||
if (localConfig !== null) {
|
||||
const castedConfig = localConfig;
|
||||
config = castedConfig;
|
||||
config = localConfig;
|
||||
return config;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,10 @@ export default function handleError(
|
||||
|
||||
if ((<APIError>error).status === 403) {
|
||||
console.error(
|
||||
errorOutput('Authentication error. Run `now login` to log-in again.')
|
||||
errorOutput(
|
||||
error.message ||
|
||||
'Authentication error. Run `now login` to log-in again.'
|
||||
)
|
||||
);
|
||||
} else if ((<APIError>error).status === 429) {
|
||||
// Rate limited: display the message from the server-side,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { homedir } from 'os';
|
||||
import { resolve as resolvePath, join, basename } from 'path';
|
||||
import { resolve as resolvePath } from 'path';
|
||||
import EventEmitter from 'events';
|
||||
import qs from 'querystring';
|
||||
import { parse as parseUrl } from 'url';
|
||||
@@ -7,24 +7,22 @@ import bytes from 'bytes';
|
||||
import chalk from 'chalk';
|
||||
import retry from 'async-retry';
|
||||
import { parse as parseIni } from 'ini';
|
||||
import { createReadStream } from 'fs';
|
||||
import fs from 'fs-extra';
|
||||
import ms from 'ms';
|
||||
import fetch from 'node-fetch';
|
||||
import { URLSearchParams } from 'url';
|
||||
import {
|
||||
staticFiles as getFiles,
|
||||
npm as getNpmFiles,
|
||||
docker as getDockerFiles
|
||||
docker as getDockerFiles,
|
||||
} from './get-files';
|
||||
import Agent from './agent.ts';
|
||||
import ua from './ua.ts';
|
||||
import hash from './hash';
|
||||
import processDeployment from './deploy/process-deployment.ts';
|
||||
import highlight from './output/highlight';
|
||||
import createOutput from './output';
|
||||
import { responseError } from './error';
|
||||
|
||||
// How many concurrent HTTP/2 stream uploads
|
||||
const MAX_CONCURRENT = 50;
|
||||
import stamp from './output/stamp';
|
||||
import { BuildError } from './errors-ts';
|
||||
|
||||
// Check if running windows
|
||||
const IS_WIN = process.platform.startsWith('win');
|
||||
@@ -39,14 +37,8 @@ export default class Now extends EventEmitter {
|
||||
this._forceNew = forceNew;
|
||||
this._output = createOutput({ debug });
|
||||
this._apiUrl = apiUrl;
|
||||
this._agent = new Agent(apiUrl, { debug });
|
||||
this._onRetry = this._onRetry.bind(this);
|
||||
this.currentTeam = currentTeam;
|
||||
const closeAgent = () => {
|
||||
this._agent.close();
|
||||
process.removeListener('nowExit', closeAgent);
|
||||
};
|
||||
process.on('nowExit', closeAgent);
|
||||
}
|
||||
|
||||
async create(
|
||||
@@ -61,7 +53,6 @@ export default class Now extends EventEmitter {
|
||||
nowConfig = {},
|
||||
hasNowJson = false,
|
||||
sessionAffinity = 'random',
|
||||
isFile = false,
|
||||
atlas = false,
|
||||
|
||||
// Latest
|
||||
@@ -73,361 +64,152 @@ export default class Now extends EventEmitter {
|
||||
quiet = false,
|
||||
env,
|
||||
build,
|
||||
followSymlinks = true,
|
||||
forceNew = false,
|
||||
target = null
|
||||
target = null,
|
||||
deployStamp,
|
||||
}
|
||||
) {
|
||||
const { log, warn, time } = this._output;
|
||||
const opts = { output: this._output, hasNowJson };
|
||||
const { log, warn, debug } = this._output;
|
||||
const isBuilds = type === null;
|
||||
|
||||
let files = [];
|
||||
let hashes = {};
|
||||
const relatives = {};
|
||||
let engines;
|
||||
let deployment;
|
||||
let requestBody = {};
|
||||
|
||||
await time('Getting files', async () => {
|
||||
const opts = { output: this._output, hasNowJson };
|
||||
if (isBuilds) {
|
||||
requestBody = {
|
||||
token: this._token,
|
||||
teamId: this.currentTeam,
|
||||
env,
|
||||
build,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
name,
|
||||
project,
|
||||
meta,
|
||||
regions,
|
||||
force: forceNew,
|
||||
};
|
||||
|
||||
if (type === 'npm') {
|
||||
files = await getNpmFiles(paths[0], pkg, nowConfig, opts);
|
||||
if (target) {
|
||||
requestBody.target = target;
|
||||
}
|
||||
} else if (type === 'npm') {
|
||||
files = await getNpmFiles(paths[0], pkg, nowConfig, opts);
|
||||
|
||||
// A `start` or `now-start` npm script, or a `server.js` file
|
||||
// in the root directory of the deployment are required
|
||||
if (
|
||||
!isBuilds &&
|
||||
!hasNpmStart(pkg) &&
|
||||
!hasFile(paths[0], files, 'server.js')
|
||||
) {
|
||||
const err = new Error(
|
||||
'Missing `start` (or `now-start`) script in `package.json`. ' +
|
||||
'See: https://docs.npmjs.com/cli/start'
|
||||
);
|
||||
throw err;
|
||||
// A `start` or `now-start` npm script, or a `server.js` file
|
||||
// in the root directory of the deployment are required
|
||||
if (
|
||||
!isBuilds &&
|
||||
!hasNpmStart(pkg) &&
|
||||
!hasFile(paths[0], files, 'server.js')
|
||||
) {
|
||||
const err = new Error(
|
||||
'Missing `start` (or `now-start`) script in `package.json`. ' +
|
||||
'See: https://docs.npmjs.com/cli/start'
|
||||
);
|
||||
throw err;
|
||||
}
|
||||
|
||||
engines = nowConfig.engines || pkg.engines;
|
||||
forwardNpm = forwardNpm || nowConfig.forwardNpm;
|
||||
} else if (type === 'static') {
|
||||
if (paths.length === 1) {
|
||||
files = await getFiles(paths[0], nowConfig, opts);
|
||||
} else {
|
||||
if (!files) {
|
||||
files = [];
|
||||
}
|
||||
|
||||
engines = nowConfig.engines || pkg.engines;
|
||||
forwardNpm = forwardNpm || nowConfig.forwardNpm;
|
||||
} else if (type === 'static') {
|
||||
if (isFile) {
|
||||
files = [resolvePath(paths[0])];
|
||||
} else if (paths.length === 1) {
|
||||
files = await getFiles(paths[0], nowConfig, opts);
|
||||
} else {
|
||||
if (!files) {
|
||||
files = [];
|
||||
}
|
||||
for (const path of paths) {
|
||||
const list = await getFiles(path, {}, opts);
|
||||
files = files.concat(list);
|
||||
|
||||
for (const path of paths) {
|
||||
const list = await getFiles(path, {}, opts);
|
||||
files = files.concat(list);
|
||||
|
||||
for (const file of list) {
|
||||
relatives[file] = path;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (type === 'docker') {
|
||||
files = await getDockerFiles(paths[0], nowConfig, opts);
|
||||
} else if (isBuilds) {
|
||||
opts.isBuilds = isBuilds;
|
||||
|
||||
if (isFile) {
|
||||
files = [resolvePath(paths[0])];
|
||||
} else if (paths.length === 1) {
|
||||
files = await getFiles(paths[0], {}, opts);
|
||||
} else {
|
||||
if (!files) {
|
||||
files = [];
|
||||
}
|
||||
|
||||
for (const path of paths) {
|
||||
const list = await getFiles(path, {}, opts);
|
||||
files = files.concat(list);
|
||||
|
||||
for (const file of list) {
|
||||
relatives[file] = path;
|
||||
}
|
||||
for (const file of list) {
|
||||
relatives[file] = path;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Read `registry.npmjs.org` authToken from .npmrc
|
||||
let authToken;
|
||||
|
||||
if (type === 'npm' && forwardNpm) {
|
||||
authToken =
|
||||
(await readAuthToken(paths[0])) || (await readAuthToken(homedir()));
|
||||
} else if (type === 'docker') {
|
||||
files = await getDockerFiles(paths[0], nowConfig, opts);
|
||||
}
|
||||
|
||||
const hashes = await time('Computing hashes', () => {
|
||||
const pkgDetails = Object.assign({ name }, pkg);
|
||||
return hash(files, pkgDetails);
|
||||
});
|
||||
const uploadStamp = stamp();
|
||||
|
||||
this._files = hashes;
|
||||
if (isBuilds) {
|
||||
deployment = await processDeployment({
|
||||
now: this,
|
||||
output: this._output,
|
||||
hashes,
|
||||
paths,
|
||||
requestBody,
|
||||
uploadStamp,
|
||||
deployStamp,
|
||||
quiet,
|
||||
nowConfig,
|
||||
});
|
||||
} else {
|
||||
// Read `registry.npmjs.org` authToken from .npmrc
|
||||
let authToken;
|
||||
|
||||
const deployment = await this.retry(async bail => {
|
||||
// Flatten the array to contain files to sync where each nested input
|
||||
// array has a group of files with the same sha but different path
|
||||
const files = await time(
|
||||
'Get files ready for deployment',
|
||||
Promise.all(
|
||||
Array.prototype.concat.apply(
|
||||
[],
|
||||
await Promise.all(
|
||||
Array.from(this._files).map(async ([sha, { data, names }]) => {
|
||||
const statFn = followSymlinks ? fs.stat : fs.lstat;
|
||||
|
||||
return names.map(async name => {
|
||||
const getMode = async () => {
|
||||
const st = await statFn(name);
|
||||
return st.mode;
|
||||
};
|
||||
|
||||
const mode = await getMode();
|
||||
const multipleStatic = Object.keys(relatives).length !== 0;
|
||||
|
||||
let file;
|
||||
|
||||
if (isFile) {
|
||||
file = basename(paths[0]);
|
||||
} else if (multipleStatic) {
|
||||
file = toRelative(name, join(relatives[name], '..'));
|
||||
} else {
|
||||
file = toRelative(name, paths[0]);
|
||||
}
|
||||
|
||||
return {
|
||||
sha,
|
||||
size: data.length,
|
||||
file,
|
||||
mode
|
||||
};
|
||||
});
|
||||
})
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
// This is a useful warning because it prevents people
|
||||
// from getting confused about a deployment that renders 404.
|
||||
if (
|
||||
files.length === 0 ||
|
||||
files.every(item => item.file.startsWith('.'))
|
||||
) {
|
||||
warn(
|
||||
'There are no files (or only files starting with a dot) inside your deployment.'
|
||||
);
|
||||
if (type === 'npm' && forwardNpm) {
|
||||
authToken =
|
||||
(await readAuthToken(paths[0])) || (await readAuthToken(homedir()));
|
||||
}
|
||||
|
||||
const queryProps = {};
|
||||
const requestBody = isBuilds
|
||||
? {
|
||||
version: 2,
|
||||
env,
|
||||
build,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
name,
|
||||
project,
|
||||
files,
|
||||
meta,
|
||||
regions
|
||||
}
|
||||
: {
|
||||
env,
|
||||
build,
|
||||
meta,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
forceNew,
|
||||
name,
|
||||
project,
|
||||
description,
|
||||
deploymentType: type,
|
||||
registryAuthToken: authToken,
|
||||
files,
|
||||
engines,
|
||||
scale,
|
||||
sessionAffinity,
|
||||
limits: nowConfig.limits,
|
||||
atlas
|
||||
};
|
||||
requestBody = {
|
||||
token: this._token,
|
||||
teamId: this.currentTeam,
|
||||
env,
|
||||
build,
|
||||
meta,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
forceNew,
|
||||
name,
|
||||
project,
|
||||
description,
|
||||
deploymentType: type,
|
||||
registryAuthToken: authToken,
|
||||
engines,
|
||||
scale,
|
||||
sessionAffinity,
|
||||
limits: nowConfig.limits,
|
||||
atlas,
|
||||
config: nowConfig,
|
||||
};
|
||||
|
||||
if (Object.keys(nowConfig).length > 0) {
|
||||
if (isBuilds) {
|
||||
// These properties are only used inside Now CLI and
|
||||
// are not supported on the API.
|
||||
const exclude = ['github', 'scope'];
|
||||
|
||||
// Request properties that are made of a combination of
|
||||
// command flags and config properties were already set
|
||||
// earlier. Here, we are setting request properties that
|
||||
// are purely made of their equally-named config property.
|
||||
for (const key of Object.keys(nowConfig)) {
|
||||
const value = nowConfig[key];
|
||||
|
||||
if (!requestBody[key] && !exclude.includes(key)) {
|
||||
requestBody[key] = value;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
requestBody.config = nowConfig;
|
||||
}
|
||||
}
|
||||
|
||||
if (isBuilds) {
|
||||
if (forceNew) {
|
||||
queryProps.forceNew = 1;
|
||||
}
|
||||
|
||||
if (target) {
|
||||
requestBody.target = target;
|
||||
}
|
||||
|
||||
if (isFile) {
|
||||
requestBody.routes = [
|
||||
{
|
||||
src: '/',
|
||||
dest: `/${files[0].file}`
|
||||
}
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
const query = qs.stringify(queryProps);
|
||||
const version = isBuilds ? 'v9' : 'v4';
|
||||
|
||||
const res = await this._fetch(
|
||||
`/${version}/now/deployments${query ? `?${query}` : ''}`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: requestBody
|
||||
}
|
||||
);
|
||||
|
||||
// No retry on 4xx
|
||||
let body;
|
||||
|
||||
try {
|
||||
body = await res.json();
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`Unexpected response error: ${err.message} (${
|
||||
res.status
|
||||
} status code)`
|
||||
);
|
||||
}
|
||||
|
||||
if (res.status === 429) {
|
||||
if (body.error && body.error.code === 'builds_rate_limited') {
|
||||
const err = new Error(body.error.message);
|
||||
err.status = res.status;
|
||||
err.retryAfter = 'never';
|
||||
err.code = body.error.code;
|
||||
|
||||
return bail(err);
|
||||
}
|
||||
|
||||
let msg = 'You have been creating deployments at a very fast pace. ';
|
||||
|
||||
if (body.error && body.error.limit && body.error.limit.reset) {
|
||||
const { reset } = body.error.limit;
|
||||
const difference = reset * 1000 - Date.now();
|
||||
|
||||
msg += `Please retry in ${ms(difference, { long: true })}.`;
|
||||
} else {
|
||||
msg += 'Please slow down.';
|
||||
}
|
||||
|
||||
const err = new Error(msg);
|
||||
|
||||
err.status = res.status;
|
||||
err.retryAfter = 'never';
|
||||
|
||||
return bail(err);
|
||||
}
|
||||
|
||||
// If the deployment domain is missing a cert, bail with the error
|
||||
if (
|
||||
res.status === 400 &&
|
||||
body.error &&
|
||||
body.error.code === 'cert_missing'
|
||||
) {
|
||||
bail(await responseError(res, null, body));
|
||||
}
|
||||
|
||||
if (
|
||||
res.status === 400 &&
|
||||
body.error &&
|
||||
body.error.code === 'missing_files'
|
||||
) {
|
||||
return body;
|
||||
}
|
||||
|
||||
if (res.status === 404 && body.error && body.error.code === 'not_found') {
|
||||
return body;
|
||||
}
|
||||
|
||||
if (res.status >= 400 && res.status < 500) {
|
||||
const err = new Error();
|
||||
|
||||
if (body.error) {
|
||||
const { code, unreferencedBuildSpecs } = body.error;
|
||||
|
||||
if (code === 'env_value_invalid_type') {
|
||||
const { key } = body.error;
|
||||
err.message =
|
||||
`The env key ${key} has an invalid type: ${typeof env[key]}. ` +
|
||||
'Please supply a String or a Number (https://err.sh/now/env-value-invalid-type)';
|
||||
} else if (code === 'unreferenced_build_specifications') {
|
||||
const count = unreferencedBuildSpecs.length;
|
||||
const prefix = count === 1 ? 'build' : 'builds';
|
||||
|
||||
err.message =
|
||||
`You defined ${count} ${prefix} that did not match any source files (please ensure they are NOT defined in ${highlight(
|
||||
'.nowignore'
|
||||
)}):` +
|
||||
`\n- ${unreferencedBuildSpecs
|
||||
.map(item => JSON.stringify(item))
|
||||
.join('\n- ')}`;
|
||||
} else {
|
||||
Object.assign(err, body.error);
|
||||
}
|
||||
} else {
|
||||
err.message = 'Not able to create deployment';
|
||||
}
|
||||
|
||||
return bail(err);
|
||||
}
|
||||
|
||||
if (res.status !== 200) {
|
||||
throw new Error(body.error.message);
|
||||
}
|
||||
|
||||
for (const [name, value] of res.headers.entries()) {
|
||||
if (name.startsWith('x-now-warning-')) {
|
||||
this._output.warn(value);
|
||||
}
|
||||
}
|
||||
|
||||
return body;
|
||||
});
|
||||
deployment = await processDeployment({
|
||||
legacy: true,
|
||||
now: this,
|
||||
output: this._output,
|
||||
hashes,
|
||||
paths,
|
||||
requestBody,
|
||||
uploadStamp,
|
||||
deployStamp,
|
||||
quiet,
|
||||
env,
|
||||
nowConfig,
|
||||
});
|
||||
}
|
||||
|
||||
// We report about files whose sizes are too big
|
||||
let missingVersion = false;
|
||||
|
||||
if (deployment.warnings) {
|
||||
if (deployment && deployment.warnings) {
|
||||
let sizeExceeded = 0;
|
||||
|
||||
deployment.warnings.forEach(warning => {
|
||||
if (warning.reason === 'size_limit_exceeded') {
|
||||
const { sha, limit } = warning;
|
||||
const n = hashes.get(sha).names.pop();
|
||||
const n = hashes[sha].names.pop();
|
||||
|
||||
warn(`Skipping file ${n} (size exceeded ${bytes(limit)}`);
|
||||
|
||||
hashes.get(sha).names.unshift(n); // Move name (hack, if duplicate matches we report them in order)
|
||||
hashes[sha].names.unshift(n); // Move name (hack, if duplicate matches we report them in order)
|
||||
sizeExceeded++;
|
||||
} else if (warning.reason === 'node_version_not_found') {
|
||||
warn(`Requested node version ${warning.wanted} is not available`);
|
||||
@@ -445,19 +227,10 @@ export default class Now extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
if (deployment.error && deployment.error.code === 'missing_files') {
|
||||
this._missing = deployment.error.missing || [];
|
||||
this._fileCount = files.length;
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!isBuilds && !quiet && type === 'npm' && deployment.nodeVersion) {
|
||||
if (engines && engines.node && !missingVersion) {
|
||||
log(
|
||||
chalk`Using Node.js {bold ${
|
||||
deployment.nodeVersion
|
||||
}} (requested: {dim \`${engines.node}\`})`
|
||||
chalk`Using Node.js {bold ${deployment.nodeVersion}} (requested: {dim \`${engines.node}\`})`
|
||||
);
|
||||
} else {
|
||||
log(chalk`Using Node.js {bold ${deployment.nodeVersion}} (default)`);
|
||||
@@ -472,81 +245,90 @@ export default class Now extends EventEmitter {
|
||||
return deployment;
|
||||
}
|
||||
|
||||
upload({ atlas = false, scale = {} } = {}) {
|
||||
const { debug, time } = this._output;
|
||||
debug(`Will upload ${this._missing.length} files`);
|
||||
async handleDeploymentError(error, { hashes, env }) {
|
||||
if (error.status === 429) {
|
||||
if (error.code === 'builds_rate_limited') {
|
||||
const err = new Error(error.message);
|
||||
err.status = error.status;
|
||||
err.retryAfter = 'never';
|
||||
err.code = error.code;
|
||||
|
||||
this._agent.setConcurrency({
|
||||
maxStreams: MAX_CONCURRENT,
|
||||
capacity: this._missing.length
|
||||
});
|
||||
return err;
|
||||
}
|
||||
|
||||
time(
|
||||
'Uploading files',
|
||||
Promise.all(
|
||||
this._missing.map(sha =>
|
||||
retry(
|
||||
async bail => {
|
||||
const file = this._files.get(sha);
|
||||
const fPath = file.names[0];
|
||||
const stream = createReadStream(fPath);
|
||||
const { data } = file;
|
||||
let msg = 'You have been creating deployments at a very fast pace. ';
|
||||
|
||||
const fstreamPush = stream.push;
|
||||
if (error.limit && error.limit.reset) {
|
||||
const { reset } = error.limit;
|
||||
const difference = reset * 1000 - Date.now();
|
||||
|
||||
let uploadedSoFar = 0;
|
||||
stream.push = chunk => {
|
||||
// If we're about to push the last chunk, then don't do it here
|
||||
// But instead, we'll "hang" the progress bar and do it on 200
|
||||
if (chunk && uploadedSoFar + chunk.length < data.length) {
|
||||
this.emit('uploadProgress', chunk.length);
|
||||
uploadedSoFar += chunk.length;
|
||||
}
|
||||
return fstreamPush.call(stream, chunk);
|
||||
};
|
||||
msg += `Please retry in ${ms(difference, { long: true })}.`;
|
||||
} else {
|
||||
msg += 'Please slow down.';
|
||||
}
|
||||
|
||||
const url = atlas ? '/v1/now/images' : '/v2/now/files';
|
||||
const additionalHeaders = atlas
|
||||
? {
|
||||
'x-now-dcs': Object.keys(scale).join(',')
|
||||
}
|
||||
: {};
|
||||
const res = await this._fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/octet-stream',
|
||||
'x-now-digest': sha,
|
||||
'x-now-size': data.length,
|
||||
...additionalHeaders
|
||||
},
|
||||
body: stream
|
||||
});
|
||||
const err = new Error(msg);
|
||||
|
||||
if (res.status === 200) {
|
||||
// What we want
|
||||
this.emit('uploadProgress', file.data.length - uploadedSoFar);
|
||||
this.emit('upload', file);
|
||||
} else if (res.status > 200 && res.status < 500) {
|
||||
// If something is wrong with our request, we don't retry
|
||||
return bail(await responseError(res, `Failed to upload file with status: ${res.status}`));
|
||||
} else {
|
||||
// If something is wrong with the server, we retry
|
||||
throw await responseError(res, 'Failed to upload file');
|
||||
}
|
||||
},
|
||||
{
|
||||
retries: 3,
|
||||
randomize: true,
|
||||
onRetry: this._onRetry
|
||||
}
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
.then(() => {
|
||||
this.emit('complete');
|
||||
})
|
||||
.catch(err => this.emit('error', err));
|
||||
err.status = error.status;
|
||||
err.retryAfter = 'never';
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
// If the deployment domain is missing a cert, bail with the error
|
||||
if (error.status === 400 && error.code === 'cert_missing') {
|
||||
return responseError(error, null, error);
|
||||
}
|
||||
|
||||
if (error.status === 400 && error.code === 'missing_files') {
|
||||
this._missing = error.missing || [];
|
||||
this._fileCount = hashes.length;
|
||||
|
||||
return error;
|
||||
}
|
||||
|
||||
if (error.status === 404 && error.code === 'not_found') {
|
||||
return error;
|
||||
}
|
||||
|
||||
if (error.status >= 400 && error.status < 500) {
|
||||
const err = new Error();
|
||||
|
||||
const { code, unreferencedBuildSpecs } = error;
|
||||
|
||||
if (code === 'env_value_invalid_type') {
|
||||
const { key } = error;
|
||||
err.message =
|
||||
`The env key ${key} has an invalid type: ${typeof env[key]}. ` +
|
||||
'Please supply a String or a Number (https://err.sh/now-cli/env-value-invalid-type)';
|
||||
} else if (code === 'unreferenced_build_specifications') {
|
||||
const count = unreferencedBuildSpecs.length;
|
||||
const prefix = count === 1 ? 'build' : 'builds';
|
||||
|
||||
err.message =
|
||||
`You defined ${count} ${prefix} that did not match any source files (please ensure they are NOT defined in ${highlight(
|
||||
'.nowignore'
|
||||
)}):` +
|
||||
`\n- ${unreferencedBuildSpecs
|
||||
.map(item => JSON.stringify(item))
|
||||
.join('\n- ')}`;
|
||||
} else {
|
||||
Object.assign(err, error);
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
// Handle build errors
|
||||
if (error.id && error.id.startsWith('bld_')) {
|
||||
return new BuildError({
|
||||
meta: {
|
||||
entrypoint: error.entrypoint,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return new Error(error.message);
|
||||
}
|
||||
|
||||
async listSecrets() {
|
||||
@@ -589,7 +371,7 @@ export default class Now extends EventEmitter {
|
||||
{
|
||||
retries: 3,
|
||||
minTimeout: 2500,
|
||||
onRetry: this._onRetry
|
||||
onRetry: this._onRetry,
|
||||
}
|
||||
);
|
||||
};
|
||||
@@ -597,7 +379,7 @@ export default class Now extends EventEmitter {
|
||||
if (!app && !Object.keys(meta).length) {
|
||||
// Get the 35 latest projects and their latest deployment
|
||||
const query = new URLSearchParams({ limit: 35 });
|
||||
const projects = await fetchRetry(`/projects/list?${query}`);
|
||||
const projects = await fetchRetry(`/v2/projects/?${query}`);
|
||||
|
||||
const deployments = await Promise.all(
|
||||
projects.map(async ({ id: projectId }) => {
|
||||
@@ -647,7 +429,7 @@ export default class Now extends EventEmitter {
|
||||
{
|
||||
retries: 3,
|
||||
minTimeout: 2500,
|
||||
onRetry: this._onRetry
|
||||
onRetry: this._onRetry,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -727,7 +509,7 @@ export default class Now extends EventEmitter {
|
||||
|
||||
await this.retry(async bail => {
|
||||
const res = await this._fetch(url, {
|
||||
method: 'DELETE'
|
||||
method: 'DELETE',
|
||||
});
|
||||
|
||||
if (res.status === 200) {
|
||||
@@ -748,7 +530,7 @@ export default class Now extends EventEmitter {
|
||||
return retry(fn, {
|
||||
retries,
|
||||
maxTimeout,
|
||||
onRetry: this._onRetry
|
||||
onRetry: this._onRetry,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -756,9 +538,7 @@ export default class Now extends EventEmitter {
|
||||
this._output.debug(`Retrying: ${err}\n${err.stack}`);
|
||||
}
|
||||
|
||||
close() {
|
||||
this._agent.close();
|
||||
}
|
||||
close() {}
|
||||
|
||||
get id() {
|
||||
return this._id;
|
||||
@@ -802,14 +582,21 @@ export default class Now extends EventEmitter {
|
||||
|
||||
opts.headers = opts.headers || {};
|
||||
opts.headers.accept = 'application/json';
|
||||
opts.headers.authorization = `Bearer ${this._token}`;
|
||||
opts.headers.Authorization = `Bearer ${this._token}`;
|
||||
opts.headers['user-agent'] = ua;
|
||||
|
||||
if (
|
||||
opts.body &&
|
||||
typeof opts.body === 'object' &&
|
||||
opts.body.constructor === Object
|
||||
) {
|
||||
opts.body = JSON.stringify(opts.body);
|
||||
opts.headers['Content-Type'] = 'application/json';
|
||||
}
|
||||
|
||||
return this._output.time(
|
||||
`${opts.method || 'GET'} ${this._apiUrl}${_url} ${JSON.stringify(
|
||||
opts.body
|
||||
) || ''}`,
|
||||
this._agent.fetch(_url, opts)
|
||||
`${opts.method || 'GET'} ${this._apiUrl}${_url} ${opts.body || ''}`,
|
||||
fetch(`${this._apiUrl}${_url}`, opts)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -827,8 +614,8 @@ export default class Now extends EventEmitter {
|
||||
opts = Object.assign({}, opts, {
|
||||
body: JSON.stringify(opts.body),
|
||||
headers: Object.assign({}, opts.headers, {
|
||||
'Content-Type': 'application/json'
|
||||
})
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
});
|
||||
}
|
||||
const res = await this._fetch(url, opts);
|
||||
@@ -875,6 +662,7 @@ function hasNpmStart(pkg) {
|
||||
|
||||
function hasFile(base, files, name) {
|
||||
const relative = files.map(file => toRelative(file, base));
|
||||
console.log(731, relative);
|
||||
return relative.indexOf(name) !== -1;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { join } from 'path';
|
||||
import { exists } from 'fs-extra';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
|
||||
import Client from './client';
|
||||
import { Config } from '../types';
|
||||
import { Package } from './dev/types';
|
||||
import { CantParseJSONFile, ProjectNotFound } from './errors-ts';
|
||||
import getProjectByIdOrName from './projects/get-project-by-id-or-name';
|
||||
|
||||
@@ -26,14 +27,14 @@ export default async function preferV2Deployment({
|
||||
hasServerfile,
|
||||
pkg,
|
||||
localConfig,
|
||||
projectName
|
||||
projectName,
|
||||
}: {
|
||||
client?: Client,
|
||||
hasDockerfile: boolean,
|
||||
hasServerfile: boolean,
|
||||
pkg: Package | CantParseJSONFile | null,
|
||||
localConfig: Config | undefined,
|
||||
projectName?: string
|
||||
client?: Client;
|
||||
hasDockerfile: boolean;
|
||||
hasServerfile: boolean;
|
||||
pkg: PackageJson | CantParseJSONFile | null;
|
||||
localConfig: Config | undefined;
|
||||
projectName?: string;
|
||||
}): Promise<null | string> {
|
||||
if (localConfig && localConfig.version) {
|
||||
// We will prefer anything that is set here
|
||||
@@ -52,10 +53,14 @@ export default async function preferV2Deployment({
|
||||
const { scripts = {} } = pkg;
|
||||
|
||||
if (!scripts.start && !scripts['now-start']) {
|
||||
return `Deploying to Now 2.0, because ${highlight('package.json')} is missing a ${cmd('start')} script. ${INFO}`;
|
||||
return `Deploying to Now 2.0, because ${highlight(
|
||||
'package.json'
|
||||
)} is missing a ${cmd('start')} script. ${INFO}`;
|
||||
}
|
||||
} else if (!pkg && !hasDockerfile) {
|
||||
return `Deploying to Now 2.0, because no ${highlight('Dockerfile')} was found. ${INFO}`;
|
||||
return `Deploying to Now 2.0, because no ${highlight(
|
||||
'Dockerfile'
|
||||
)} was found. ${INFO}`;
|
||||
}
|
||||
|
||||
if (client && projectName) {
|
||||
|
||||
@@ -2,10 +2,10 @@ import path from 'path';
|
||||
import { CantParseJSONFile } from './errors-ts';
|
||||
import readJSONFile from './read-json-file';
|
||||
import { Config } from '../types';
|
||||
import { Package } from './dev/types';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
|
||||
interface CustomPackage extends Package {
|
||||
now?: Config
|
||||
interface CustomPackage extends PackageJson {
|
||||
now?: Config;
|
||||
}
|
||||
|
||||
export default async function readPackage(file?: string) {
|
||||
@@ -16,8 +16,8 @@ export default async function readPackage(file?: string) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (result){
|
||||
return result as CustomPackage
|
||||
if (result) {
|
||||
return result as CustomPackage;
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
@@ -22,7 +22,7 @@ export default async (sentry, error, apiUrl, configFiles) => {
|
||||
if (user) {
|
||||
const spec = {
|
||||
email: user.email,
|
||||
id: user.uid
|
||||
id: user.uid,
|
||||
};
|
||||
|
||||
if (user.username) {
|
||||
@@ -44,7 +44,7 @@ export default async (sentry, error, apiUrl, configFiles) => {
|
||||
scope.setExtra('scopeError', {
|
||||
name: scopeError.name,
|
||||
message: scopeError.message,
|
||||
stack: scopeError.stack
|
||||
stack: scopeError.stack,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -81,7 +81,8 @@ export default async (sentry, error, apiUrl, configFiles) => {
|
||||
// Report information about the version of `node` being used
|
||||
scope.setExtra('node', {
|
||||
execPath: process.execPath,
|
||||
version: process.version
|
||||
version: process.version,
|
||||
platform: process.platform,
|
||||
});
|
||||
|
||||
sentry.captureException(error);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Response } from 'fetch-h2';
|
||||
import { Response } from 'node-fetch';
|
||||
import { APIError } from './errors-ts';
|
||||
|
||||
export default async function responseError(
|
||||
|
||||
packages/now-cli/test/dev-server.unit.js
@@ -31,7 +31,7 @@ function testFixture(name, fn) {
|
||||
readyResolve = resolve;
|
||||
});
|
||||
|
||||
const debug = false;
|
||||
const debug = true;
|
||||
const output = createOutput({ debug });
|
||||
const origReady = output.ready;
|
||||
|
||||
@@ -329,8 +329,8 @@ test(
|
||||
// HTML response
|
||||
const res = await fetch(`${server.address}/does-not-exist`, {
|
||||
headers: {
|
||||
Accept: 'text/html'
|
||||
}
|
||||
Accept: 'text/html',
|
||||
},
|
||||
});
|
||||
t.is(res.status, 404);
|
||||
t.is(res.headers.get('content-type'), 'text/html; charset=utf-8');
|
||||
@@ -342,8 +342,8 @@ test(
|
||||
// JSON response
|
||||
const res = await fetch(`${server.address}/does-not-exist`, {
|
||||
headers: {
|
||||
Accept: 'application/json'
|
||||
}
|
||||
Accept: 'application/json',
|
||||
},
|
||||
});
|
||||
t.is(res.status, 404);
|
||||
t.is(res.headers.get('content-type'), 'application/json');
|
||||
@@ -401,10 +401,10 @@ test('[DevServer] parseListen()', t => {
|
||||
t.deepEqual(parseListen('127.0.0.1:3005'), [3005, '127.0.0.1']);
|
||||
t.deepEqual(parseListen('tcp://127.0.0.1:5000'), [5000, '127.0.0.1']);
|
||||
t.deepEqual(parseListen('unix:/home/user/server.sock'), [
|
||||
'/home/user/server.sock'
|
||||
'/home/user/server.sock',
|
||||
]);
|
||||
t.deepEqual(parseListen('pipe:\\\\.\\pipe\\PipeName'), [
|
||||
'\\\\.\\pipe\\PipeName'
|
||||
'\\\\.\\pipe\\PipeName',
|
||||
]);
|
||||
|
||||
let err;
|
||||
|
||||
@@ -14,37 +14,37 @@
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@angular/animations": "~8.1.0",
|
||||
"@angular/common": "~8.1.0",
|
||||
"@angular/compiler": "~8.1.0",
|
||||
"@angular/core": "~8.1.0",
|
||||
"@angular/forms": "~8.1.0",
|
||||
"@angular/platform-browser": "~8.1.0",
|
||||
"@angular/platform-browser-dynamic": "~8.1.0",
|
||||
"@angular/router": "~8.1.0",
|
||||
"rxjs": "~6.4.0",
|
||||
"tslib": "^1.9.0",
|
||||
"zone.js": "~0.9.1"
|
||||
"@angular/animations": "8.1.0",
|
||||
"@angular/common": "8.1.0",
|
||||
"@angular/compiler": "8.1.0",
|
||||
"@angular/core": "8.1.0",
|
||||
"@angular/forms": "8.1.0",
|
||||
"@angular/platform-browser": "8.1.0",
|
||||
"@angular/platform-browser-dynamic": "8.1.0",
|
||||
"@angular/router": "8.1.0",
|
||||
"rxjs": "6.4.0",
|
||||
"tslib": "1.9.0",
|
||||
"zone.js": "0.9.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-devkit/build-angular": "~0.801.0",
|
||||
"@angular/cli": "~8.1.0",
|
||||
"@angular/compiler-cli": "~8.1.0",
|
||||
"@angular/language-service": "~8.1.0",
|
||||
"@types/node": "~8.9.4",
|
||||
"@types/jasmine": "~3.3.8",
|
||||
"@types/jasminewd2": "~2.0.3",
|
||||
"codelyzer": "^5.0.0",
|
||||
"jasmine-core": "~3.4.0",
|
||||
"jasmine-spec-reporter": "~4.2.1",
|
||||
"karma": "~4.1.0",
|
||||
"karma-chrome-launcher": "~2.2.0",
|
||||
"karma-coverage-istanbul-reporter": "~2.0.1",
|
||||
"karma-jasmine": "~2.0.1",
|
||||
"karma-jasmine-html-reporter": "^1.4.0",
|
||||
"protractor": "~5.4.0",
|
||||
"ts-node": "~7.0.0",
|
||||
"tslint": "~5.15.0",
|
||||
"typescript": "~3.4.3"
|
||||
"@angular-devkit/build-angular": "0.801.0",
|
||||
"@angular/cli": "8.1.0",
|
||||
"@angular/compiler-cli": "8.1.0",
|
||||
"@angular/language-service": "8.1.0",
|
||||
"@types/node": "8.9.4",
|
||||
"@types/jasmine": "3.3.8",
|
||||
"@types/jasminewd2": "2.0.3",
|
||||
"codelyzer": "5.0.0",
|
||||
"jasmine-core": "3.4.0",
|
||||
"jasmine-spec-reporter": "4.2.1",
|
||||
"karma": "4.1.0",
|
||||
"karma-chrome-launcher": "2.2.0",
|
||||
"karma-coverage-istanbul-reporter": "2.0.1",
|
||||
"karma-jasmine": "2.0.1",
|
||||
"karma-jasmine-html-reporter": "1.4.0",
|
||||
"protractor": "5.4.0",
|
||||
"ts-node": "7.0.0",
|
||||
"tslint": "5.15.0",
|
||||
"typescript": "3.4.3"
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large

packages/now-cli/test/dev/fixtures/25-nextjs-src-dir/.gitignore (new file)
@@ -0,0 +1,2 @@
node_modules
.next
@@ -0,0 +1,2 @@
README.md
yarn.lock
@@ -0,0 +1,13 @@
{
  "name": "nextjs",
  "license": "MIT",
  "scripts": {
    "dev": "next",
    "build": "next build"
  },
  "dependencies": {
    "next": "^9.1.1",
    "react": "^16.7.0",
    "react-dom": "^16.7.0"
  }
}
@@ -0,0 +1,102 @@
|
||||
import { useEffect, useState } from 'react';
|
||||
import Head from 'next/head';
|
||||
|
||||
function Index() {
|
||||
const [date, setDate] = useState(null);
|
||||
useEffect(() => {
|
||||
async function getDate() {
|
||||
const res = await fetch('/api/date');
|
||||
const newDate = await res.text();
|
||||
setDate(newDate);
|
||||
}
|
||||
getDate();
|
||||
}, []);
|
||||
return (
|
||||
<main>
|
||||
<Head>
|
||||
<title>Next.js + Node API</title>
|
||||
</Head>
|
||||
<h1>Next.js + Node.js API</h1>
|
||||
<h2>
|
||||
Deployed with{' '}
|
||||
<a
|
||||
href="https://zeit.co/docs"
|
||||
target="_blank"
|
||||
rel="noreferrer noopener"
|
||||
>
|
||||
ZEIT Now
|
||||
</a>
|
||||
!
|
||||
</h2>
|
||||
<p>
|
||||
<a
|
||||
href="https://github.com/zeit/now-examples/blob/master/nextjs-node"
|
||||
target="_blank"
|
||||
rel="noreferrer noopener"
|
||||
>
|
||||
This project
|
||||
</a>{' '}
|
||||
is a <a href="https://nextjs.org/">Next.js</a> app with two directories,{' '}
|
||||
<code>/pages</code> for static content and <code>/api</code> which
|
||||
contains a serverless <a href="https://nodejs.org/en/">Node.js</a>{' '}
|
||||
function. See{' '}
|
||||
<a href="/api/date">
|
||||
<code>api/date</code> for the Date API with Node.js
|
||||
</a>
|
||||
.
|
||||
</p>
|
||||
<br />
|
||||
<h2>The date according to Node.js is:</h2>
|
||||
<p>{date ? date : 'Loading date...'}</p>
|
||||
<style jsx>{`
|
||||
main {
|
||||
align-content: center;
|
||||
box-sizing: border-box;
|
||||
display: grid;
|
||||
font-family: 'SF Pro Text', 'SF Pro Icons', 'Helvetica Neue',
|
||||
'Helvetica', 'Arial', sans-serif;
|
||||
hyphens: auto;
|
||||
line-height: 1.65;
|
||||
margin: 0 auto;
|
||||
max-width: 680px;
|
||||
min-height: 100vh;
|
||||
padding: 72px 0;
|
||||
text-align: center;
|
||||
}
|
||||
h1 {
|
||||
font-size: 45px;
|
||||
}
|
||||
h2 {
|
||||
margin-top: 1.5em;
|
||||
}
|
||||
p {
|
||||
font-size: 16px;
|
||||
}
|
||||
a {
|
||||
border-bottom: 1px solid white;
|
||||
color: #0076ff;
|
||||
cursor: pointer;
|
||||
text-decoration: none;
|
||||
transition: all 0.2s ease;
|
||||
}
|
||||
a:hover {
|
||||
border-bottom: 1px solid #0076ff;
|
||||
}
|
||||
code,
|
||||
pre {
|
||||
color: #d400ff;
|
||||
font-family: Menlo, Monaco, Lucida Console, Liberation Mono,
|
||||
DejaVu Sans Mono, Bitstream Vera Sans Mono, Courier New, monospace,
|
||||
serif;
|
||||
font-size: 0.92em;
|
||||
}
|
||||
code:before,
|
||||
code:after {
|
||||
content: '\`';
|
||||
}
|
||||
`}</style>
|
||||
</main>
|
||||
);
|
||||
}
|
||||
|
||||
export default Index;
|
||||
packages/now-cli/test/dev/fixtures/25-nextjs-src-dir/yarn.lock (new file)
File diff suppressed because it is too large

@@ -0,0 +1 @@
Invalid env var names test
@@ -0,0 +1,11 @@
{
  "version": 2,
  "env": {
    "1": ""
  },
  "build": {
    "env": {
      "_a": ""
    }
  }
}

packages/now-cli/test/dev/fixtures/output-is-source/.gitignore (new file)
@@ -0,0 +1 @@
now.json
@@ -0,0 +1,5 @@
{
  "scripts": {
    "build": "rm -Rf public && mkdir -p public && echo 'hello first' > public/index.html"
  }
}
@@ -0,0 +1,4 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
@@ -49,7 +49,7 @@ function validateResponseHeaders(t, res) {
|
||||
|
||||
async function exec(directory, args = []) {
|
||||
return execa(binaryPath, ['dev', directory, ...args], {
|
||||
reject: false
|
||||
reject: false,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -63,6 +63,21 @@ function formatOutput({ stderr, stdout }) {
|
||||
return `Received:\n"${stderr}"\n"${stdout}"`;
|
||||
}
|
||||
|
||||
async function getPackedBuilderPath(builderDirName) {
|
||||
const packagePath = path.join(__dirname, '..', '..', '..', builderDirName);
|
||||
const output = await execa('npm', ['pack'], {
|
||||
cwd: packagePath,
|
||||
});
|
||||
|
||||
if (output.code !== 0 || output.stdout.trim() === '') {
|
||||
throw new Error(
|
||||
`Failed to pack ${builderDirName}: ${formatOutput(output)}`
|
||||
);
|
||||
}
|
||||
|
||||
return path.join(packagePath, output.stdout.trim());
|
||||
}
|
||||
|
||||
async function testFixture(directory, opts = {}, args = []) {
|
||||
await runNpmInstall(directory);
|
||||
|
||||
@@ -72,9 +87,9 @@ async function testFixture(directory, opts = {}, args = []) {
|
||||
reject: false,
|
||||
detached: true,
|
||||
stdio: 'ignore',
|
||||
...opts
|
||||
...opts,
|
||||
}),
|
||||
port
|
||||
port,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -140,14 +155,43 @@ test('[now dev] validate routes', async t => {
|
||||
);
|
||||
});
|
||||
|
||||
test('[now dev] 00-list-directory', async t => {
|
||||
const directory = fixture('00-list-directory');
|
||||
const { dev, port } = await testFixture(directory);
|
||||
test('[now dev] validate env var names', async t => {
|
||||
const directory = fixture('invalid-env-var-name');
|
||||
const { dev } = await testFixture(directory, { stdio: 'pipe' });
|
||||
|
||||
try {
|
||||
// start `now dev` detached in child_process
|
||||
dev.unref();
|
||||
|
||||
let stderr = '';
|
||||
dev.stderr.setEncoding('utf8');
|
||||
|
||||
await new Promise(resolve => {
|
||||
dev.stderr.on('data', b => {
|
||||
stderr += b;
|
||||
if (
|
||||
stderr.includes('Ignoring env var "1" because name is invalid') &&
|
||||
stderr.includes(
|
||||
'Ignoring build env var "_a" because name is invalid'
|
||||
) &&
|
||||
stderr.includes(
|
||||
'Env var names must start with letters, and can only contain alphanumeric characters and underscores'
|
||||
)
|
||||
) {
|
||||
resolve();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
t.pass();
|
||||
} finally {
|
||||
dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test(
|
||||
'[now dev] 00-list-directory',
|
||||
testFixtureStdio('00-list-directory', async (t, port) => {
|
||||
const result = await fetchWithRetry(`http://localhost:${port}`, 60);
|
||||
const response = await result;
|
||||
|
||||
@@ -157,10 +201,8 @@ test('[now dev] 00-list-directory', async t => {
|
||||
t.regex(body, /Files within/gm);
|
||||
t.regex(body, /test1.txt/gm);
|
||||
t.regex(body, /directory/gm);
|
||||
} finally {
|
||||
dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test('[now dev] 01-node', async t => {
|
||||
const directory = fixture('01-node');
|
||||
@@ -187,7 +229,7 @@ if (satisfies(process.version, '10.x')) {
|
||||
test('[now dev] 02-angular-node', async t => {
|
||||
const directory = fixture('02-angular-node');
|
||||
const { dev, port } = await testFixture(directory, { stdio: 'pipe' }, [
|
||||
'--debug'
|
||||
'--debug',
|
||||
]);
|
||||
|
||||
let stderr = '';
|
||||
@@ -273,14 +315,9 @@ test(
|
||||
})
|
||||
);
|
||||
|
||||
test('[now dev] 07-hexo-node', async t => {
|
||||
const directory = fixture('07-hexo-node');
|
||||
const { dev, port } = await testFixture(directory);
|
||||
|
||||
try {
|
||||
// start `now dev` detached in child_process
|
||||
dev.unref();
|
||||
|
||||
test(
|
||||
'[now dev] 07-hexo-node',
|
||||
testFixtureStdio('07-hexo-node', async (t, port) => {
|
||||
const result = await fetchWithRetry(`http://localhost:${port}`, 180);
|
||||
const response = await result;
|
||||
|
||||
@@ -288,10 +325,8 @@ test('[now dev] 07-hexo-node', async t => {
|
||||
|
||||
const body = await response.text();
|
||||
t.regex(body, /Hexo \+ Node.js API/gm);
|
||||
} finally {
|
||||
dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[now dev] 08-hugo',
|
||||
@@ -486,7 +521,7 @@ test('[now dev] double slashes redirect', async t => {
|
||||
|
||||
{
|
||||
const res = await fetch(`http://localhost:${port}////?foo=bar`, {
|
||||
redirect: 'manual'
|
||||
redirect: 'manual',
|
||||
});
|
||||
|
||||
validateResponseHeaders(t, res);
|
||||
@@ -500,7 +535,7 @@ test('[now dev] double slashes redirect', async t => {
|
||||
{
|
||||
const res = await fetch(`http://localhost:${port}///api////date.js`, {
|
||||
method: 'POST',
|
||||
redirect: 'manual'
|
||||
redirect: 'manual',
|
||||
});
|
||||
|
||||
validateResponseHeaders(t, res);
|
||||
@@ -667,7 +702,7 @@ test('[now dev] add a `package.json` to trigger `@now/static-build`', async t =>
|
||||
|
||||
const rnd = Math.random().toString();
|
||||
const pkg = {
|
||||
scripts: { build: `mkdir -p public && echo ${rnd} > public/index.txt` }
|
||||
scripts: { build: `mkdir -p public && echo ${rnd} > public/index.txt` },
|
||||
};
|
||||
await fs.writeFile(
|
||||
path.join(directory, 'package.json'),
|
||||
@@ -691,7 +726,7 @@ test('[now dev] add a `package.json` to trigger `@now/static-build`', async t =>
|
||||
test('[now dev] no build matches warning', async t => {
|
||||
const directory = fixture('no-build-matches');
|
||||
const { dev } = await testFixture(directory, {
|
||||
stdio: ['ignore', 'pipe', 'pipe']
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
|
||||
try {
|
||||
@@ -746,7 +781,7 @@ if (satisfies(process.version, '^8.10.0 || ^10.13.0 || >=11.10.1')) {
|
||||
test('[now dev] render warning for empty cwd dir', async t => {
|
||||
const directory = fixture('empty');
|
||||
const { dev, port } = await testFixture(directory, {
|
||||
stdio: ['ignore', 'pipe', 'pipe']
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
|
||||
try {
|
||||
@@ -775,3 +810,82 @@ test('[now dev] render warning for empty cwd dir', async t => {
|
||||
dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[now dev] do not rebuild for changes in the output directory', async t => {
|
||||
const directory = fixture('output-is-source');
|
||||
|
||||
// Pack the builder and set it in the now.json
|
||||
const builder = await getPackedBuilderPath('now-static-build');
|
||||
|
||||
await fs.writeFile(
|
||||
path.join(directory, 'now.json'),
|
||||
JSON.stringify({
|
||||
builds: [
|
||||
{
|
||||
src: 'package.json',
|
||||
use: `file://${builder}`,
|
||||
config: { zeroConfig: true },
|
||||
},
|
||||
],
|
||||
})
|
||||
);
|
||||
|
||||
const { dev, port } = await testFixture(directory, {
|
||||
stdio: ['ignore', 'pipe', 'pipe'],
|
||||
});
|
||||
|
||||
try {
|
||||
dev.unref();
|
||||
|
||||
let stderr = [];
|
||||
const start = Date.now();
|
||||
|
||||
dev.stderr.on('data', str => stderr.push(str));
|
||||
|
||||
while (stderr.join('').includes('Ready') === false) {
|
||||
await sleep(ms('3s'));
|
||||
|
||||
if (Date.now() - start > ms('30s')) {
|
||||
console.log('stderr:', stderr.join(''));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
const resp1 = await fetch(`http://localhost:${port}`);
|
||||
const text1 = await resp1.text();
|
||||
t.is(text1.trim(), 'hello first', stderr.join(''));
|
||||
|
||||
await fs.writeFile(
|
||||
path.join(directory, 'public', 'index.html'),
|
||||
'hello second'
|
||||
);
|
||||
|
||||
await sleep(ms('3s'));
|
||||
|
||||
const resp2 = await fetch(`http://localhost:${port}`);
|
||||
const text2 = await resp2.text();
|
||||
t.is(text2.trim(), 'hello second', stderr.join(''));
|
||||
} finally {
|
||||
dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[now dev] 25-nextjs-src-dir', async t => {
|
||||
const directory = fixture('25-nextjs-src-dir');
|
||||
const { dev, port } = await testFixture(directory);
|
||||
|
||||
try {
|
||||
// start `now dev` detached in child_process
|
||||
dev.unref();
|
||||
|
||||
const result = await fetchWithRetry(`http://localhost:${port}`, 80);
|
||||
const response = await result;
|
||||
|
||||
validateResponseHeaders(t, response);
|
||||
|
||||
const body = await response.text();
|
||||
t.regex(body, /Next.js \+ Node.js API/gm);
|
||||
} finally {
|
||||
dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,9 +1,7 @@
{
  "version": 2,
  "name": "now-dev-next",
  "builds": [
    { "src": "package.json", "use": "@now/next" }
  ],
  "builds": [{ "src": "package.json", "use": "@now/next@canary" }],
  "routes": [
    {
      "src": "/(.*)",

@@ -3,7 +3,7 @@
  "builds": [
    {
      "src": "package.json",
      "use": "@now/static-build",
      "use": "@now/static-build@canary",
      "config": {
        "distDir": "public"
      }
@@ -13,7 +13,5 @@
      "use": "@now/node"
    }
  ],
  "routes": [
    { "src": "^/api/date$", "dest": "api/date.js" }
  ]
  "routes": [{ "src": "^/api/date$", "dest": "api/date.js" }]
}
@@ -1,6 +1,6 @@
|
||||
// Native
|
||||
const { join } = require('path');
|
||||
const { randomBytes } = require('crypto');
|
||||
const { join, dirname } = require('path');
|
||||
|
||||
// Packages
|
||||
const { imageSync: getImageFile } = require('qr-image');
|
||||
@@ -85,14 +85,14 @@ const randomAliasSuffix = randomBytes(6).toString('hex');
|
||||
|
||||
const getRevertAliasConfigFile = () => {
|
||||
return JSON.stringify({
|
||||
'version': 2,
|
||||
'name': `now-revert-alias-${randomAliasSuffix}`,
|
||||
'builds': [
|
||||
{
|
||||
'src': '*.json',
|
||||
'use': '@now/static'
|
||||
}
|
||||
]
|
||||
version: 2,
|
||||
name: `now-revert-alias-${randomAliasSuffix}`,
|
||||
builds: [
|
||||
{
|
||||
src: '*.json',
|
||||
use: '@now/static',
|
||||
},
|
||||
],
|
||||
});
|
||||
};
|
||||
|
||||
@@ -103,14 +103,14 @@ module.exports = async session => {
|
||||
'package.json': getPackageFile(session),
|
||||
'now.json': getConfigFile(false),
|
||||
'first.png': getImageFile(session, {
|
||||
size: 30
|
||||
size: 30,
|
||||
}),
|
||||
'second.png': getImageFile(session, {
|
||||
size: 20
|
||||
size: 20,
|
||||
}),
|
||||
'now.json-builds': getConfigFile(true),
|
||||
'index.html': getIndexHTMLFile(session),
|
||||
'contact.php': getContactFile(session)
|
||||
'contact.php': getContactFile(session),
|
||||
};
|
||||
|
||||
const spec = {
|
||||
@@ -120,26 +120,24 @@ module.exports = async session => {
|
||||
'static-single-file': ['first.png', 'now.json'],
|
||||
'static-multiple-files': ['first.png', 'second.png', 'now.json'],
|
||||
'single-dotfile': {
|
||||
'.testing': 'i am a dotfile'
|
||||
'.testing': 'i am a dotfile',
|
||||
},
|
||||
'config-alias-property': {
|
||||
'now.json':
|
||||
'{ "alias": "test.now.sh", "builds": [ { "src": "*.html", "use": "@now/static" } ] }',
|
||||
'index.html': '<span>test alias</span'
|
||||
'index.html': '<span>test alias</span',
|
||||
},
|
||||
'config-scope-property-email': {
|
||||
'now.json':
|
||||
`{ "scope": "${session}@zeit.pub", "builds": [ { "src": "*.html", "use": "@now/static" } ], "version": 2 }`,
|
||||
'index.html': '<span>test scope email</span'
|
||||
'now.json': `{ "scope": "${session}@zeit.pub", "builds": [ { "src": "*.html", "use": "@now/static" } ], "version": 2 }`,
|
||||
'index.html': '<span>test scope email</span',
|
||||
},
|
||||
'config-scope-property-username': {
|
||||
'now.json':
|
||||
`{ "scope": "${session}", "builds": [ { "src": "*.html", "use": "@now/static" } ] }`,
|
||||
'index.html': '<span>test scope username</span'
|
||||
'now.json': `{ "scope": "${session}", "builds": [ { "src": "*.html", "use": "@now/static" } ] }`,
|
||||
'index.html': '<span>test scope username</span',
|
||||
},
|
||||
'builds-wrong': {
|
||||
'now.json': '{"builder": 1, "type": "static"}',
|
||||
'index.html': '<span>test</span'
|
||||
'index.html': '<span>test</span',
|
||||
},
|
||||
'builds-no-list': {
|
||||
'now.json': `{
|
||||
@@ -161,75 +159,222 @@ module.exports = async session => {
|
||||
FROM alpine
|
||||
RUN mkdir /public
|
||||
RUN echo hello > /public/index.html
|
||||
`
|
||||
`,
|
||||
},
|
||||
'build-env': {
|
||||
'now.json': JSON.stringify({
|
||||
version: 1,
|
||||
type: 'static',
|
||||
build: {
|
||||
env: { FOO: 'bar' }
|
||||
}
|
||||
env: { FOO: 'bar' },
|
||||
},
|
||||
}),
|
||||
Dockerfile: `
|
||||
FROM alpine
|
||||
ARG FOO
|
||||
RUN mkdir /public
|
||||
RUN echo $FOO > /public/index.html
|
||||
`
|
||||
`,
|
||||
},
|
||||
'build-env-arg': {
|
||||
'now.json': JSON.stringify({
|
||||
version: 1,
|
||||
type: 'static'
|
||||
type: 'static',
|
||||
}),
|
||||
Dockerfile: `
|
||||
FROM alpine
|
||||
ARG NONCE
|
||||
RUN mkdir /public
|
||||
RUN echo $NONCE > /public/index.html
|
||||
`
|
||||
`,
|
||||
},
|
||||
'build-env-debug': {
|
||||
'now.json':
|
||||
'{ "builds": [ { "src": "index.js", "use": "@now/node" } ], "version": 2 }',
|
||||
'package.json': `
|
||||
{
|
||||
"scripts": {
|
||||
"now-build": "node now-build.js"
|
||||
}
|
||||
}
|
||||
`,
|
||||
'now-build.js': `
|
||||
const fs = require('fs');
|
||||
fs.writeFileSync(
|
||||
'index.js',
|
||||
fs
|
||||
.readFileSync('index.js', 'utf8')
|
||||
.replace('BUILD_ENV_DEBUG', process.env.NOW_BUILDER_DEBUG),
|
||||
);
|
||||
`,
|
||||
'index.js': `
|
||||
module.exports = (req, res) => {
|
||||
res.status(200).send('BUILD_ENV_DEBUG')
|
||||
}
|
||||
`,
|
||||
},
|
||||
'now-revert-alias-1': {
|
||||
'index.json': JSON.stringify({ name: 'now-revert-alias-1' }),
|
||||
'now.json': getRevertAliasConfigFile()
|
||||
'now.json': getRevertAliasConfigFile(),
|
||||
},
|
||||
'now-revert-alias-2': {
|
||||
'index.json': JSON.stringify({ name: 'now-revert-alias-2' }),
|
||||
'now.json': getRevertAliasConfigFile()
|
||||
'now.json': getRevertAliasConfigFile(),
|
||||
},
|
||||
'now-dev-fail-dev-script': {
|
||||
'package.json': JSON.stringify({
|
||||
scripts: {
|
||||
dev: 'now dev'
|
||||
}
|
||||
}, null, 2)
|
||||
'package.json': JSON.stringify(
|
||||
{
|
||||
scripts: {
|
||||
dev: 'now dev',
|
||||
},
|
||||
},
|
||||
null,
|
||||
2
|
||||
),
|
||||
},
|
||||
'v1-warning-link': {
|
||||
'now.json': JSON.stringify({
|
||||
version: 1
|
||||
version: 1,
|
||||
}),
|
||||
'package.json': JSON.stringify({
|
||||
dependencies: {
|
||||
next: '9.0.0'
|
||||
}
|
||||
})
|
||||
next: '9.0.0',
|
||||
},
|
||||
}),
|
||||
},
|
||||
'static-deployment': {
|
||||
'index.txt': 'Hello World'
|
||||
'index.txt': 'Hello World',
|
||||
},
|
||||
nowignore: {
|
||||
'index.txt': 'Hello World',
|
||||
'ignore.txt': 'Should be ignored',
|
||||
'.nowignore': 'ignore.txt',
|
||||
},
|
||||
'nowignore-allowlist': {
|
||||
'index.txt': 'Hello World',
|
||||
'ignore.txt': 'Should be ignored',
|
||||
'.nowignore': '*\n!index.txt',
|
||||
},
|
||||
'failing-build': {
|
||||
'package.json': JSON.stringify({
|
||||
scripts: {
|
||||
build: 'echo hello && exit 1'
|
||||
}
|
||||
})
|
||||
}
|
||||
build: 'echo hello && exit 1',
|
||||
},
|
||||
}),
|
||||
},
|
||||
'failing-alias': {
|
||||
'now.json': JSON.stringify(
|
||||
Object.assign(JSON.parse(getConfigFile(true)), { alias: 'zeit.co' })
|
||||
),
|
||||
},
|
||||
'local-config-cloud-v1': {
|
||||
'.gitignore': '*.html',
|
||||
'index.js': `
|
||||
const { createServer } = require('http');
|
||||
const { readFileSync } = require('fs');
|
||||
const svr = createServer((req, res) => {
|
||||
const { url = '/' } = req;
|
||||
const file = '.' + url;
|
||||
console.log('reading file ' + file);
|
||||
try {
|
||||
let contents = readFileSync(file, 'utf8');
|
||||
res.end(contents || '');
|
||||
} catch (e) {
|
||||
res.statusCode = 404;
|
||||
res.end('Not found');
|
||||
}
|
||||
});
|
||||
svr.listen(3000);`,
|
||||
'main.html': '<h1>hello main</h1>',
|
||||
'test.html': '<h1>hello test</h1>',
|
||||
'folder/file1.txt': 'file1',
|
||||
'folder/sub/file2.txt': 'file2',
|
||||
Dockerfile: `FROM mhart/alpine-node:latest
|
||||
LABEL name "now-cli-dockerfile-${session}"
|
||||
|
||||
RUN mkdir /app
|
||||
WORKDIR /app
|
||||
COPY . /app
|
||||
RUN yarn
|
||||
|
||||
EXPOSE 3000
|
||||
CMD ["node", "index.js"]`,
|
||||
'now.json': JSON.stringify({
|
||||
version: 1,
|
||||
type: 'docker',
|
||||
features: {
|
||||
cloud: 'v1',
|
||||
},
|
||||
files: ['.gitignore', 'folder', 'index.js', 'main.html'],
|
||||
}),
|
||||
'now-test.json': JSON.stringify({
|
||||
version: 1,
|
||||
type: 'docker',
|
||||
features: {
|
||||
cloud: 'v1',
|
||||
},
|
||||
files: ['.gitignore', 'folder', 'index.js', 'test.html'],
|
||||
}),
|
||||
},
|
||||
'local-config-v2': {
|
||||
[`main-${session}.html`]: '<h1>hello main</h1>',
|
||||
[`test-${session}.html`]: '<h1>hello test</h1>',
|
||||
'now.json': JSON.stringify({
|
||||
version: 2,
|
||||
builds: [{ src: `main-${session}.html`, use: '@now/static' }],
|
||||
routes: [{ src: '/another-main', dest: `/main-${session}.html` }],
|
||||
}),
|
||||
'now-test.json': JSON.stringify({
|
||||
version: 2,
|
||||
builds: [{ src: `test-${session}.html`, use: '@now/static' }],
|
||||
routes: [{ src: '/another-test', dest: `/test-${session}.html` }],
|
||||
}),
|
||||
},
|
||||
'alias-rules': {
|
||||
'rules.json': JSON.stringify({
|
||||
rules: [
|
||||
// for example:
|
||||
// { pathname: '/', dest: '' },
|
||||
// { pathname: '/', dest: '', method: 'GET' }
|
||||
// Will be generated by the actual test
|
||||
],
|
||||
}),
|
||||
'invalid-rules.json': JSON.stringify({
|
||||
what: { what: 0 },
|
||||
}),
|
||||
'invalid-type-rules.json': JSON.stringify({
|
||||
rules: { what: 0 },
|
||||
}),
|
||||
'invalid-json-rules.json': '==ok',
|
||||
},
|
||||
'zero-config-next-js': {
|
||||
'pages/index.js':
|
||||
'export default () => <div><h1>Now CLI test</h1><p>Zero-config + Next.js</p></div>',
|
||||
'package.json': JSON.stringify({
|
||||
name: 'zero-config-next-js-test',
|
||||
scripts: {
|
||||
dev: 'next',
|
||||
start: 'next start',
|
||||
build: 'next build',
|
||||
},
|
||||
dependencies: {
|
||||
next: 'latest',
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
},
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
for (const typeName of Object.keys(spec)) {
|
||||
const needed = spec[typeName];
|
||||
const directory = join(__dirname, '..', 'fixtures', 'integration', typeName);
|
||||
const directory = join(
|
||||
__dirname,
|
||||
'..',
|
||||
'fixtures',
|
||||
'integration',
|
||||
typeName
|
||||
);
|
||||
await mkdirp(directory);
|
||||
|
||||
if (Array.isArray(needed)) {
|
||||
@@ -237,6 +382,7 @@ RUN echo $NONCE > /public/index.html
|
||||
for (const name of needed) {
|
||||
const file = join(directory, name);
|
||||
const content = files[name];
|
||||
await mkdirp(dirname(file));
|
||||
await writeFile(file.replace('-builds', ''), content);
|
||||
}
|
||||
} else {
|
||||
@@ -245,6 +391,7 @@ RUN echo $NONCE > /public/index.html
|
||||
for (const name of names) {
|
||||
const file = join(directory, name);
|
||||
const content = needed[name];
|
||||
await mkdirp(dirname(file));
|
||||
await writeFile(file.replace('-builds', ''), content);
|
||||
}
|
||||
}
|
||||
|
||||
packages/now-cli/test/integration.js
File diff suppressed because it is too large
@@ -1,10 +1,11 @@
{
  "name": "now-client",
  "version": "5.1.2",
  "main": "lib/index.js",
  "version": "5.2.1",
  "main": "dist/src/index.js",
  "typings": "dist/src/index.d.ts",
  "license": "MIT",
  "scripts": {
    "build": "ncc build ./src/index.ts -o ./lib --source-map",
    "build": "tsc",
    "prepare": "npm run build",
    "test-integration-once": "jest --verbose --forceExit",
    "test-lint": "eslint . --ext .js,.ts --ignore-path ../../.eslintignore"
@@ -34,11 +35,10 @@
    "@zeit/fetch": "5.1.0",
    "async-retry": "1.2.3",
    "async-sema": "3.0.0",
    "fetch-h2": "2.2.0",
    "fs-extra": "8.0.1",
    "ignore": "4.0.6",
    "ms": "2.1.2",
    "node-fetch": "2.6.0",
    "now-client": "4.1.2",
    "querystring": "^0.2.0",
    "recursive-readdir": "2.2.2",
    "sleep-promise": "8.0.1"
@@ -1,81 +1,199 @@
|
||||
import { readdir as readRootFolder, lstatSync } from 'fs-extra'
|
||||
import { readdir as readRootFolder, lstatSync } from 'fs-extra';
|
||||
|
||||
import readdir from 'recursive-readdir'
|
||||
import hashes, { mapToObject } from './utils/hashes'
|
||||
import uploadAndDeploy from './upload'
|
||||
import { getNowIgnore } from './utils'
|
||||
import { DeploymentError } from './errors'
|
||||
import readdir from 'recursive-readdir';
|
||||
import { relative, join } from 'path';
|
||||
import hashes, { mapToObject } from './utils/hashes';
|
||||
import uploadAndDeploy from './upload';
|
||||
import { getNowIgnore, createDebug, parseNowJSON } from './utils';
|
||||
import { DeploymentError } from './errors';
|
||||
import {
|
||||
CreateDeploymentFunction,
|
||||
DeploymentOptions,
|
||||
NowJsonOptions,
|
||||
} from './types';
|
||||
|
||||
export { EVENTS } from './utils'
|
||||
export { EVENTS } from './utils';
|
||||
|
||||
export default function buildCreateDeployment(version: number): CreateDeploymentFunction {
|
||||
export default function buildCreateDeployment(
|
||||
version: number
|
||||
): CreateDeploymentFunction {
|
||||
return async function* createDeployment(
|
||||
path: string | string[],
|
||||
options: DeploymentOptions = {}
|
||||
options: DeploymentOptions = {},
|
||||
nowConfig?: NowJsonOptions
|
||||
): AsyncIterableIterator<any> {
|
||||
const debug = createDebug(options.debug);
|
||||
const cwd = process.cwd();
|
||||
|
||||
debug('Creating deployment...');
|
||||
|
||||
if (typeof path !== 'string' && !Array.isArray(path)) {
|
||||
debug(
|
||||
`Error: 'path' is expected to be a string or an array. Received ${typeof path}`
|
||||
);
|
||||
|
||||
throw new DeploymentError({
|
||||
code: 'missing_path',
|
||||
message: 'Path not provided'
|
||||
})
|
||||
message: 'Path not provided',
|
||||
});
|
||||
}
|
||||
|
||||
if (typeof options.token !== 'string') {
|
||||
debug(
|
||||
`Error: 'token' is expected to be a string. Received ${typeof options.token}`
|
||||
);
|
||||
|
||||
throw new DeploymentError({
|
||||
code: 'token_not_provided',
|
||||
message: 'Options object must include a `token`'
|
||||
})
|
||||
message: 'Options object must include a `token`',
|
||||
});
|
||||
}
|
||||
|
||||
const isDirectory = !Array.isArray(path) && lstatSync(path).isDirectory()
|
||||
const isDirectory = !Array.isArray(path) && lstatSync(path).isDirectory();
|
||||
|
||||
// Get .nowignore
|
||||
let rootFiles
|
||||
let rootFiles: string[];
|
||||
|
||||
if (isDirectory && !Array.isArray(path)) {
|
||||
rootFiles = await readRootFolder(path)
|
||||
debug(`Provided 'path' is a directory. Reading subpaths... `);
|
||||
rootFiles = await readRootFolder(path);
|
||||
debug(`Read ${rootFiles.length} subpaths`);
|
||||
} else if (Array.isArray(path)) {
|
||||
rootFiles = path
|
||||
debug(`Provided 'path' is an array of file paths`);
|
||||
rootFiles = path;
|
||||
} else {
|
||||
rootFiles = [path]
|
||||
debug(`Provided 'path' is a single file`);
|
||||
rootFiles = [path];
|
||||
}
|
||||
|
||||
let ignores: string[] = await getNowIgnore(rootFiles, path)
|
||||
// Get .nowignore
|
||||
let { ig, ignores } = await getNowIgnore(path);
|
||||
|
||||
let fileList
|
||||
debug(`Found ${ig.ignores.length} rules in .nowignore`);
|
||||
|
||||
let fileList: string[];
|
||||
|
||||
debug('Building file tree...');
|
||||
|
||||
if (isDirectory && !Array.isArray(path)) {
|
||||
// Directory path
|
||||
fileList = await readdir(path, ignores)
|
||||
const dirContents = await readdir(path, ignores);
|
||||
const relativeFileList = dirContents.map(filePath =>
|
||||
relative(process.cwd(), filePath)
|
||||
);
|
||||
fileList = ig
|
||||
.filter(relativeFileList)
|
||||
.map((relativePath: string) => join(process.cwd(), relativePath));
|
||||
|
||||
debug(`Read ${fileList.length} files in the specified directory`);
|
||||
} else if (Array.isArray(path)) {
|
||||
// Array of file paths
|
||||
fileList = path
|
||||
fileList = path;
|
||||
debug(`Assigned ${fileList.length} files provided explicitly`);
|
||||
} else {
|
||||
// Single file
|
||||
fileList = [path]
|
||||
fileList = [path];
|
||||
debug(`Deploying the provided path as single file`);
|
||||
}
|
||||
|
||||
const files = await hashes(fileList)
|
||||
if (!nowConfig) {
|
||||
// If the user did not provide a nowConfig,
|
||||
// then use the now.json file in the root.
|
||||
const fileName = 'now.json';
|
||||
const absolutePath = fileList.find(f => relative(cwd, f) === fileName);
|
||||
debug(absolutePath ? `Found ${fileName}` : `Missing ${fileName}`);
|
||||
nowConfig = await parseNowJSON(absolutePath);
|
||||
}
|
||||
|
||||
yield { type: 'hashes-calculated', payload: mapToObject(files) }
|
||||
if (
|
||||
version === 1 &&
|
||||
nowConfig &&
|
||||
Array.isArray(nowConfig.files) &&
|
||||
nowConfig.files.length > 0
|
||||
) {
|
||||
// See the docs: https://zeit.co/docs/v1/features/configuration/#files-(array)
|
||||
debug('Filtering file list based on `files` key in now.json');
|
||||
const allowedFiles = new Set<string>(['Dockerfile']);
|
||||
const allowedDirs = new Set<string>();
|
||||
nowConfig.files.forEach(relPath => {
|
||||
if (lstatSync(relPath).isDirectory()) {
|
||||
allowedDirs.add(relPath);
|
||||
} else {
|
||||
allowedFiles.add(relPath);
|
||||
}
|
||||
});
|
||||
fileList = fileList.filter(absPath => {
|
||||
const relPath = relative(cwd, absPath);
|
||||
if (allowedFiles.has(relPath)) {
|
||||
return true;
|
||||
}
|
||||
for (let dir of allowedDirs) {
|
||||
if (relPath.startsWith(dir + '/')) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
});
|
||||
debug(`Found ${fileList.length} files: ${JSON.stringify(fileList)}`);
|
||||
}
|
||||
|
||||
const { token, teamId, force, defaultName, ...metadata } = options
|
||||
// This is a useful warning because it prevents people
|
||||
// from getting confused about a deployment that renders 404.
|
||||
if (
|
||||
fileList.length === 0 ||
|
||||
fileList.every((item): boolean => {
|
||||
if (!item) {
|
||||
return true;
|
||||
}
|
||||
|
||||
metadata.version = version
|
||||
const segments = item.split('/');
|
||||
|
||||
return segments[segments.length - 1].startsWith('.');
|
||||
})
|
||||
) {
|
||||
debug(
|
||||
`Deployment path has no files (or only dotfiles). Yielding a warning event`
|
||||
);
|
||||
yield {
|
||||
type: 'warning',
|
||||
payload:
|
||||
'There are no files (or only files starting with a dot) inside your deployment.',
|
||||
};
|
||||
}
|
||||
|
||||
const files = await hashes(fileList);
|
||||
|
||||
debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`);
|
||||
yield { type: 'hashes-calculated', payload: mapToObject(files) };
|
||||
|
||||
const {
|
||||
token,
|
||||
teamId,
|
||||
force,
|
||||
defaultName,
|
||||
debug: debug_,
|
||||
...metadata
|
||||
} = options;
|
||||
|
||||
debug(`Setting platform version to ${version}`);
|
||||
metadata.version = version;
|
||||
|
||||
const deploymentOpts = {
|
||||
debug: debug_,
|
||||
totalFiles: files.size,
|
||||
nowConfig,
|
||||
token,
|
||||
isDirectory,
|
||||
path,
|
||||
teamId,
|
||||
force,
|
||||
defaultName,
|
||||
metadata
|
||||
}
|
||||
metadata,
|
||||
};
|
||||
|
||||
debug(`Creating the deployment and starting upload...`);
|
||||
for await (const event of uploadAndDeploy(files, deploymentOpts)) {
|
||||
yield event
|
||||
debug(`Yielding a '${event.type}' event`);
|
||||
yield event;
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,13 +1,14 @@
|
||||
import { DeploymentFile } from './utils/hashes'
|
||||
import { DeploymentFile } from './utils/hashes';
|
||||
import {
|
||||
parseNowJSON,
|
||||
fetch,
|
||||
API_DEPLOYMENTS,
|
||||
prepareFiles,
|
||||
API_DEPLOYMENTS_LEGACY
|
||||
} from './utils'
|
||||
import checkDeploymentStatus from './deployment-status'
|
||||
import { generateQueryString } from './utils/query-string'
|
||||
API_DEPLOYMENTS_LEGACY,
|
||||
createDebug,
|
||||
} from './utils';
|
||||
import checkDeploymentStatus from './deployment-status';
|
||||
import { generateQueryString } from './utils/query-string';
|
||||
import { Deployment, DeploymentOptions, NowJsonOptions } from './types';
|
||||
|
||||
export interface Options {
|
||||
metadata: DeploymentOptions;
|
||||
@@ -19,17 +20,22 @@ export interface Options {
|
||||
isDirectory?: boolean;
|
||||
defaultName?: string;
|
||||
preflight?: boolean;
|
||||
debug?: boolean;
|
||||
nowConfig?: NowJsonOptions;
|
||||
}
|
||||
|
||||
async function* createDeployment(
|
||||
metadata: DeploymentOptions,
|
||||
files: Map<string, DeploymentFile>,
|
||||
options: Options
|
||||
options: Options,
|
||||
debug: Function
|
||||
): AsyncIterableIterator<{ type: string; payload: any }> {
|
||||
const preparedFiles = prepareFiles(files, options)
|
||||
const preparedFiles = prepareFiles(files, options);
|
||||
|
||||
let apiDeployments =
|
||||
metadata.version === 2 ? API_DEPLOYMENTS : API_DEPLOYMENTS_LEGACY
|
||||
metadata.version === 2 ? API_DEPLOYMENTS : API_DEPLOYMENTS_LEGACY;
|
||||
|
||||
debug('Sending deployment creation API request');
|
||||
try {
|
||||
const dpl = await fetch(
|
||||
`${apiDeployments}${generateQueryString(options)}`,
|
||||
@@ -38,113 +44,174 @@ async function* createDeployment(
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
Authorization: `Bearer ${options.token}`
|
||||
Authorization: `Bearer ${options.token}`,
|
||||
},
|
||||
body: JSON.stringify({
|
||||
...metadata,
|
||||
files: preparedFiles
|
||||
})
|
||||
files: preparedFiles,
|
||||
}),
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
const json = await dpl.json()
|
||||
const json = await dpl.json();
|
||||
|
||||
debug('Deployment response:', JSON.stringify(json));
|
||||
|
||||
if (!dpl.ok || json.error) {
|
||||
debug('Error: Deployment request status is', dpl.status);
|
||||
// Return error object
|
||||
return yield { type: 'error', payload: json.error ? { ...json.error, status: dpl.status } : { ...json, status: dpl.status } }
|
||||
return yield {
|
||||
type: 'error',
|
||||
payload: json.error
|
||||
? { ...json.error, status: dpl.status }
|
||||
: { ...json, status: dpl.status },
|
||||
};
|
||||
}
|
||||
|
||||
yield { type: 'created', payload: json }
|
||||
for (const [name, value] of dpl.headers.entries()) {
|
||||
if (name.startsWith('x-now-warning-')) {
|
||||
debug('Deployment created with a warning:', value);
|
||||
yield { type: 'warning', payload: value };
|
||||
}
|
||||
if (name.startsWith('x-now-notice-')) {
|
||||
debug('Deployment created with a notice:', value);
|
||||
yield { type: 'notice', payload: value };
|
||||
}
|
||||
}
|
||||
|
||||
yield { type: 'created', payload: json };
|
||||
} catch (e) {
|
||||
return yield { type: 'error', payload: e }
|
||||
return yield { type: 'error', payload: e };
|
||||
}
|
||||
}
|
||||
|
||||
const getDefaultName = (
|
||||
path: string | string[] | undefined,
|
||||
isDirectory: boolean | undefined,
|
||||
files: Map<string, DeploymentFile>
|
||||
files: Map<string, DeploymentFile>,
|
||||
debug: Function
|
||||
): string => {
|
||||
if (isDirectory && typeof path === 'string') {
|
||||
const segments = path.split('/')
|
||||
debug('Provided path is a directory. Using last segment as default name');
|
||||
const segments = path.split('/');
|
||||
|
||||
return segments[segments.length - 1]
|
||||
return segments[segments.length - 1];
|
||||
} else {
|
||||
const filePath = Array.from(files.values())[0].names[0]
|
||||
const segments = filePath.split('/')
|
||||
debug(
|
||||
'Provided path is not a directory. Using last segment of the first file as default name'
|
||||
);
|
||||
const filePath = Array.from(files.values())[0].names[0];
|
||||
const segments = filePath.split('/');
|
||||
|
||||
return segments[segments.length - 1]
|
||||
return segments[segments.length - 1];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export default async function* deploy(
|
||||
files: Map<string, DeploymentFile>,
|
||||
options: Options
|
||||
): AsyncIterableIterator<{ type: string; payload: any }> {
|
||||
const nowJson: DeploymentFile | undefined = Array.from(files.values()).find(
|
||||
(file: DeploymentFile): boolean => {
|
||||
return Boolean(
|
||||
file.names.find((name: string): boolean => name.includes('now.json'))
|
||||
)
|
||||
}
|
||||
)
|
||||
const nowJsonMetadata: NowJsonOptions = parseNowJSON(nowJson)
|
||||
const debug = createDebug(options.debug);
|
||||
const nowJsonMetadata = options.nowConfig || {};
|
||||
delete nowJsonMetadata.github;
|
||||
delete nowJsonMetadata.scope;
|
||||
|
||||
const meta = options.metadata || {}
|
||||
const metadata = { ...nowJsonMetadata, ...meta }
|
||||
const meta = options.metadata || {};
|
||||
const metadata = { ...nowJsonMetadata, ...meta };
|
||||
|
||||
// Check if we should default to a static deployment
|
||||
if (!metadata.version && !metadata.name) {
|
||||
metadata.version = 2
|
||||
metadata.version = 2;
|
||||
metadata.name =
|
||||
options.totalFiles === 1
|
||||
? 'file'
|
||||
: getDefaultName(options.path, options.isDirectory, files)
|
||||
: getDefaultName(options.path, options.isDirectory, files, debug);
|
||||
|
||||
if (metadata.name === 'file') {
|
||||
debug('Setting deployment name to "file" for single-file deployment');
|
||||
}
|
||||
}
|
||||
|
||||
if (options.totalFiles === 1 && !metadata.builds && !metadata.routes) {
|
||||
debug(`Assigning '/' route for single file deployment`);
|
||||
const filePath = Array.from(files.values())[0].names[0];
|
||||
const segments = filePath.split('/');
|
||||
|
||||
metadata.routes = [
|
||||
{
|
||||
src: '/',
|
||||
dest: `/${segments[segments.length - 1]}`,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
if (!metadata.name) {
|
||||
metadata.name =
|
||||
options.defaultName ||
|
||||
getDefaultName(options.path, options.isDirectory, files)
|
||||
getDefaultName(options.path, options.isDirectory, files, debug);
|
||||
debug('No name provided. Defaulting to', metadata.name);
|
||||
}
|
||||
|
||||
if (metadata.version === 1 && !metadata.deploymentType) {
|
||||
metadata.deploymentType = nowJsonMetadata.type
|
||||
debug(`Setting 'type' for 1.0 deployment to '${nowJsonMetadata.type}'`);
|
||||
metadata.deploymentType = nowJsonMetadata.type;
|
||||
}
|
||||
|
||||
delete metadata.github
|
||||
delete metadata.scope
|
||||
if (metadata.version === 1) {
|
||||
debug(`Writing 'config' values for 1.0 deployment`);
|
||||
const nowConfig = { ...nowJsonMetadata };
|
||||
delete nowConfig.version;
|
||||
|
||||
let deployment: Deployment | undefined
|
||||
metadata.config = {
|
||||
...nowConfig,
|
||||
...metadata.config,
|
||||
};
|
||||
}
|
||||
|
||||
let deployment: Deployment | undefined;
|
||||
|
||||
try {
|
||||
for await (const event of createDeployment(metadata, files, options)) {
|
||||
debug('Creating deployment');
|
||||
for await (const event of createDeployment(
|
||||
metadata,
|
||||
files,
|
||||
options,
|
||||
debug
|
||||
)) {
|
||||
if (event.type === 'created') {
|
||||
deployment = event.payload
|
||||
debug('Deployment created');
|
||||
deployment = event.payload;
|
||||
}
|
||||
|
||||
yield event
|
||||
yield event;
|
||||
}
|
||||
} catch (e) {
|
||||
return yield { type: 'error', payload: e }
|
||||
debug('An unexpected error occurred when creating the deployment');
|
||||
return yield { type: 'error', payload: e };
|
||||
}
|
||||
|
||||
if (deployment) {
|
||||
if (deployment.readyState === 'READY') {
|
||||
return yield { type: 'ready', payload: deployment }
|
||||
debug('Deployment is READY. Not performing additional polling');
|
||||
return yield { type: 'ready', payload: deployment };
|
||||
}
|
||||
|
||||
try {
|
||||
debug('Waiting for deployment to be ready...');
|
||||
for await (const event of checkDeploymentStatus(
|
||||
deployment,
|
||||
options.token,
|
||||
metadata.version,
|
||||
options.teamId
|
||||
options.teamId,
|
||||
debug
|
||||
)) {
|
||||
yield event
|
||||
yield event;
|
||||
}
|
||||
} catch (e) {
|
||||
return yield { type: 'error', payload: e }
|
||||
debug(
|
||||
'An unexpected error occurred while waiting for deployment to be ready'
|
||||
);
|
||||
return yield { type: 'error', payload: e };
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,8 @@
import sleep from 'sleep-promise'
import ms from 'ms'
import { fetch, API_DEPLOYMENTS, API_DEPLOYMENTS_LEGACY } from './utils'
import { isDone, isReady, isFailed } from './utils/ready-state'
import sleep from 'sleep-promise';
import ms from 'ms';
import { fetch, API_DEPLOYMENTS, API_DEPLOYMENTS_LEGACY } from './utils';
import { isDone, isReady, isFailed } from './utils/ready-state';
import { Deployment, DeploymentBuild } from './types';

interface DeploymentStatus {
  type: string;
@@ -13,19 +14,24 @@ export default async function* checkDeploymentStatus(
  deployment: Deployment,
  token: string,
  version: number | undefined,
  teamId?: string
  teamId: string | undefined,
  debug: Function
): AsyncIterableIterator<DeploymentStatus> {
  let deploymentState = deployment;
  let allBuildsCompleted = false;
  const buildsState: { [key: string]: DeploymentBuild } = {};
  let apiDeployments = version === 2 ? API_DEPLOYMENTS : API_DEPLOYMENTS_LEGACY;

  debug(`Using ${version ? `${version}.0` : '2.0'} API for status checks`);

  // If the deployment is ready, we don't want any of this to run
  if (isDone(deploymentState)) {
    debug(`Deployment is already READY. Not running status checks`);
    return;
  }

  // Build polling
  debug('Waiting for builds and the deployment to complete...');
  while (true) {
    if (!allBuildsCompleted) {
      const buildsData = await fetch(
@@ -34,6 +40,7 @@ export default async function* checkDeploymentStatus(
        }`,
        token
      );

      const data = await buildsData.json();
      const { builds = [] } = data;

@@ -41,10 +48,14 @@ export default async function* checkDeploymentStatus(
        const prevState = buildsState[build.id];

        if (!prevState || prevState.readyState !== build.readyState) {
          debug(
            `Build state for '${build.entrypoint}' changed to ${build.readyState}`
          );
          yield { type: 'build-state-changed', payload: build };
        }

        if (build.readyState.includes('ERROR')) {
          debug(`Build '${build.entrypoint}' has errorred`);
          return yield { type: 'error', payload: build };
        }

@@ -54,6 +65,7 @@ export default async function* checkDeploymentStatus(
      const readyBuilds = builds.filter((b: DeploymentBuild) => isDone(b));

      if (readyBuilds.length === builds.length) {
        debug('All builds completed');
        allBuildsCompleted = true;
        yield { type: 'all-builds-completed', payload: readyBuilds };
      }
@@ -68,15 +80,21 @@ export default async function* checkDeploymentStatus(
      const deploymentUpdate = await deploymentData.json();

      if (deploymentUpdate.error) {
        return yield { type: 'error', payload: deploymentUpdate.error }
        debug('Deployment status check has errorred');
        return yield { type: 'error', payload: deploymentUpdate.error };
      }

      if (isReady(deploymentUpdate)) {
        debug('Deployment state changed to READY');
        return yield { type: 'ready', payload: deploymentUpdate };
      }

      if (isFailed(deploymentUpdate)) {
        return yield { type: 'error', payload: deploymentUpdate.error || deploymentUpdate };
        debug('Deployment has failed');
        return yield {
          type: 'error',
          payload: deploymentUpdate.error || deploymentUpdate,
        };
      }
    }

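For orientation, a minimal consumer sketch (not part of this diff) showing how the `{ type, payload }` status events yielded above can be drained; the event type names come from the generator in this hunk, everything else is illustrative.

type StatusEvent = { type: string; payload: any };

async function waitForReady(events: AsyncIterableIterator<StatusEvent>) {
  for await (const event of events) {
    // 'build-state-changed', 'all-builds-completed', 'ready' and 'error' are yielded above
    if (event.type === 'error') {
      throw new Error(event.payload.message || 'Deployment failed');
    }
    if (event.type === 'ready') {
      return event.payload; // the ready deployment
    }
  }
}
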
@@ -1,8 +1,8 @@
export class DeploymentError extends Error {
  constructor(err: { code: string; message: string }) {
    super(err.message)
    this.code = err.code
    this.name = 'DeploymentError'
    super(err.message);
    this.code = err.code;
    this.name = 'DeploymentError';
  }

  code: string;

@@ -1,11 +1,12 @@
// Polyfill Node 8 and below
// https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-3.html#the-for-await-of-statement
if (!Symbol.asyncIterator) {
  (Symbol as any).asyncIterator = Symbol.for('Symbol.asyncIterator')
  (Symbol as any).asyncIterator = Symbol.for('Symbol.asyncIterator');
}

import buildCreateDeployment from './create-deployment'
import buildCreateDeployment from './create-deployment';

export const createDeployment = buildCreateDeployment(2)
export const createLegacyDeployment = buildCreateDeployment(1)
export * from './errors'
export const createDeployment = buildCreateDeployment(2);
export const createLegacyDeployment = buildCreateDeployment(1);
export * from './errors';
export * from './types';

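A minimal usage sketch for the exports above (illustrative only; it assumes the package is consumed under its published name now-client and that a token is available in NOW_TOKEN):

import { createDeployment } from 'now-client'; // assumed package name

async function deploy(path: string) {
  for await (const event of createDeployment(path, {
    token: process.env.NOW_TOKEN as string, // assumed to be set in the environment
    name: 'my-app',
  })) {
    if (event.type === 'ready') return event.payload;
    if (event.type === 'error') throw event.payload;
  }
}
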
@@ -1,4 +1,4 @@
declare interface Route {
export interface Route {
  src: string;
  dest: string;
  headers?: {
@@ -8,12 +8,12 @@ declare interface Route {
  methods?: string[];
}

declare interface Build {
export interface Build {
  src: string;
  use: string;
}

declare interface Deployment {
export interface Deployment {
  id: string;
  deploymentId?: string;
  url: string;
@@ -29,19 +29,19 @@ declare interface Deployment {
  public: boolean;
  ownerId: string;
  readyState:
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
  state?:
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
  createdAt: string;
  createdIn: string;
  env: {
@@ -56,30 +56,37 @@ declare interface Deployment {
  alias: string[];
}

declare interface DeploymentBuild {
export interface DeploymentBuild {
  id: string;
  use: string;
  createdIn: string;
  deployedTo: string;
  readyState:
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
  state?:
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
    | 'INITIALIZING'
    | 'ANALYZING'
    | 'BUILDING'
    | 'DEPLOYING'
    | 'READY'
    | 'ERROR';
  readyStateAt: string;
  path: string;
}

declare interface DeploymentOptions {
export interface DeploymentGithubData {
  enabled: boolean;
  autoAlias: boolean;
  silent: boolean;
  autoJobCancelation: boolean;
}

export interface DeploymentOptions {
  version?: number;
  regions?: string[];
  routes?: Route[];
@@ -100,7 +107,7 @@ declare interface DeploymentOptions {
  defaultName?: string;
  isDirectory?: boolean;
  path?: string | string[];
  github?: any;
  github?: DeploymentGithubData;
  scope?: string;
  public?: boolean;
  forceNew?: boolean;
@@ -109,11 +116,19 @@ declare interface DeploymentOptions {
  engines?: { [key: string]: string };
  sessionAffinity?: 'ip' | 'random';
  config?: { [key: string]: any };
  debug?: boolean;
}

declare interface NowJsonOptions {
export interface NowJsonOptions {
  github?: DeploymentGithubData;
  scope?: string;
  type?: 'NPM' | 'STATIC' | 'DOCKER';
  version?: number;
  files?: string[];
}

declare type CreateDeploymentFunction = (path: string | string[], options?: DeploymentOptions) => AsyncIterableIterator<any>;
export type CreateDeploymentFunction = (
  path: string | string[],
  options?: DeploymentOptions,
  nowConfig?: NowJsonOptions
) => AsyncIterableIterator<any>;

@@ -1,148 +1,198 @@
import { createReadStream } from 'fs'
import retry from 'async-retry'
import { DeploymentFile } from './utils/hashes'
import { fetch, API_FILES } from './utils'
import { DeploymentError } from '.'
import deploy, { Options } from './deploy'
import { createReadStream } from 'fs';
import { Agent } from 'https';
import retry from 'async-retry';
import { Sema } from 'async-sema';
import { DeploymentFile } from './utils/hashes';
import { fetch, API_FILES, createDebug } from './utils';
import { DeploymentError } from '.';
import deploy, { Options } from './deploy';

export default async function* upload(files: Map<string, DeploymentFile>, options: Options): AsyncIterableIterator<any> {
  const { token, teamId } = options

  if (!files && !token && !teamId) {
    return
const isClientNetworkError = (err: Error | DeploymentError) => {
  if (err.message) {
    // These are common network errors that may happen occasionally and we should retry if we encounter these
    return (
      err.message.includes('ETIMEDOUT') ||
      err.message.includes('ECONNREFUSED') ||
      err.message.includes('ENOTFOUND') ||
      err.message.includes('ECONNRESET') ||
      err.message.includes('EAI_FAIL') ||
      err.message.includes('socket hang up') ||
      err.message.includes('network socket disconnected')
    );
  }

  let missingFiles = []
  return false;
};

  for await(const event of deploy(files, options)) {
export default async function* upload(
  files: Map<string, DeploymentFile>,
  options: Options,
): AsyncIterableIterator<any> {
  const { token, teamId, debug: isDebug } = options;
  const debug = createDebug(isDebug);

  if (!files && !token && !teamId) {
    debug(`Neither 'files', 'token' nor 'teamId are present. Exiting`);
    return;
  }

  let missingFiles = [];

  debug('Determining necessary files for upload...');

  for await (const event of deploy(files, options)) {
    if (event.type === 'error') {
      if (event.payload.code === 'missing_files') {
        missingFiles = event.payload.missing
        missingFiles = event.payload.missing;

        debug(`${missingFiles.length} files are required to upload`);
      } else {
        return yield event
        return yield event;
      }
    } else {
      // If the deployment has succeeded here, don't continue
      if (event.type === 'ready') {
        return yield event
        debug('Deployment succeeded on file check');

        return yield event;
      }

      yield event
      yield event;
    }

  }

  const shas = missingFiles
  const uploadList: { [key: string]: Promise<any> } = {}
  const shas = missingFiles;

  yield { type: 'file_count', payload: { total: files, missing: shas } };

  const uploadList: { [key: string]: Promise<any> } = {};
  debug('Building an upload list...');

  const semaphore = new Sema(700, { capacity: 700 });

  shas.map((sha: string): void => {
    uploadList[sha] = retry(async (bail): Promise<any> => {
      const file = files.get(sha)
    uploadList[sha] = retry(
      async (bail): Promise<any> => {
        const file = files.get(sha);

        if (!file) {
          return bail(new Error(`File ${sha} is undefined`))
        }

        const fPath = file.names[0]
        const stream = createReadStream(fPath)
        const { data } = file

        const fstreamPush = stream.push

        let uploadedSoFar = 0
        // let lastEvent = 0

        stream.push = (chunk: any): boolean => {
          // If we're about to push the last chunk, then don't do it here
          // But instead, we'll "hang" the progress bar and do it on 200
          if (chunk && uploadedSoFar + chunk.length < data.length) {
            uploadedSoFar += chunk.length
            // semaphore.release()
        if (!file) {
          debug(`File ${sha} is undefined. Bailing`);
          return bail(new Error(`File ${sha} is undefined`));
        }

          return fstreamPush.call(stream, chunk)
        }
        await semaphore.acquire();

        // while (uploadedSoFar !== file.data.length) {
        //   await semaphore.acquire()
        const fPath = file.names[0];
        const stream = createReadStream(fPath);
        const { data } = file;

        //   lastEvent = uploadedSoFar;
        //   yield uploadedSoFar;
        // }
        let err;
        let result;

        let err
        let result
        try {
          const res = await fetch(
            API_FILES,
            token,
            {
              agent: new Agent({ keepAlive: true }),
              method: 'POST',
              headers: {
                'Content-Type': 'application/octet-stream',
                'x-now-digest': sha,
                'x-now-length': data.length,
              },
              body: stream,
              teamId,
            },
            isDebug
          );

          try {
            const res = await fetch(API_FILES, token, {
              method: 'POST',
              headers: {
                'Content-Type': 'application/octet-stream',
                'x-now-digest': sha,
                'x-now-length': data.length,
              },
              body: stream,
              teamId
            })
          if (res.status === 200) {
            debug(
              `File ${sha} (${file.names[0]}${
                file.names.length > 1 ? ` +${file.names.length}` : ''
              }) uploaded`
            );
            result = {
              type: 'file-uploaded',
              payload: { sha, file },
            };
          } else if (res.status > 200 && res.status < 500) {
            // If something is wrong with our request, we don't retry
            debug(
              `An internal error occurred in upload request. Not retrying...`
            );
            const { error } = await res.json();

            if (res.status === 200) {
              result = {
                type: 'file-uploaded',
                payload: { sha, file }
            err = new DeploymentError(error);
          } else {
            // If something is wrong with the server, we retry
            debug(`A server error occurred in upload request. Retrying...`);
            const { error } = await res.json();

            throw new DeploymentError(error);
          }
            } else if (res.status > 200 && res.status < 500) {
              // If something is wrong with our request, we don't retry
              const { error } = await res.json()

              err = new DeploymentError(error)
            } else {
              // If something is wrong with the server, we retry
              const { error } = await res.json()

              throw new DeploymentError(error)
        } catch (e) {
          debug(`An unexpected error occurred in upload promise:\n${e}`);
          err = new Error(e);
        } finally {
          stream.close();
          stream.destroy();
        }
          } catch (e) {
            err = new Error(e)
          } finally {
            stream.close()
            stream.destroy()
          }

        if (err) {
          return bail(err)
        }
        semaphore.release();

        return result
      },
      {
        retries: 6,
        randomize: true
      }
    )
  })
        if (err) {
          if (isClientNetworkError(err)) {
            debug('Network error, retrying: ' + err.message);
            // If it's a network error, we retry
            throw err;
          } else {
            debug('Other error, bailing: ' + err.message);
            // Otherwise we bail
            return bail(err);
          }
        }

        return result;
      },
      {
        retries: 3,
        factor: 2,
      }
    );
  });

  debug('Starting upload');

  while (Object.keys(uploadList).length > 0) {
    try {
      const event = await Promise.race(Object.keys(uploadList).map((key): Promise<any> => uploadList[key]))
      const event = await Promise.race(
        Object.keys(uploadList).map((key): Promise<any> => uploadList[key])
      );

      delete uploadList[event.payload.sha]
      yield event
      delete uploadList[event.payload.sha];
      yield event;
    } catch (e) {
      return yield { type: 'error', payload: e }
      return yield { type: 'error', payload: e };
    }
  }

  yield { type: 'all-files-uploaded', payload: files }
  debug('All files uploaded');
  yield { type: 'all-files-uploaded', payload: files };

  try {
    for await(const event of deploy(files, options)) {
    debug('Starting deployment creation');
    for await (const event of deploy(files, options)) {
      if (event.type === 'ready') {
        return yield event
        debug('Deployment is ready');
        return yield event;
      }

      yield event
      yield event;
    }
  } catch (e) {
    yield { type: 'error', payload: e }
    debug('An unexpected error occurred when starting deployment creation');
    yield { type: 'error', payload: e };
  }
}
}

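The upload loop above leans on async-retry's throw-to-retry / bail-to-stop contract. A condensed sketch of that pattern (illustrative only; uploadOnce is a hypothetical stand-in for the fetch call above):

import retry from 'async-retry';

const isNetworkError = (err: Error): boolean =>
  ['ETIMEDOUT', 'ECONNREFUSED', 'ECONNRESET', 'ENOTFOUND'].some(code =>
    (err.message || '').includes(code)
  );

async function uploadWithRetry(uploadOnce: () => Promise<void>) {
  return retry(
    async bail => {
      try {
        await uploadOnce(); // hypothetical single attempt
      } catch (err) {
        if (isNetworkError(err)) throw err; // thrown errors are retried by async-retry
        return bail(err); // bail() aborts retrying for non-transient errors
      }
    },
    { retries: 3, factor: 2 }
  );
}
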
@@ -1,5 +1,6 @@
import { createHash } from 'crypto'
import fs from 'fs-extra'
import { createHash } from 'crypto';
import fs from 'fs-extra';
import { Sema } from 'async-sema';

export interface DeploymentFile {
  names: string[];
@@ -15,7 +16,7 @@ export interface DeploymentFile {
function hash(buf: Buffer): string {
  return createHash('sha1')
    .update(buf)
    .digest('hex')
    .digest('hex');
}

/**
@@ -23,14 +24,16 @@ function hash(buf: Buffer): string {
 * @param map with hashed files
 * @return {object}
 */
export const mapToObject = (map: Map<string, DeploymentFile>): { [key: string]: any } => {
  const obj: { [key: string]: any } = {}
export const mapToObject = (
  map: Map<string, DeploymentFile>
): { [key: string]: DeploymentFile } => {
  const obj: { [key: string]: DeploymentFile } = {};
  for (const [key, value] of map) {
    obj[key] = value
    obj[key] = value;
  }

  return obj
}
  return obj;
};

/**
 * Computes hashes for the contents of each file given.
@@ -39,25 +42,29 @@ export const mapToObject = (map: Map<string, DeploymentFile>): { [key: string]:
 * @return {Map}
 */
async function hashes(files: string[]): Promise<Map<string, DeploymentFile>> {
  const map = new Map()
  const map = new Map();
  const semaphore = new Sema(100);

  await Promise.all(
    files.map(
      async (name: string): Promise<void> => {
        const data = await fs.readFile(name)
        await semaphore.acquire();
        const data = await fs.readFile(name);

        const h = hash(data)
        const entry = map.get(h)
        const h = hash(data);
        const entry = map.get(h);

        if (entry) {
          entry.names.push(name)
          entry.names.push(name);
        } else {
          map.set(h, { names: [name], data })
          map.set(h, { names: [name], data });
        }
      },
    ),
  )
  return map

        semaphore.release();
      }
    )
  );
  return map;
}

export default hashes
export default hashes;

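A standalone sketch of the content-addressing idea used by hashes() above: identical file contents collapse onto one SHA1 digest (illustrative only; it works on in-memory buffers rather than file paths):

import { createHash } from 'crypto';

const sha1 = (buf: Buffer): string =>
  createHash('sha1').update(buf).digest('hex');

function dedupeByDigest(buffers: Buffer[]): Map<string, Buffer> {
  const byDigest = new Map<string, Buffer>();
  for (const buf of buffers) {
    const digest = sha1(buf);
    if (!byDigest.has(digest)) byDigest.set(digest, buf); // duplicate contents share one entry
  }
  return byDigest;
}
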
@@ -1,52 +1,63 @@
import { DeploymentFile } from './hashes'
import { parse as parseUrl } from 'url'
import { fetch as fetch_ } from 'fetch-h2'
import { readFile } from 'fs-extra'
import { join } from 'path'
import qs from 'querystring'
import pkg from '../../package.json'
import { Options } from '../deploy'
import { DeploymentFile } from './hashes';
import { parse as parseUrl } from 'url';
import fetch_ from 'node-fetch';
import { join, sep } from 'path';
import qs from 'querystring';
import ignore from 'ignore';
import pkg from '../../package.json';
import { Options } from '../deploy';
import { NowJsonOptions } from '../types';
import { Sema } from 'async-sema';
import { readFile } from 'fs-extra';
const semaphore = new Sema(10);

export const API_FILES = 'https://api.zeit.co/v2/now/files'
export const API_DEPLOYMENTS = 'https://api.zeit.co/v9/now/deployments'
export const API_DEPLOYMENTS_LEGACY = 'https://api.zeit.co/v3/now/deployments'
export const API_FILES = 'https://api.zeit.co/v2/now/files';
export const API_DEPLOYMENTS = 'https://api.zeit.co/v9/now/deployments';
export const API_DEPLOYMENTS_LEGACY = 'https://api.zeit.co/v3/now/deployments';
export const API_DELETE_DEPLOYMENTS_LEGACY =
  'https://api.zeit.co/v2/now/deployments'
  'https://api.zeit.co/v2/now/deployments';

export const EVENTS = new Set([
  // File events
  'hashes-calculated',
  'file_count',
  'file-uploaded',
  'all-files-uploaded',
  // Deployment events
  'created',
  'ready',
  'warning',
  'error',
  // Build events
  'build-state-changed'
])
  'build-state-changed',
]);

export function parseNowJSON(file?: DeploymentFile): NowJsonOptions {
  if (!file) {
    return {}
export async function parseNowJSON(filePath?: string): Promise<NowJsonOptions> {
  if (!filePath) {
    return {};
  }

  try {
    const jsonString = file.data.toString()
    const jsonString = await readFile(filePath, 'utf8');

    return JSON.parse(jsonString)
    return JSON.parse(jsonString);
  } catch (e) {
    // eslint-disable-next-line no-console
    console.error(e)
    console.error(e);

    return {}
    return {};
  }
}

export async function getNowIgnore(
  files: string[],
  path: string | string[]
): Promise<string[]> {
const maybeRead = async function<T>(path: string, default_: T) {
  try {
    return await readFile(path, 'utf8');
  } catch (err) {
    return default_;
  }
};

export async function getNowIgnore(path: string | string[]): Promise<any> {
  let ignores: string[] = [
    '.hg',
    '.git',
@@ -70,55 +81,57 @@ export async function getNowIgnore(
    'node_modules',
    '__pycache__/',
    'venv/',
    'CVS'
  ]
    'CVS',
  ];

  await Promise.all(
    files.map(
      async (file: string): Promise<void> => {
        if (file.includes('.nowignore')) {
          const filePath = Array.isArray(path)
            ? file
            : file.includes(path)
            ? file
            : join(path, file)
          const nowIgnore = await readFile(filePath)
  const nowIgnore = Array.isArray(path)
    ? await maybeRead(
        join(
          path.find(fileName => fileName.includes('.nowignore'), '') || '',
          '.nowignore'
        ),
        ''
      )
    : await maybeRead(join(path, '.nowignore'), '');

          nowIgnore
            .toString()
            .split('\n')
            .filter((s: string): boolean => s.length > 0)
            .forEach((entry: string): number => ignores.push(entry))
        }
      }
    )
  )
  const ig = ignore().add(`${ignores.join('\n')}\n${nowIgnore}`);

  return ignores
  return { ig, ignores };
}

export const fetch = (
export const fetch = async (
  url: string,
  token: string,
  opts: any = {}
  opts: any = {},
  debugEnabled?: boolean
): Promise<any> => {
  if (opts.teamId) {
    const parsedUrl = parseUrl(url, true)
    const query = parsedUrl.query
  semaphore.acquire();
  const debug = createDebug(debugEnabled);
  let time: number;

    query.teamId = opts.teamId
    url = `${parsedUrl.href}?${qs.encode(query)}`
    delete opts.teamId
  if (opts.teamId) {
    const parsedUrl = parseUrl(url, true);
    const query = parsedUrl.query;

    query.teamId = opts.teamId;
    url = `${parsedUrl.href}?${qs.encode(query)}`;
    delete opts.teamId;
  }

  opts.headers = opts.headers || {}
  opts.headers = opts.headers || {};
  // @ts-ignore
  opts.headers.authorization = `Bearer ${token}`
  opts.headers.Authorization = `Bearer ${token}`;
  // @ts-ignore
  opts.headers['user-agent'] = `now-client-v${pkg.version}`
  opts.headers['user-agent'] = `now-client-v${pkg.version}`;

  return fetch_(url, opts)
}
  debug(`${opts.method || 'GET'} ${url}`);
  time = Date.now();
  const res = await fetch_(url, opts);
  debug(`DONE in ${Date.now() - time}ms: ${opts.method || 'GET'} ${url}`);
  semaphore.release();

  return res;
};

export interface PreparedFile {
  file: string;
@@ -126,39 +139,58 @@ export interface PreparedFile {
  size: number;
}

const isWin = process.platform.includes('win');

export const prepareFiles = (
  files: Map<string, DeploymentFile>,
  options: Options
): PreparedFile[] => {
  const preparedFiles = [...files.keys()].reduce(
    (acc: PreparedFile[], sha: string): PreparedFile[] => {
      const next = [...acc]
      const next = [...acc];

      const file = files.get(sha) as DeploymentFile
      const file = files.get(sha) as DeploymentFile;

      for (const name of file.names) {
        let fileName
        let fileName;

        if (options.isDirectory) {
          // Directory
          fileName = options.path ? name.replace(`${options.path}/`, '') : name
          fileName = options.path
            ? name.substring(options.path.length + 1)
            : name;
        } else {
          // Array of files or single file
          const segments = name.split('/')
          fileName = segments[segments.length - 1]
          const segments = name.split(sep);
          fileName = segments[segments.length - 1];
        }

        next.push({
          file: fileName,
          file: isWin ? fileName.replace(/\\/g, '/') : fileName,
          size: file.data.byteLength || file.data.length,
          sha
        })
          sha,
        });
      }

      return next
      return next;
    },
    []
  )
  );

  return preparedFiles
  return preparedFiles;
};

export function createDebug(debug?: boolean) {
  const isDebug = debug || process.env.NOW_CLIENT_DEBUG;

  if (isDebug) {
    return (...logs: string[]) => {
      process.stderr.write(
        [`[now-client-debug] ${new Date().toISOString()}`, ...logs].join(' ') +
          '\n'
      );
    };
  }

  return () => {};
}

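How the createDebug() helper above is meant to be consumed, as a rough sketch (the relative import path is assumed; logging stays a no-op unless the debug flag or NOW_CLIENT_DEBUG is set):

import { createDebug } from './utils'; // path assumed, relative to the now-client sources

const debug = createDebug(Boolean(process.env.NOW_CLIENT_DEBUG));
debug('upload started'); // written to stderr with a timestamp when enabled, otherwise dropped
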
@@ -1,13 +1,13 @@
import { Options } from "../deploy"
import { Options } from '../deploy';

export const generateQueryString = (options: Options): string => {
  if (options.force && options.teamId) {
    return `?teamId=${options.teamId}&forceNew=1`
    return `?teamId=${options.teamId}&forceNew=1`;
  } else if (options.teamId) {
    return `?teamId=${options.teamId}`
    return `?teamId=${options.teamId}`;
  } else if (options.force) {
    return `?forceNew=1`
    return `?forceNew=1`;
  }

  return ''
}
  return '';
};

@@ -1,3 +1,16 @@
export const isReady = ({ readyState, state }: Deployment | DeploymentBuild): boolean => readyState === 'READY' || state === 'READY'
export const isFailed = ({ readyState, state }: Deployment | DeploymentBuild): boolean => readyState ? (readyState.endsWith('_ERROR') || readyState === 'ERROR') : (state && state.endsWith('_ERROR') || state === 'ERROR')
export const isDone = (buildOrDeployment: Deployment | DeploymentBuild): boolean => isReady(buildOrDeployment) || isFailed(buildOrDeployment)
import { Deployment, DeploymentBuild } from '../types';
export const isReady = ({
  readyState,
  state,
}: Deployment | DeploymentBuild): boolean =>
  readyState === 'READY' || state === 'READY';
export const isFailed = ({
  readyState,
  state,
}: Deployment | DeploymentBuild): boolean =>
  readyState
    ? readyState.endsWith('_ERROR') || readyState === 'ERROR'
    : (state && state.endsWith('_ERROR')) || state === 'ERROR';
export const isDone = (
  buildOrDeployment: Deployment | DeploymentBuild
): boolean => isReady(buildOrDeployment) || isFailed(buildOrDeployment);

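A quick illustration of what the ready-state helpers above report for a failed build (values chosen for the example):

import { isReady, isFailed, isDone } from './utils/ready-state'; // path assumed within the now-client sources

const build = { readyState: 'BUILD_ERROR' } as any;
isReady(build);  // false
isFailed(build); // true ('_ERROR' suffix counts as failed)
isDone(build);   // true (ready or failed both mean polling can stop)
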
@@ -1,11 +1,11 @@
import path from 'path'
import { TOKEN } from './constants'
import { fetch, API_DEPLOYMENTS } from '../src/utils'
import { Deployment } from './types'
import { createDeployment } from '../src/index'
import path from 'path';
import { TOKEN } from './constants';
import { fetch, API_DEPLOYMENTS } from '../src/utils';
import { Deployment } from './types';
import { createDeployment } from '../src/index';

describe('create v2 deployment', () => {
  let deployment: Deployment
  let deployment: Deployment;

  afterEach(async () => {
    if (deployment) {
@@ -15,10 +15,48 @@ describe('create v2 deployment', () => {
        {
          method: 'DELETE'
        }
      )
      expect(response.status).toEqual(200)
      );
      expect(response.status).toEqual(200);
    }
  })
  });

  it('will display an empty deployment warning', async () => {
    for await (const event of createDeployment(
      path.resolve(__dirname, 'fixtures', 'v2'),
      {
        token: TOKEN,
        name: 'now-client-tests-v2'
      }
    )) {
      if (event.type === 'warning') {
        expect(event.payload).toEqual('READY');
      }

      if (event.type === 'ready') {
        deployment = event.payload;
        break;
      }
    }
  });

  it('will report correct file count event', async () => {
    for await (const event of createDeployment(
      path.resolve(__dirname, 'fixtures', 'v2'),
      {
        token: TOKEN,
        name: 'now-client-tests-v2'
      }
    )) {
      if (event.type === 'file_count') {
        expect(event.payload.total).toEqual(0);
      }

      if (event.type === 'ready') {
        deployment = event.payload;
        break;
      }
    }
  });

  it('will create a v2 deployment', async () => {
    for await (const event of createDeployment(
@@ -29,10 +67,10 @@ describe('create v2 deployment', () => {
      }
    )) {
      if (event.type === 'ready') {
        deployment = event.payload
        expect(deployment.readyState).toEqual('READY')
        break
        deployment = event.payload;
        expect(deployment.readyState).toEqual('READY');
        break;
      }
    }
  })
})
  });
});

@@ -5,7 +5,7 @@
    "lib": ["esnext", "dom"],
    "module": "CommonJS",
    "moduleResolution": "node",
    "outDir": "./lib",
    "outDir": "./dist",
    "resolveJsonModule": true,
    "strictNullChecks": true,
    "noImplicitAny": true,
@@ -14,5 +14,6 @@
    "strict": true,
    "target": "ES2015",
    "downlevelIteration": true
  }
}
  },
  "include": ["./src", "./types"]
}

@@ -11,7 +11,7 @@ import {
  BuildOptions,
  shouldServe,
  Files,
  debug
  debug,
} from '@now/build-utils';

import { createGo, getAnalyzedEntrypoint } from './go-helpers';
@@ -38,7 +38,7 @@ async function initPrivateGit(credentials: string) {
    'config',
    '--global',
    'credential.helper',
    `store --file ${join(homedir(), '.git-credentials')}`
    `store --file ${join(homedir(), '.git-credentials')}`,
  ]);

  await writeFile(join(homedir(), '.git-credentials'), credentials);
@@ -51,7 +51,7 @@ export async function build({
  entrypoint,
  config,
  workPath,
  meta = {} as BuildParamsMeta
  meta = {} as BuildParamsMeta,
}: BuildParamsType) {
  if (process.env.GIT_CREDENTIALS && !meta.isDev) {
    debug('Initialize Git credentials...');
@@ -76,7 +76,7 @@ Learn more: https://github.com/golang/go/wiki/Modules
  // eslint-disable-next-line prefer-const
  let [goPath, outDir] = await Promise.all([
    getWriteableDirectory(),
    getWriteableDirectory()
    getWriteableDirectory(),
  ]);

  const srcPath = join(goPath, 'src', 'lambda');
@@ -194,7 +194,7 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go
        process.platform,
        process.arch,
        {
          cwd: entrypointDirname
          cwd: entrypointDirname,
        },
        true
      );
@@ -204,7 +204,7 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go

        await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
      } catch (err) {
        console.log(`failed to create default go.mod for ${packageName}`);
        console.log(`Failed to create default go.mod for ${packageName}`);
        throw err;
      }
    }
@@ -277,11 +277,11 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go
        !isGoModExist
      ) {
        await move(downloadedFiles[entrypoint].fsPath, finalDestination, {
          overwrite: forceMove
          overwrite: forceMove,
        });
      }
    } catch (err) {
      console.log('failed to move entry to package folder');
      console.log('Failed to move entry to package folder');
      throw err;
    }

@@ -354,7 +354,7 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go
      process.platform,
      process.arch,
      {
        cwd: entrypointDirname
        cwd: entrypointDirname,
      },
      false
    );
@@ -381,7 +381,7 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go
    try {
      await go.get();
    } catch (err) {
      console.log('failed to `go get`');
      console.log('Failed to `go get`');
      throw err;
    }

@@ -390,7 +390,7 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go
    try {
      const src = [
        join(entrypointDirname, mainGoFileName),
        downloadedFiles[entrypoint].fsPath
        downloadedFiles[entrypoint].fsPath,
      ];
      await go.build(src, destPath);
    } catch (err) {
@@ -403,10 +403,10 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go
    files: { ...(await glob('**', outDir)), ...includedFiles },
    handler: 'handler',
    runtime: 'go1.x',
    environment: {}
    environment: {},
  });
  const output = {
    [entrypoint]: lambda
    [entrypoint]: lambda,
  };

  const watch = parsedAnalyzed.watch;
@@ -420,7 +420,7 @@ Learn more: https://zeit.co/docs/v2/advanced/builders/#go

  return {
    output,
    watch: watch.concat(watchSub)
    watch: watch.concat(watchSub),
  };
}

@@ -1,6 +1,6 @@
{
  "name": "@now/go",
  "version": "0.5.12",
  "version": "0.6.0",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://zeit.co/docs/v2/deployments/official-builders/go-now-go",

packages/now-go/test/fixtures/16-custom-flag/custom/first.go (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
// +build first

package custom

const Random = "first:RANDOMNESS_PLACEHOLDER"

packages/now-go/test/fixtures/16-custom-flag/custom/second.go (vendored, new file, 5 lines)
@@ -0,0 +1,5 @@
// +build second

package custom

const Random = "second:RANDOMNESS_PLACEHOLDER"

packages/now-go/test/fixtures/16-custom-flag/go.mod (vendored, new file, 3 lines)
@@ -0,0 +1,3 @@
module custom-flag

go 1.12
Some files were not shown because too many files have changed in this diff.