Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-23 09:59:12 +00:00)

Compare commits: 241 commits, @now/ruby@ ... @now/pytho
@@ -1,6 +1,5 @@
|
||||
version: 2
|
||||
jobs:
|
||||
|
||||
install:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
@@ -11,9 +10,9 @@ jobs:
|
||||
- checkout
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-dependencies-{{ checksum "yarn.lock" }}
|
||||
# fallback to using the latest cache if no exact match is found
|
||||
- v1-dependencies-
|
||||
- v1-dependencies-{{ checksum "yarn.lock" }}
|
||||
# fallback to using the latest cache if no exact match is found
|
||||
- v1-dependencies-
|
||||
- run:
|
||||
name: Updating apt packages
|
||||
command: sudo apt-get update
|
||||
@@ -26,6 +25,7 @@ jobs:
|
||||
- save_cache:
|
||||
paths:
|
||||
- node_modules
|
||||
- packages/gatsby-plugin-now/node_modules
|
||||
- packages/now-build-utils/node_modules
|
||||
- packages/now-cgi/node_modules
|
||||
- packages/now-cli/node_modules
|
||||
@@ -43,6 +43,7 @@ jobs:
|
||||
root: .
|
||||
paths:
|
||||
- node_modules
|
||||
- packages/gatsby-plugin-now/node_modules
|
||||
- packages/now-build-utils/node_modules
|
||||
- packages/now-cgi/node_modules
|
||||
- packages/now-cli/node_modules
|
||||
@@ -67,6 +68,9 @@ jobs:
|
||||
command: sudo apt install -y rsync
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- run:
|
||||
name: Linking dependencies
|
||||
command: yarn bootstrap
|
||||
- run:
|
||||
name: Building
|
||||
command: yarn build
|
||||
@@ -75,6 +79,7 @@ jobs:
|
||||
- persist_to_workspace:
|
||||
root: .
|
||||
paths:
|
||||
- packages/gatsby-plugin-now/test/fixtures
|
||||
- packages/now-build-utils/dist
|
||||
- packages/now-cgi/dist
|
||||
- packages/now-cli/dist
|
||||
@@ -91,6 +96,7 @@ jobs:
|
||||
- packages/now-routing-utils/dist
|
||||
- packages/now-ruby/dist
|
||||
- packages/now-static-build/dist
|
||||
- packages/now-static-build/test/fixtures/10a-gatsby-redirects/plugins
|
||||
|
||||
test-lint:
|
||||
docker:
|
||||
@@ -107,36 +113,14 @@ jobs:
|
||||
name: Linting Code
|
||||
command: yarn test-lint
|
||||
|
||||
# test-unit:
|
||||
# docker:
|
||||
# - image: circleci/node:10
|
||||
# working_directory: ~/repo
|
||||
# steps:
|
||||
# - checkout
|
||||
# - attach_workspace:
|
||||
# at: .
|
||||
# - run:
|
||||
# name: Compiling `now dev` HTML error templates
|
||||
# command: node packages/now-cli/scripts/compile-templates.js
|
||||
# - run:
|
||||
# name: Running Unit Tests
|
||||
# command: yarn test-unit --clean false
|
||||
# - persist_to_workspace:
|
||||
# root: .
|
||||
# paths:
|
||||
# - packages/now-cli/.nyc_output
|
||||
|
||||
test-integration-macos-node-8:
|
||||
macos:
|
||||
xcode: '9.2.0'
|
||||
xcode: '9.0.1'
|
||||
working_directory: ~/repo
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- run:
|
||||
name: Update Node.js
|
||||
command: curl -sfLS install-node.now.sh/8.11 | sh -s -- --yes
|
||||
- run:
|
||||
name: Output version
|
||||
command: node --version
|
||||
@@ -221,15 +205,12 @@ jobs:
|
||||
|
||||
test-integration-macos-now-dev-node-8:
|
||||
macos:
|
||||
xcode: '9.2.0'
|
||||
xcode: '9.0.1'
|
||||
working_directory: ~/repo
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- run:
|
||||
name: Update Node.js
|
||||
command: curl -sfLS install-node.now.sh/8.11 | sh -s -- --yes
|
||||
- run:
|
||||
name: Output version
|
||||
command: node --version
|
||||
@@ -345,6 +326,24 @@ jobs:
|
||||
name: Running Integration Tests Once
|
||||
command: yarn test-integration-once --clean false
|
||||
|
||||
test-unit:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
working_directory: ~/repo
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- run:
|
||||
name: Compiling `now dev` HTML error templates
|
||||
command: node packages/now-cli/scripts/compile-templates.js
|
||||
- run:
|
||||
name: Output version
|
||||
command: node --version
|
||||
- run:
|
||||
name: Running Unit Tests
|
||||
command: yarn test-unit --clean false
|
||||
|
||||
coverage:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
@@ -384,36 +383,6 @@ jobs:
|
||||
name: Finalize Sentry Release
|
||||
command: sentry-cli releases finalize now-cli@`git describe --tags`
|
||||
|
||||
publish-stable:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
working_directory: ~/repo
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- run:
|
||||
name: Saving Authentication Information
|
||||
command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||
- run:
|
||||
name: Publishing to Stable Channel
|
||||
command: npm publish --tag latest
|
||||
|
||||
publish-canary:
|
||||
docker:
|
||||
- image: circleci/node:10
|
||||
working_directory: ~/repo
|
||||
steps:
|
||||
- checkout
|
||||
- attach_workspace:
|
||||
at: .
|
||||
- run:
|
||||
name: Saving Authentication Information
|
||||
command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
|
||||
- run:
|
||||
name: Publishing to Canary Channel
|
||||
command: npm publish --tag canary
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
unscheduled:
|
||||
@@ -434,12 +403,6 @@ workflows:
|
||||
filters:
|
||||
tags:
|
||||
only: /.*/
|
||||
# - test-unit:
|
||||
# requires:
|
||||
# - build
|
||||
# filters:
|
||||
# tags:
|
||||
# only: /.*/
|
||||
- test-integration-macos-node-8:
|
||||
requires:
|
||||
- build
|
||||
@@ -515,12 +478,14 @@ workflows:
|
||||
- test-integration-once:
|
||||
requires:
|
||||
- build
|
||||
- test-unit:
|
||||
requires:
|
||||
- build
|
||||
filters:
|
||||
tags:
|
||||
only: /.*/
|
||||
- coverage:
|
||||
requires:
|
||||
#- test-unit
|
||||
- test-integration-macos-node-8
|
||||
- test-integration-macos-node-10
|
||||
- test-integration-macos-node-12
|
||||
@@ -534,23 +499,8 @@ workflows:
|
||||
- test-integration-linux-now-dev-node-10
|
||||
- test-integration-linux-now-dev-node-12
|
||||
- test-integration-once
|
||||
- test-unit
|
||||
- test-lint
|
||||
filters:
|
||||
tags:
|
||||
only: /.*/
|
||||
- publish-canary:
|
||||
requires:
|
||||
- coverage
|
||||
filters:
|
||||
tags:
|
||||
only: /^.*canary.*($|\b)/
|
||||
branches:
|
||||
ignore: /.*/
|
||||
- publish-stable:
|
||||
requires:
|
||||
- coverage
|
||||
filters:
|
||||
tags:
|
||||
only: /^(\d+\.)?(\d+\.)?(\*|\d+)$/
|
||||
branches:
|
||||
ignore: /.*/
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
node_modules
|
||||
dist
|
||||
|
||||
# gatsby-plugin-now
|
||||
packages/gatsby-plugin-now/test/fixtures
|
||||
|
||||
# now-cli
|
||||
packages/now-cli/@types
|
||||
packages/now-cli/download
|
||||
|
||||
2
.github/workflows/publish.yml
vendored
@@ -9,7 +9,7 @@ on:
|
||||
- '!*'
|
||||
|
||||
jobs:
|
||||
publish:
|
||||
Publish:
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
|
||||
1
.gitignore
vendored
@@ -12,6 +12,7 @@ packages/now-cli/.builders
|
||||
packages/now-cli/assets
|
||||
packages/now-cli/src/util/dev/templates/*.ts
|
||||
packages/now-cli/test/**/yarn.lock
|
||||
!packages/now-cli/test/dev/**/yarn.lock
|
||||
packages/now-cli/test/**/node_modules
|
||||
packages/now-cli/test/dev/fixtures/08-hugo/hugo
|
||||
packages/now-cli/test/dev/fixtures/**/dist
|
||||
|
||||
@@ -12,5 +12,6 @@ optimistic_updates = true
|
||||
|
||||
[merge.message]
|
||||
title = "pull_request_title"
|
||||
body = "pull_request_body"
|
||||
include_pr_number = true
|
||||
body_type = "markdown"
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
{
|
||||
"singleQuote": true
|
||||
}
|
||||
"singleQuote": true,
|
||||
"trailingComma": "es5"
|
||||
}
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
# Builders Developer Reference
|
||||
# Runtime Developer Reference
|
||||
|
||||
The following page is a reference for how to create a Builder using the available Builder's API.
|
||||
The following page is a reference for how to create a Runtime using the available Runtime API.
|
||||
|
||||
A Builder is an npm module that exposes a `build` function and optionally an `analyze` function and `prepareCache` function.
|
||||
Official Builders are published to [npmjs.com](https://npmjs.com) as a package and referenced in the `use` property of the `now.json` configuration file.
|
||||
However, the `use` property will work with any [npm install argument](https://docs.npmjs.com/cli/install) such as a git repo url which is useful for testing your Builder.
|
||||
A Runtime is an npm module that exposes a `build` function and optionally an `analyze` function and `prepareCache` function.
|
||||
Official Runtimes are published to [npmjs.com](https://npmjs.com) as a package and referenced in the `use` property of the `now.json` configuration file.
|
||||
However, the `use` property will work with any [npm install argument](https://docs.npmjs.com/cli/install) such as a git repo url which is useful for testing your Runtime.
|
||||
|
||||
See the [Builders Documentation](https://zeit.co/docs/v2/advanced/builders) to view example usage.
|
||||
See the [Runtimes Documentation](https://zeit.co/docs/v2/advanced/runtimes) to view example usage.
|
||||
|
||||
## Builder Exports
|
||||
## Runtime Exports
|
||||
|
||||
### `version`
|
||||
|
||||
A **required** exported constant that decides which version of the Builder API to use.
|
||||
A **required** exported constant that decides which version of the Runtime API to use.
|
||||
|
||||
The latest and suggested version is `2`.
|
||||
|
||||
@@ -109,7 +109,7 @@ export prepareCache(options: PrepareCacheOptions) {
|
||||
|
||||
### `shouldServe`
|
||||
|
||||
An **optional** exported function that is only used by `now dev` in [Now CLI](https:///download) and indicates whether a [Builder](https://zeit.co/docs/v2/advanced/builders) wants to be responsible for building a certain request path.
|
||||
An **optional** exported function that is only used by `now dev` in [Now CLI](https:///download) and indicates whether a [Runtime](https://zeit.co/docs/v2/advanced/runtimes) wants to be responsible for building a certain request path.
|
||||
|
||||
```js
|
||||
shouldServe({
|
||||
@@ -133,7 +133,7 @@ export shouldServe(options: ShouldServeOptions) {
|
||||
|
||||
If this method is not defined, Now CLI will default to [this function](https://github.com/zeit/now/blob/52994bfe26c5f4f179bdb49783ee57ce19334631/packages/now-build-utils/src/should-serve.ts).
|
||||
|
||||
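For illustration, a complete `shouldServe` could look like the sketch below (an assumption-laden example, not the default linked above; it only relies on the `entrypoint` and `requestPath` options):

```js
// Sketch only: claim a request when its path is exactly this entrypoint.
exports.shouldServe = ({ entrypoint, requestPath }) => {
  return requestPath === entrypoint;
};
```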
### Builder Options
|
||||
### Runtime Options
|
||||
|
||||
The exported functions [`analyze`](#analyze), [`build`](#build), and [`prepareCache`](#preparecache) receive one argument with the following properties.
|
||||
|
||||
@@ -145,78 +145,15 @@ The exported functions [`analyze`](#analyze), [`build`](#build), and [`prepareCa
|
||||
- `cachePath`: A writable temporary directory where you can build a cache for the next run. This is only passed to `prepareCache`.
|
||||
- `config`: An arbitrary object passed by the user in the [Build definition](#defining-the-build-step) in `now.json`.
|
||||
|
||||
## Example: html-minifier
|
||||
## Examples
|
||||
|
||||
Let's walk through what it takes to create a simple builder that takes in a HTML source file and yields a minified HTML static file as its build output.
|
||||
|
||||
While this is a very simple builder, the approach demonstrated here can be used to return anything: one or more static files and/or one or more lambdas.
|
||||
|
||||
## Setting up the module
|
||||
|
||||
### Defining the analyze step
|
||||
|
||||
The `analyze` hook is optional. Its goal is to give the developer a tool to avoid wasting time _re-computing a build_ that has already occurred.
|
||||
|
||||
The return value of `analyze` is a _fingerprint_: a simple string that uniquely identifies the build process.
|
||||
|
||||
If `analyze` is not specified, its behavior is to use as the fingerprint the combined checksums of **all the files in the same directory level as the entrypoint**. This is a default that errs on making sure that we re-execute builds when files _other than the entrypoint_ (like dependencies, manifest files, etc) have changed.
|
||||
|
||||
For our `html-minify` example, we know that HTML files don't have dependencies. Therefore, our analyze step can just return the `digest` of the entrypoint.
|
||||
|
||||
Our `index.js` file looks as follows:
|
||||
|
||||
```js
|
||||
exports.analyze = function({ files, entrypoint }) {
|
||||
return files[entrypoint].digest
|
||||
}
|
||||
```
|
||||
|
||||
This means that we will only re-minify and re-create the build output _only if the file contents (and therefore its digest) change._
|
||||
|
||||
### Defining the build step
|
||||
|
||||
Your module will need some utilities to manipulate the data structures we pass you, create new ones and alter the filesystem.
|
||||
|
||||
To that end, we expose our API as part of a `@now/build-utils` package. This package is always loaded on your behalf, so make sure it's only included as `peerDependencies` in your `package.json`.
|
||||
|
||||
Builders can include dependencies of their liking:
|
||||
|
||||
```js
|
||||
const htmlMinifier = require('html-minifier')
|
||||
|
||||
exports.version = 2
|
||||
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest
|
||||
|
||||
exports.build = async ({ files, entrypoint, config }) => {
|
||||
const stream = files[entrypoint].toStream()
|
||||
const options = Object.assign({}, config || {})
|
||||
const { data } = await FileBlob.fromStream({ stream })
|
||||
const content = data.toString()
|
||||
const minified = htmlMinifier(content, options)
|
||||
const result = new FileBlob({ data: minified })
|
||||
|
||||
return {
|
||||
output: {
|
||||
[entrypoint]: result
|
||||
},
|
||||
watch: [],
|
||||
routes: {}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Defining a `prepareCache` step
|
||||
|
||||
If our builder had performed work that could be re-used in the next build invocation, we could define a `prepareCache` step.
|
||||
|
||||
In this case, there are not intermediate artifacts that we can cache, and our `analyze` step already takes care of caching the full output based on the fingerprint of the input.
|
||||
Check out our [Node.js Runtime](https://github.com/zeit/now/tree/canary/packages/now-node), [Go Runtime](https://github.com/zeit/now/tree/canary/packages/now-go), [Python Runtime](https://github.com/zeit/now/tree/canary/packages/now-python) or [Ruby Runtime](https://github.com/zeit/now/tree/canary/packages/now-ruby) for examples of how to build one.
|
||||
|
||||
## Technical Details
|
||||
|
||||
### Execution Context
|
||||
|
||||
A [Serverless Function](https://zeit.co/docs/v2/advanced/concepts/lambdas) is created where the builder logic is executed. The lambda is run using the Node.js 8 runtime. A brand new sandbox is created for each deployment, for security reasons. The sandbox is cleaned up between executions to ensure no lingering temporary files are shared from build to build.
|
||||
A [Serverless Function](https://zeit.co/docs/v2/advanced/concepts/lambdas) is created where the Runtime logic is executed. The lambda is run using the Node.js 8 runtime. A brand new sandbox is created for each deployment, for security reasons. The sandbox is cleaned up between executions to ensure no lingering temporary files are shared from build to build.
|
||||
|
||||
All the APIs you export ([`analyze`](#analyze), [`build`](#build) and [`prepareCache`](#preparecache)) are not guaranteed to be run in the same process, but the filesystem we expose (e.g.: `workPath` and the results of calling [`getWriteableDirectory`](#getWriteableDirectory) ) is retained.
|
||||
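A minimal sketch of how that retained filesystem can be used across hooks (illustrative only; it assumes `@now/build-utils` is available as a peer dependency, and the `.fingerprint` file name is made up):

```js
const path = require('path');
const fs = require('fs');
const { getWriteableDirectory } = require('@now/build-utils');

exports.version = 2;

exports.analyze = async ({ files, entrypoint, workPath }) => {
  // Anything written under workPath here is still there in `build` and `prepareCache`.
  fs.writeFileSync(path.join(workPath, '.fingerprint'), files[entrypoint].digest);
  return files[entrypoint].digest;
};

exports.build = async ({ workPath }) => {
  const fingerprint = fs.readFileSync(path.join(workPath, '.fingerprint'), 'utf8');
  const scratch = await getWriteableDirectory(); // extra writable space, also retained
  // ...produce static files or lambdas here, possibly keyed on `fingerprint`...
  return { output: {}, watch: [], routes: {} };
};
```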
|
||||
@@ -228,15 +165,15 @@ When a new build is created, we pre-populate the `workPath` supplied to `analyze
|
||||
|
||||
The `analyze` step can modify that directory, and it will not be re-created when it's supplied to `build` and `prepareCache`.
|
||||
|
||||
To learn how the cache key is computed and invalidated, refer to the [overview](https://zeit.co/docs/v2/advanced/builders#technical-details).
|
||||
To learn how the cache key is computed and invalidated, refer to the [overview](https://zeit.co/docs/v2/advanced/runtimes#technical-details).
|
||||
|
||||
### Accessing Environment and Secrets
|
||||
|
||||
The env and secrets specified by the user as `build.env` are passed to the builder process. This means you can access user env via `process.env` in Node.js.
|
||||
The env and secrets specified by the user as `build.env` are passed to the Runtime process. This means you can access user env via `process.env` in Node.js.
|
||||
|
||||
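For example (a sketch; `MY_TOKEN` is a hypothetical `build.env` key, not one this repository defines):

```js
exports.build = async ({ files, entrypoint }) => {
  // Values declared under build.env in now.json are visible to the Runtime here.
  const token = process.env.MY_TOKEN;
  if (!token) throw new Error('MY_TOKEN is missing from build.env');
  // ...use the token while producing the build output...
  return { output: {}, watch: [], routes: {} };
};
```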
### Utilities as peerDependencies
|
||||
|
||||
When you publish your builder to npm, make sure to not specify `@now/build-utils` (as seen below in the API definitions) as a dependency, but rather as part of `peerDependencies`.
|
||||
When you publish your Runtime to npm, make sure to not specify `@now/build-utils` (as seen below in the API definitions) as a dependency, but rather as part of `peerDependencies`.
|
||||
|
||||
## Types
|
||||
|
||||
@@ -358,7 +295,7 @@ This is an abstract enumeration type that is implemented by one of the following
|
||||
|
||||
## JavaScript API
|
||||
|
||||
The following is exposed by `@now/build-utils` to simplify the process of writing Builders, manipulating the file system, using the above types, etc.
|
||||
The following is exposed by `@now/build-utils` to simplify the process of writing Runtimes, manipulating the file system, using the above types, etc.
|
||||
|
||||
### `createLambda`
|
||||
|
||||
@@ -1,40 +0,0 @@
|
||||
# Publishing to npm
|
||||
|
||||
Always publish to the Canary Channel as soon as a PR is merged into the `canary` branch.
|
||||
|
||||
```
|
||||
yarn publish-canary
|
||||
```
|
||||
|
||||
Publish the Stable Channel weekly.
|
||||
|
||||
- Cherry pick each commit from `canary` to `master` branch
|
||||
- Verify that you are _in-sync_ with canary (with the exception of the `version` line in `package.json`)
|
||||
- Deploy the modified Builders
|
||||
|
||||
```
|
||||
# View differences excluding "Publish" commits
|
||||
git checkout canary && git pull
|
||||
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/canary.txt
|
||||
git checkout master && git pull
|
||||
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/master.txt
|
||||
diff ~/Desktop/canary.txt ~/Desktop/master.txt
|
||||
|
||||
# Cherry pick all PRs from canary into master ...
|
||||
git cherry-pick <PR501_COMMIT_SHA>
|
||||
git cherry-pick <PR502_COMMIT_SHA>
|
||||
git cherry-pick <PR503_COMMIT_SHA>
|
||||
git cherry-pick <PR504_COMMIT_SHA>
|
||||
|
||||
# Verify the only difference is "version" in package.json
|
||||
git diff origin/canary
|
||||
|
||||
# Ship it
|
||||
yarn publish-stable
|
||||
```
|
||||
|
||||
After running this publish step, GitHub Actions will take care of publishing the modified Builder packages to npm.
|
||||
|
||||
If for some reason GitHub Actions fails to publish the npm package, you may do so
|
||||
manually by running `npm publish` from the package directory. Make sure to
|
||||
use `npm publish --tag canary` if you are publishing a canary release!
|
||||
@@ -1,9 +1,9 @@
|
||||

|
||||
|
||||
[](https://circleci.com/gh/zeit/workflows/now)
|
||||
[](https://circleci.com/gh/zeit/workflows/now/tree/master)
|
||||
[](https://spectrum.chat/zeit)
|
||||
|
||||
**Note**: The [canary](https://github.com/zeit/now/tree/canary) branch is under heavy development – the stable release branch is [master](https://github.com/zeit/now/tree/master).
|
||||
**NOTE**: The [canary](https://github.com/zeit/now/tree/canary) branch is under heavy development – the stable release branch is [master](https://github.com/zeit/now/tree/master).
|
||||
|
||||
## Usage
|
||||
|
||||
|
||||
18
changelog.js
Normal file
@@ -0,0 +1,18 @@
|
||||
const { execSync } = require('child_process');
|
||||
|
||||
const commit = execSync('git log --pretty=format:"%s %H"')
|
||||
.toString()
|
||||
.trim()
|
||||
.split('\n')
|
||||
.find(line => line.startsWith('Publish '))
|
||||
.split(' ')
|
||||
.pop();
|
||||
|
||||
if (!commit) {
|
||||
throw new Error('Unable to find last publish commit');
|
||||
}
|
||||
|
||||
const log = execSync(`git log --pretty=format:"- %s [%an]" ${commit}...HEAD`).toString().trim();
|
||||
|
||||
console.log(`Changes since the last publish commit ${commit}:`);
|
||||
console.log(`\n${log}\n`);
|
||||
36
diff.js
Normal file
@@ -0,0 +1,36 @@
|
||||
const { execSync } = require('child_process');
|
||||
const { join } = require('path');
|
||||
const { tmpdir } = require('os');
|
||||
const { mkdirSync, writeFileSync } = require('fs');
|
||||
|
||||
function getCommits(count) {
|
||||
return execSync('git log --pretty=format:"%s [%an]"')
|
||||
.toString()
|
||||
.trim()
|
||||
.split('\n')
|
||||
.slice(0, count)
|
||||
.filter(line => !line.startsWith('Publish '))
|
||||
.join('\n');
|
||||
}
|
||||
|
||||
function main(count = '100') {
|
||||
console.log(`Generating diff using last ${count} commits...`);
|
||||
const randomTmpId = Math.random().toString().slice(2);
|
||||
const dir = join(tmpdir(), 'now-diff' + randomTmpId);
|
||||
mkdirSync(dir);
|
||||
|
||||
execSync('git checkout canary && git pull');
|
||||
const canary = getCommits(count);
|
||||
execSync('git checkout master && git pull');
|
||||
const master = getCommits(count);
|
||||
|
||||
writeFileSync(join(dir, 'log.txt'), '# canary\n' + canary);
|
||||
execSync('git init && git add -A && git commit -m "init"', { cwd: dir });
|
||||
writeFileSync(join(dir, 'log.txt'), '# master\n' + master);
|
||||
|
||||
console.log(`Done generating diff. Run the following:`);
|
||||
console.log(`cd ${dir}`);
|
||||
console.log('Then use `git diff` or `git difftool` to view the differences.');
|
||||
}
|
||||
|
||||
main(process.argv[2]);
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
#### Why This Error Occurred
|
||||
|
||||
The domain you supplied cannot be verified using either the intended set of nameservers of the given verification TXT record.
|
||||
The domain you supplied cannot be verified using either the intended set of nameservers or the given verification TXT record.
|
||||
|
||||
#### Possible Ways to Fix It
|
||||
|
||||
|
||||
@@ -19,6 +19,8 @@ the provided `$PORT` that the builder expects the server to bind to.
|
||||
For example, if you are using Gatsby, your `now-dev` script must use the `-p`
|
||||
(port) option to bind to the `$PORT` specified from the builder:
|
||||
|
||||
> *In Windows environments, reference the `PORT` environment variable with `%PORT%`*
|
||||
|
||||
```
|
||||
{
|
||||
...
|
||||
|
||||
@@ -33,6 +33,8 @@
|
||||
"publish-stable": "git checkout master && git pull && lerna version --exact",
|
||||
"publish-canary": "git checkout canary && git pull && lerna version prerelease --preid canary --exact",
|
||||
"publish-from-github": "./.circleci/publish.sh",
|
||||
"diff": "node diff.js",
|
||||
"changelog": "node changelog.js",
|
||||
"build": "node run.js build all",
|
||||
"test-lint": "node run.js test-lint",
|
||||
"test-unit": "node run.js test-unit",
|
||||
@@ -56,5 +58,8 @@
|
||||
"hooks": {
|
||||
"pre-commit": "lint-staged"
|
||||
}
|
||||
},
|
||||
"resolutions": {
|
||||
"signal-exit": "TooTallNate/signal-exit#update/sighub-to-sigint-on-windows"
|
||||
}
|
||||
}
|
||||
|
||||
6
packages/gatsby-plugin-now/build.sh
Executable file
@@ -0,0 +1,6 @@
|
||||
#!/bin/bash
|
||||
set -euo pipefail
|
||||
|
||||
# build fixtures for tests
|
||||
yarn --cwd test/fixtures install
|
||||
yarn --cwd test/fixtures run build
|
||||
51
packages/gatsby-plugin-now/gatsby-node.js
Normal file
@@ -0,0 +1,51 @@
|
||||
const path = require('path');
|
||||
const writeFile = require('util').promisify(require('fs').writeFile);
|
||||
|
||||
const REDIRECT_FILE_NAME = '__now_routes_g4t5bY.json';
|
||||
|
||||
exports.onPostBuild = async ({ store }) => {
|
||||
const { redirects, program } = store.getState();
|
||||
|
||||
const routes = [{ handle: 'filesystem' }];
|
||||
|
||||
for (const redirect of redirects) {
|
||||
const route = {
|
||||
src: redirect.fromPath,
|
||||
status: redirect.statusCode || (redirect.isPermanent ? 301 : 302),
|
||||
headers: { Location: redirect.toPath },
|
||||
};
|
||||
|
||||
if (redirect.force) {
|
||||
routes.unshift(route);
|
||||
} else {
|
||||
routes.push(route);
|
||||
}
|
||||
}
|
||||
|
||||
// we implement gatsby's recommendations
|
||||
// https://www.gatsbyjs.org/docs/caching/
|
||||
const finalRoutes = [
|
||||
{
|
||||
src: '^/static/(.*)$',
|
||||
headers: { 'cache-control': 'public,max-age=31536000,immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
src: '^/.*\\.(js|css)$',
|
||||
headers: { 'cache-control': 'public,max-age=31536000,immutable' },
|
||||
continue: true,
|
||||
},
|
||||
{
|
||||
src: '^/(sw\\.js|app-data\\.json|.*\\.html|page-data/.*)$',
|
||||
headers: { 'cache-control': 'public,max-age=0,must-revalidate' },
|
||||
continue: true,
|
||||
},
|
||||
...routes,
|
||||
{ src: '.*', status: 404, dest: '/404.html' },
|
||||
];
|
||||
|
||||
await writeFile(
|
||||
path.join(program.directory, 'public', REDIRECT_FILE_NAME),
|
||||
JSON.stringify(finalRoutes)
|
||||
);
|
||||
};
|
||||
1
packages/gatsby-plugin-now/index.js
Normal file
@@ -0,0 +1 @@
|
||||
// noop
|
||||
36
packages/gatsby-plugin-now/package.json
Normal file
@@ -0,0 +1,36 @@
|
||||
{
|
||||
"name": "gatsby-plugin-now",
|
||||
"version": "1.2.1-canary.4",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"homepage": "https://zeit.co/guides/deploying-gatsby-with-now",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now.git",
|
||||
"directory": "packages/gatsby-plugin-now"
|
||||
},
|
||||
"keywords": [
|
||||
"gatsby",
|
||||
"gatsby-plugin"
|
||||
],
|
||||
"scripts": {
|
||||
"build": "./build.sh",
|
||||
"test-integration-once": "jest --verbose"
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"gatsby-node.js"
|
||||
],
|
||||
"peerDependencies": {
|
||||
"gatsby": ">=2.0.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"jest": "24.9.0"
|
||||
},
|
||||
"jest": {
|
||||
"testPathIgnorePatterns": [
|
||||
"/node_modules/",
|
||||
"<rootDir>/test/fixtures/"
|
||||
]
|
||||
}
|
||||
}
|
||||
7
packages/gatsby-plugin-now/readme.md
Normal file
@@ -0,0 +1,7 @@
|
||||
# gatsby-plugin-now
|
||||
|
||||
⚠️ The use of this plugin is deprecated. ZEIT Now supports Gatsby Redirects out-of-the-box and does not require the use of a plugin.
|
||||
|
||||
---
|
||||
|
||||
This plugin generates [Now Routes](https://zeit.co/docs/v2/advanced/routes) for [redirects](https://www.gatsbyjs.org/docs/actions/#createRedirect) you configured for your Gatsby project.
|
||||
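For projects that still use it, enabling the plugin follows the usual Gatsby pattern (a minimal sketch; the test fixture further below uses an equivalent `require.resolve` form):

```js
// gatsby-config.js — sketch; the plugin takes no options.
module.exports = {
  plugins: ['gatsby-plugin-now'],
};
```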
112
packages/gatsby-plugin-now/test/__snapshots__/index.test.js.snap
Normal file
@@ -0,0 +1,112 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`test generated now routes 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"continue": true,
|
||||
"headers": Object {
|
||||
"cache-control": "public,max-age=31536000,immutable",
|
||||
},
|
||||
"src": "^/static/(.*)$",
|
||||
},
|
||||
Object {
|
||||
"continue": true,
|
||||
"headers": Object {
|
||||
"cache-control": "public,max-age=31536000,immutable",
|
||||
},
|
||||
"src": "^/.*\\\\.(js|css)$",
|
||||
},
|
||||
Object {
|
||||
"continue": true,
|
||||
"headers": Object {
|
||||
"cache-control": "public,max-age=0,must-revalidate",
|
||||
},
|
||||
"src": "^/(sw\\\\.js|app-data\\\\.json|.*\\\\.html|page-data/.*)$",
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/",
|
||||
},
|
||||
"src": "/my-special-redirect",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"handle": "filesystem",
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/page-2",
|
||||
},
|
||||
"src": "/page2",
|
||||
"status": 301,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/page-2/",
|
||||
},
|
||||
"src": "/page2/",
|
||||
"status": 301,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/",
|
||||
},
|
||||
"src": "/orange",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/",
|
||||
},
|
||||
"src": "/grape",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/page-2/",
|
||||
},
|
||||
"src": "/blue",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/page-2/",
|
||||
},
|
||||
"src": "/randirect",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/",
|
||||
},
|
||||
"src": "/juice",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/",
|
||||
},
|
||||
"src": "/soda",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/page-2/",
|
||||
},
|
||||
"src": "/donut",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"headers": Object {
|
||||
"Location": "/page-2/",
|
||||
},
|
||||
"src": "/randorect",
|
||||
"status": 302,
|
||||
},
|
||||
Object {
|
||||
"dest": "/404.html",
|
||||
"src": ".*",
|
||||
"status": 404,
|
||||
},
|
||||
]
|
||||
`;
|
||||
4
packages/gatsby-plugin-now/test/fixtures/.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
public
|
||||
node_modules
|
||||
.cache
|
||||
yarn.lock
|
||||
3
packages/gatsby-plugin-now/test/fixtures/gatsby-config.js
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
plugins: [{ resolve: require.resolve('../../') }]
|
||||
};
|
||||
105
packages/gatsby-plugin-now/test/fixtures/gatsby-node.js
vendored
Normal file
@@ -0,0 +1,105 @@
|
||||
'use strict';
|
||||
|
||||
// Implement the Gatsby API “createPages”. This is called once the
|
||||
// data layer is bootstrapped to let plugins create pages from data.
|
||||
exports.createPages = ({ actions }) => {
|
||||
// need createRedirect action in actions collection
|
||||
// to make the redirection magic happen.
|
||||
// https://www.gatsbyjs.org/docs/bound-action-creators/
|
||||
const { createRedirect } = actions;
|
||||
|
||||
// Let’s set up some string consts to use throughout the following.
|
||||
// MDN > JavaScript reference > Statements and declarations
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/const
|
||||
// Maybe we usually redirect to page 2, with trailing slash.
|
||||
const page2Path = `/page-2/`;
|
||||
// But sometimes to homepage.
|
||||
const homePath = `/`;
|
||||
|
||||
// One-off redirect, note trailing slash missing on fromPath and
|
||||
// toPath here.
|
||||
createRedirect({
|
||||
fromPath: `/page2`,
|
||||
isPermanent: true,
|
||||
redirectInBrowser: true,
|
||||
toPath: `/page-2`
|
||||
});
|
||||
|
||||
// Another one-off redirect, note trailing slash on toPath here.
|
||||
// This time we want trailing slash on toPath so we use
|
||||
// page2Path. Need to handle trailing-slashed and non-trailing-
|
||||
// slashed fromPaths separately, Gatsby serves page components
|
||||
// at either version by default, but we need to explicitly redirect
|
||||
// both versions independently, more on page components:
|
||||
// Docs > Building with Components
|
||||
// https://www.gatsbyjs.org/docs/building-with-components/
|
||||
// and handling trailing slashes:
|
||||
// Docs > Creating and modifying pages > Removing trailing slashes
|
||||
// https://www.gatsbyjs.org/docs/creating-and-modifying-pages/#removing-trailing-slashes
|
||||
createRedirect({
|
||||
fromPath: `/page2/`,
|
||||
isPermanent: true,
|
||||
redirectInBrowser: true,
|
||||
toPath: page2Path
|
||||
});
|
||||
|
||||
// One approach to handle several redirects at once is to create an
|
||||
// array of from/to path pairs.
|
||||
let redirectBatch1 = [
|
||||
{ f: `/orange`, t: `/` },
|
||||
// We could use homePath and page2Path directly here.
|
||||
{ f: `/grape`, t: homePath },
|
||||
{ f: `/blue`, t: page2Path },
|
||||
// or leave to empty and swap for page2Path later on.
|
||||
{ f: `/randirect`, t: `` }
|
||||
];
|
||||
|
||||
// Then we can loop through the array of object literals to create
|
||||
// each redirect. A for loop would do the trick
|
||||
for (var { f: f, t: t } of redirectBatch1) {
|
||||
// Here we swap any empty toPath values for trusty page 2 via
|
||||
// page2Path.
|
||||
if (t === ``) {
|
||||
t = page2Path;
|
||||
}
|
||||
createRedirect({
|
||||
fromPath: f,
|
||||
redirectInBrowser: true,
|
||||
toPath: t
|
||||
});
|
||||
// Uncomment next line to see loop in action during build
|
||||
// console.log('\nRedirecting:\n' + f + '\nTo:\n' + t + '\n');
|
||||
// or check .cache/redirects.json post-compile.
|
||||
}
|
||||
|
||||
// A more modern approach might use forEach rather than for...of
|
||||
// Compare
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Loops_and_iteration#for...of_statement
|
||||
// and
|
||||
// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Array/forEach
|
||||
let redirectBatch2 = [
|
||||
{ f: `/juice`, t: `/` },
|
||||
{ f: `/soda`, t: `/` },
|
||||
{ f: `/donut`, t: page2Path },
|
||||
{ f: `/randorect`, t: `` }
|
||||
];
|
||||
|
||||
redirectBatch2.forEach(({ f, t }) => {
|
||||
if (t === ``) {
|
||||
t = page2Path;
|
||||
}
|
||||
createRedirect({
|
||||
fromPath: f,
|
||||
redirectInBrowser: true,
|
||||
toPath: t
|
||||
});
|
||||
// Uncomment next line to see forEach in action during build
|
||||
// console.log('\nRedirecting:\n' + f + '\nTo:\n' + t + '\n');
|
||||
});
|
||||
|
||||
createRedirect({
|
||||
fromPath: '/my-special-redirect',
|
||||
toPath: homePath,
|
||||
force: true
|
||||
});
|
||||
};
|
||||
11
packages/gatsby-plugin-now/test/fixtures/package.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"name": "fixtures",
|
||||
"dependencies": {
|
||||
"gatsby": "2.14.0",
|
||||
"react": "16.9.0",
|
||||
"react-dom": "16.9.0"
|
||||
},
|
||||
"scripts": {
|
||||
"build": "gatsby build"
|
||||
}
|
||||
}
|
||||
9
packages/gatsby-plugin-now/test/fixtures/src/pages/index.js
vendored
Normal file
@@ -0,0 +1,9 @@
|
||||
import React from 'react';
|
||||
|
||||
const IndexPage = () => (
|
||||
<div>
|
||||
<h1>Hi people</h1>
|
||||
</div>
|
||||
);
|
||||
|
||||
export default IndexPage;
|
||||
5
packages/gatsby-plugin-now/test/index.test.js
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
test('test generated now routes', async () => {
|
||||
const nowRoutes = require('./fixtures/public/__now_routes_g4t5bY.json');
|
||||
|
||||
expect(nowRoutes).toMatchSnapshot();
|
||||
});
|
||||
@@ -1,10 +1,10 @@
|
||||
{
|
||||
"name": "@now/build-utils",
|
||||
"version": "0.9.14-canary.2",
|
||||
"version": "1.0.0-canary.20",
|
||||
"license": "MIT",
|
||||
"main": "./dist/index.js",
|
||||
"types": "./dist/index.d.js",
|
||||
"homepage": "https://zeit.co/docs/v2/deployments/builders/developer-guide",
|
||||
"homepage": "https://github.com/zeit/now/blob/canary/DEVELOPING_A_RUNTIME.md",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://github.com/zeit/now.git",
|
||||
@@ -12,7 +12,8 @@
|
||||
},
|
||||
"scripts": {
|
||||
"build": "./build.sh",
|
||||
"test-integration-once": "jest --env node --verbose --runInBand",
|
||||
"test-unit": "jest --env node --verbose --runInBand test/unit.test.js",
|
||||
"test-integration-once": "jest --env node --verbose --runInBand test/integration.test.js",
|
||||
"prepublishOnly": "./build.sh"
|
||||
},
|
||||
"devDependencies": {
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
export default function debug(message: string, ...additional: any[]) {
|
||||
if (process.env.NOW_BUILDER_DEBUG) {
|
||||
console.log(message, ...additional);
|
||||
} else if (process.env.NOW_BUILDER_ANNOTATE) {
|
||||
console.log(`[now-builder-debug] ${message}`, ...additional);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { PackageJson, Builder, Config } from './types';
|
||||
import minimatch from 'minimatch';
|
||||
import { valid as validSemver } from 'semver';
|
||||
import { PackageJson, Builder, Config, BuilderFunctions } from './types';
|
||||
|
||||
interface ErrorResponse {
|
||||
code: string;
|
||||
@@ -8,6 +9,7 @@ interface ErrorResponse {
|
||||
|
||||
interface Options {
|
||||
tag?: 'canary' | 'latest' | string;
|
||||
functions?: BuilderFunctions;
|
||||
}
|
||||
|
||||
const src = 'package.json';
|
||||
@@ -17,25 +19,31 @@ const MISSING_BUILD_SCRIPT_ERROR: ErrorResponse = {
|
||||
code: 'missing_build_script',
|
||||
message:
|
||||
'Your `package.json` file is missing a `build` property inside the `script` property.' +
|
||||
'\nMore details: https://zeit.co/docs/v2/advanced/platform/frequently-asked-questions#missing-build-script'
|
||||
'\nMore details: https://zeit.co/docs/v2/platform/frequently-asked-questions#missing-build-script',
|
||||
};
|
||||
|
||||
// Static builders are special cased in `@now/static-build`
|
||||
function getBuilders(): Map<string, Builder> {
|
||||
function getBuilders({ tag }: Options = {}): Map<string, Builder> {
|
||||
const withTag = tag ? `@${tag}` : '';
|
||||
const config = { zeroConfig: true };
|
||||
|
||||
return new Map<string, Builder>([
|
||||
['next', { src, use: '@now/next', config }]
|
||||
['next', { src, use: `@now/next${withTag}`, config }],
|
||||
]);
|
||||
}
|
||||
|
||||
// Must be a function to ensure that the returned
|
||||
// object won't be a reference
|
||||
function getApiBuilders(): Builder[] {
|
||||
function getApiBuilders({ tag }: Pick<Options, 'tag'> = {}): Builder[] {
|
||||
const withTag = tag ? `@${tag}` : '';
|
||||
const config = { zeroConfig: true };
|
||||
|
||||
return [
|
||||
{ src: 'api/**/*.js', use: '@now/node', config },
|
||||
{ src: 'api/**/*.ts', use: '@now/node', config },
|
||||
{ src: 'api/**/*.go', use: '@now/go', config },
|
||||
{ src: 'api/**/*.py', use: '@now/python', config },
|
||||
{ src: 'api/**/*.rb', use: '@now/ruby', config }
|
||||
{ src: 'api/**/*.js', use: `@now/node${withTag}`, config },
|
||||
{ src: 'api/**/*.ts', use: `@now/node${withTag}`, config },
|
||||
{ src: 'api/**/*.go', use: `@now/go${withTag}`, config },
|
||||
{ src: 'api/**/*.py', use: `@now/python${withTag}`, config },
|
||||
{ src: 'api/**/*.rb', use: `@now/ruby${withTag}`, config },
|
||||
];
|
||||
}
|
||||
|
||||
@@ -48,12 +56,61 @@ function hasBuildScript(pkg: PackageJson | undefined) {
|
||||
return Boolean(scripts && scripts['build']);
|
||||
}
|
||||
|
||||
async function detectBuilder(pkg: PackageJson): Promise<Builder> {
|
||||
for (const [dependency, builder] of getBuilders()) {
|
||||
function getApiFunctionBuilder(
|
||||
file: string,
|
||||
prevBuilder: Builder | undefined,
|
||||
{ functions = {} }: Pick<Options, 'functions'>
|
||||
) {
|
||||
const key = Object.keys(functions).find(
|
||||
k => file === k || minimatch(file, k)
|
||||
);
|
||||
const fn = key ? functions[key] : undefined;
|
||||
|
||||
if (!fn || (!fn.runtime && !prevBuilder)) {
|
||||
return prevBuilder;
|
||||
}
|
||||
|
||||
const src = (prevBuilder && prevBuilder.src) || file;
|
||||
const use = fn.runtime || (prevBuilder && prevBuilder.use);
|
||||
|
||||
const config: Config = { zeroConfig: true };
|
||||
|
||||
if (key) {
|
||||
Object.assign(config, {
|
||||
functions: {
|
||||
[key]: fn,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return use ? { use, src, config } : prevBuilder;
|
||||
}
|
||||
|
||||
async function detectFrontBuilder(
|
||||
pkg: PackageJson,
|
||||
builders: Builder[],
|
||||
options: Options
|
||||
): Promise<Builder> {
|
||||
for (const [dependency, builder] of getBuilders(options)) {
|
||||
const deps = Object.assign({}, pkg.dependencies, pkg.devDependencies);
|
||||
|
||||
// Return the builder when a dependency matches
|
||||
if (deps[dependency]) {
|
||||
if (options.functions) {
|
||||
Object.entries(options.functions).forEach(([key, func]) => {
|
||||
// When the builder is not used yet we'll use it for the frontend
|
||||
if (
|
||||
builders.every(
|
||||
b => !(b.config && b.config.functions && b.config.functions[key])
|
||||
)
|
||||
) {
|
||||
if (!builder.config) builder.config = {};
|
||||
if (!builder.config.functions) builder.config.functions = {};
|
||||
builder.config.functions[key] = { ...func };
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
@@ -63,26 +120,40 @@ async function detectBuilder(pkg: PackageJson): Promise<Builder> {
|
||||
}
|
||||
|
||||
// Files that match a specific pattern will get ignored
|
||||
export function ignoreApiFilter(file: string) {
|
||||
if (file.includes('/.')) {
|
||||
return false;
|
||||
export function getIgnoreApiFilter(optionsOrBuilders: Options | Builder[]) {
|
||||
const possiblePatterns: string[] = getApiBuilders().map(b => b.src);
|
||||
|
||||
if (Array.isArray(optionsOrBuilders)) {
|
||||
optionsOrBuilders.forEach(({ src }) => possiblePatterns.push(src));
|
||||
} else if (optionsOrBuilders.functions) {
|
||||
Object.keys(optionsOrBuilders.functions).forEach(p =>
|
||||
possiblePatterns.push(p)
|
||||
);
|
||||
}
|
||||
|
||||
if (file.includes('/_')) {
|
||||
return false;
|
||||
}
|
||||
return (file: string) => {
|
||||
if (!file.startsWith('api/')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (file.endsWith('.d.ts')) {
|
||||
return false;
|
||||
}
|
||||
if (file.includes('/.')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// If the file does not match any builder we also
|
||||
// don't want to create a route e.g. `package.json`
|
||||
if (getApiBuilders().every(({ src }) => !minimatch(file, src))) {
|
||||
return false;
|
||||
}
|
||||
if (file.includes('/_')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
if (file.endsWith('.d.ts')) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (possiblePatterns.every(p => !(file === p || minimatch(file, p)))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
};
|
||||
}
|
||||
|
||||
// We need to sort the file paths by alphabet to make
|
||||
@@ -91,20 +162,144 @@ export function sortFiles(fileA: string, fileB: string) {
|
||||
return fileA.localeCompare(fileB);
|
||||
}
|
||||
|
||||
async function detectApiBuilders(files: string[]): Promise<Builder[]> {
|
||||
async function detectApiBuilders(
|
||||
files: string[],
|
||||
options: Options
|
||||
): Promise<Builder[]> {
|
||||
const builds = files
|
||||
.sort(sortFiles)
|
||||
.filter(ignoreApiFilter)
|
||||
.filter(getIgnoreApiFilter(options))
|
||||
.map(file => {
|
||||
const result = getApiBuilders().find(({ src }): boolean =>
|
||||
minimatch(file, src)
|
||||
);
|
||||
|
||||
return result ? { ...result, src: file } : null;
|
||||
const apiBuilders = getApiBuilders(options);
|
||||
const apiBuilder = apiBuilders.find(b => minimatch(file, b.src));
|
||||
const fnBuilder = getApiFunctionBuilder(file, apiBuilder, options);
|
||||
return fnBuilder ? { ...fnBuilder, src: file } : null;
|
||||
});
|
||||
|
||||
const finishedBuilds = builds.filter(Boolean);
|
||||
return finishedBuilds as Builder[];
|
||||
return builds.filter(Boolean) as Builder[];
|
||||
}
|
||||
|
||||
// When a package has files that conflict with `/api` routes
|
||||
// e.g. Next.js pages/api we'll check it here and return an error.
|
||||
async function checkConflictingFiles(
|
||||
files: string[],
|
||||
builders: Builder[]
|
||||
): Promise<ErrorResponse | null> {
|
||||
// For Next.js
|
||||
if (builders.some(b => b.use.startsWith('@now/next'))) {
|
||||
const hasApiPages = files.some(file => file.startsWith('pages/api/'));
|
||||
const hasApiBuilders = builders.some(b => b.src.startsWith('api/'));
|
||||
|
||||
if (hasApiPages && hasApiBuilders) {
|
||||
return {
|
||||
code: 'conflicting_files',
|
||||
message:
|
||||
'It is not possible to use `api` and `pages/api` at the same time, please only use one option',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// When e.g. Next.js receives a `functions` property it has to make sure,
|
||||
// that it can handle those files, otherwise there are unused functions.
|
||||
async function checkUnusedFunctionsOnFrontendBuilder(
|
||||
files: string[],
|
||||
builder: Builder
|
||||
): Promise<ErrorResponse | null> {
|
||||
const { config: { functions = undefined } = {} } = builder;
|
||||
|
||||
if (!functions) return null;
|
||||
|
||||
if (builder.use.startsWith('@now/next')) {
|
||||
const matchingFiles = files.filter(file =>
|
||||
Object.keys(functions).some(key => file === key || minimatch(file, key))
|
||||
);
|
||||
|
||||
for (const matchedFile of matchingFiles) {
|
||||
if (
|
||||
!matchedFile.startsWith('src/pages/') &&
|
||||
!matchedFile.startsWith('pages/')
|
||||
) {
|
||||
return {
|
||||
code: 'unused_function',
|
||||
message: `The function for ${matchedFile} can't be handled by any builder`,
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
function validateFunctions(files: string[], { functions = {} }: Options) {
|
||||
for (const [path, func] of Object.entries(functions)) {
|
||||
if (path.length > 256) {
|
||||
return {
|
||||
code: 'invalid_function_glob',
|
||||
message: 'Function globs must be less than 256 characters long.',
|
||||
};
|
||||
}
|
||||
|
||||
if (!func || typeof func !== 'object') {
|
||||
return {
|
||||
code: 'invalid_function',
|
||||
message: 'Function must be an object.',
|
||||
};
|
||||
}
|
||||
|
||||
if (Object.keys(func).length === 0) {
|
||||
return {
|
||||
code: 'invalid_function',
|
||||
message: 'Function must contain at least one property.',
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
func.maxDuration !== undefined &&
|
||||
(func.maxDuration < 1 ||
|
||||
func.maxDuration > 900 ||
|
||||
!Number.isInteger(func.maxDuration))
|
||||
) {
|
||||
return {
|
||||
code: 'invalid_function_duration',
|
||||
message: 'Functions must have a duration between 1 and 900.',
|
||||
};
|
||||
}
|
||||
|
||||
if (
|
||||
func.memory !== undefined &&
|
||||
(func.memory < 128 || func.memory > 3008 || func.memory % 64 !== 0)
|
||||
) {
|
||||
return {
|
||||
code: 'invalid_function_memory',
|
||||
message:
|
||||
'Functions must have a memory value between 128 and 3008 in steps of 64.',
|
||||
};
|
||||
}
|
||||
|
||||
if (files.some(f => f === path || minimatch(f, path)) === false) {
|
||||
return {
|
||||
code: 'invalid_function_source',
|
||||
message: `No source file matched the function for ${path}.`,
|
||||
};
|
||||
}
|
||||
|
||||
if (func.runtime !== undefined) {
|
||||
const tag = `${func.runtime}`.split('@').pop();
|
||||
|
||||
if (!tag || !validSemver(tag)) {
|
||||
return {
|
||||
code: 'invalid_function_runtime',
|
||||
message:
|
||||
'Function runtimes must have a valid version, for example `@now/node@1.0.0`.',
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// When zero config is used we can call this function
|
||||
@@ -112,24 +307,56 @@ async function detectApiBuilders(files: string[]): Promise<Builder[]> {
|
||||
export async function detectBuilders(
|
||||
files: string[],
|
||||
pkg?: PackageJson | undefined | null,
|
||||
options?: Options
|
||||
options: Options = {}
|
||||
): Promise<{
|
||||
builders: Builder[] | null;
|
||||
errors: ErrorResponse[] | null;
|
||||
warnings: ErrorResponse[];
|
||||
}> {
|
||||
const errors: ErrorResponse[] = [];
|
||||
const warnings: ErrorResponse[] = [];
|
||||
|
||||
const functionError = validateFunctions(files, options);
|
||||
|
||||
if (functionError) {
|
||||
return {
|
||||
builders: null,
|
||||
errors: [functionError],
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
// Detect all builders for the `api` directory before anything else
|
||||
let builders = await detectApiBuilders(files);
|
||||
const builders = await detectApiBuilders(files, options);
|
||||
|
||||
if (pkg && hasBuildScript(pkg)) {
|
||||
builders.push(await detectBuilder(pkg));
|
||||
const frontendBuilder = await detectFrontBuilder(pkg, builders, options);
|
||||
builders.push(frontendBuilder);
|
||||
|
||||
const conflictError = await checkConflictingFiles(files, builders);
|
||||
|
||||
if (conflictError) {
|
||||
warnings.push(conflictError);
|
||||
}
|
||||
|
||||
const unusedFunctionError = await checkUnusedFunctionsOnFrontendBuilder(
|
||||
files,
|
||||
frontendBuilder
|
||||
);
|
||||
|
||||
if (unusedFunctionError) {
|
||||
return {
|
||||
builders: null,
|
||||
errors: [unusedFunctionError],
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
} else {
|
||||
if (pkg && builders.length === 0) {
|
||||
// We only show this error when there are no api builders
|
||||
// since the dependencies of the pkg could be used for those
|
||||
errors.push(MISSING_BUILD_SCRIPT_ERROR);
|
||||
return { errors, builders: null };
|
||||
return { errors, warnings, builders: null };
|
||||
}
|
||||
|
||||
// We allow a `public` directory
|
||||
@@ -138,45 +365,25 @@ export async function detectBuilders(
|
||||
builders.push({
|
||||
use: '@now/static',
|
||||
src: 'public/**/*',
|
||||
config
|
||||
config,
|
||||
});
|
||||
} else if (builders.length > 0) {
|
||||
// We can't use pattern matching, since `!(api)` and `!(api)/**/*`
|
||||
// won't give the correct results
|
||||
builders.push(
|
||||
...files
|
||||
.filter(name => !name.startsWith('api/'))
|
||||
.filter(name => !(name === 'package.json'))
|
||||
.map(name => ({
|
||||
use: '@now/static',
|
||||
src: name,
|
||||
config
|
||||
}))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Change the tag for the builders
|
||||
if (builders && builders.length) {
|
||||
const tag = options && options.tag;
|
||||
|
||||
if (tag) {
|
||||
builders = builders.map((originBuilder: Builder) => {
|
||||
// Copy builder to make sure it is not a reference
|
||||
const builder = { ...originBuilder };
|
||||
|
||||
// @now/static has no canary builder
|
||||
if (builder.use !== '@now/static') {
|
||||
builder.use = `${builder.use}@${tag}`;
|
||||
}
|
||||
|
||||
return builder;
|
||||
} else if (
|
||||
builders.length > 0 &&
|
||||
files.some(f => !f.startsWith('api/') && f !== 'package.json')
|
||||
) {
|
||||
// Everything besides the api directory
|
||||
// and package.json can be served as static files
|
||||
builders.push({
|
||||
use: '@now/static',
|
||||
src: '!{api/**,package.json}',
|
||||
config,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
builders: builders.length ? builders : null,
|
||||
errors: errors.length ? errors : null
|
||||
errors: errors.length ? errors : null,
|
||||
warnings,
|
||||
};
|
||||
}
|
||||
|
||||
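A usage sketch of the reworked detection (not part of this diff; the file names and option values are illustrative, and it assumes `detectBuilders` is re-exported from the package root):

```js
const { detectBuilders } = require('@now/build-utils');

const files = ['package.json', 'api/users.js', 'public/index.html'];
const pkg = {
  scripts: { build: 'next build' },
  dependencies: { next: '9.0.0' },
};

detectBuilders(files, pkg, {
  tag: 'canary', // pins every detected runtime to its canary release
  functions: {
    'api/users.js': { memory: 1024, maxDuration: 60 },
  },
}).then(({ builders, errors, warnings }) => {
  // Roughly: @now/node@canary for api/users.js (with the function config
  // attached), @now/next@canary for the frontend, plus a @now/static builder.
  console.log(builders, errors, warnings);
});
```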
@@ -1,6 +1,6 @@
|
||||
import { Route, Builder } from './types';
|
||||
import { parse as parsePath } from 'path';
|
||||
import { ignoreApiFilter, sortFiles } from './detect-builders';
|
||||
import { Route, Builder } from './types';
|
||||
import { getIgnoreApiFilter, sortFiles } from './detect-builders';
|
||||
|
||||
function escapeName(name: string) {
|
||||
const special = '[]^$.|?*+()'.split('');
|
||||
@@ -60,9 +60,9 @@ function createRouteFromPath(filePath: string): Route {
|
||||
const prefix = isIndex ? '\\/' : '';
|
||||
|
||||
const names = [
|
||||
prefix,
|
||||
isIndex ? prefix : `${fileName}\\/`,
|
||||
prefix + escapeName(fileName),
|
||||
prefix + escapeName(fileName) + escapeName(ext)
|
||||
prefix + escapeName(fileName) + escapeName(ext),
|
||||
].filter(Boolean);
|
||||
|
||||
// Either filename with extension, filename without extension
|
||||
@@ -118,7 +118,7 @@ function partiallyMatches(pathA: string, pathB: string): boolean {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Counts how often a path occurres when all placeholders
|
||||
// Counts how often a path occurs when all placeholders
|
||||
// got resolved, so we can check if they have conflicts
|
||||
function pathOccurrences(filePath: string, files: string[]): string[] {
|
||||
const getAbsolutePath = (unresolvedPath: string): string => {
|
||||
@@ -197,7 +197,10 @@ interface RoutesResult {
|
||||
error: { [key: string]: string } | null;
|
||||
}
|
||||
|
||||
async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
|
||||
async function detectApiRoutes(
|
||||
files: string[],
|
||||
builders: Builder[]
|
||||
): Promise<RoutesResult> {
|
||||
if (!files || files.length === 0) {
|
||||
return { defaultRoutes: null, error: null };
|
||||
}
|
||||
@@ -205,7 +208,7 @@ async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
|
||||
// The deepest routes need to be
|
||||
// the first ones to get handled
|
||||
const sortedFiles = files
|
||||
.filter(ignoreApiFilter)
|
||||
.filter(getIgnoreApiFilter(builders))
|
||||
.sort(sortFiles)
|
||||
.sort(sortFilesBySegmentCount);
|
||||
|
||||
@@ -226,10 +229,10 @@ async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
|
||||
error: {
|
||||
code: 'conflicting_path_segment',
|
||||
message:
|
||||
`The segment "${conflictingSegment}" occurres more than ` +
|
||||
`The segment "${conflictingSegment}" occurs more than ` +
|
||||
`one time in your path "${file}". Please make sure that ` +
|
||||
`every segment in a path is unique`
|
||||
}
|
||||
`every segment in a path is unique`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -249,8 +252,8 @@ async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
|
||||
message:
|
||||
`Two or more files have conflicting paths or names. ` +
|
||||
`Please make sure path segments and filenames, without their extension, are unique. ` +
|
||||
`The path "${file}" has conflicts with ${messagePaths}`
|
||||
}
|
||||
`The path "${file}" has conflicts with ${messagePaths}`,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -261,7 +264,7 @@ async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
|
||||
if (defaultRoutes.length) {
|
||||
defaultRoutes.push({
|
||||
status: 404,
|
||||
src: '/api(\\/.*)?$'
|
||||
src: '/api(\\/.*)?$',
|
||||
});
|
||||
}
|
||||
|
||||
@@ -282,12 +285,12 @@ export async function detectRoutes(
|
||||
files: string[],
|
||||
builders: Builder[]
|
||||
): Promise<RoutesResult> {
|
||||
const routesResult = await detectApiRoutes(files);
|
||||
const routesResult = await detectApiRoutes(files, builders);
|
||||
|
||||
if (routesResult.defaultRoutes && hasPublicBuilder(builders)) {
|
||||
routesResult.defaultRoutes.push({
|
||||
src: '/(.*)',
|
||||
dest: '/public/$1'
|
||||
dest: '/public/$1',
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
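
The two helpers above are meant to be used together: `detectBuilders` produces the zero-config build list and `detectRoutes` derives the default routes for the detected `api` entrypoints. A minimal sketch, assuming both functions and the `PackageJson` type are imported from the package root as re-exported in index.ts; the error handling is illustrative only:

import { detectBuilders, detectRoutes, PackageJson } from '@now/build-utils';

// `files` would normally come from globbing the project directory.
async function zeroConfig(files: string[], pkg?: PackageJson) {
  const { builders, errors, warnings } = await detectBuilders(files, pkg);
  if (errors) {
    throw new Error(errors[0].message);
  }

  const { defaultRoutes } = await detectRoutes(files, builders || []);
  return { builds: builders, routes: defaultRoutes, warnings };
}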
@@ -4,11 +4,13 @@ import { File } from './types';
|
||||
|
||||
interface FileBlobOptions {
|
||||
mode?: number;
|
||||
contentType?: string;
|
||||
data: string | Buffer;
|
||||
}
|
||||
|
||||
interface FromStreamOptions {
|
||||
mode?: number;
|
||||
contentType?: string;
|
||||
stream: NodeJS.ReadableStream;
|
||||
}
|
||||
|
||||
@@ -16,16 +18,22 @@ export default class FileBlob implements File {
|
||||
public type: 'FileBlob';
|
||||
public mode: number;
|
||||
public data: string | Buffer;
|
||||
public contentType: string | undefined;
|
||||
|
||||
constructor({ mode = 0o100644, data }: FileBlobOptions) {
|
||||
constructor({ mode = 0o100644, contentType, data }: FileBlobOptions) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof data === 'string' || Buffer.isBuffer(data));
|
||||
this.type = 'FileBlob';
|
||||
this.mode = mode;
|
||||
this.contentType = contentType;
|
||||
this.data = data;
|
||||
}
|
||||
|
||||
static async fromStream({ mode = 0o100644, stream }: FromStreamOptions) {
|
||||
static async fromStream({
|
||||
mode = 0o100644,
|
||||
contentType,
|
||||
stream,
|
||||
}: FromStreamOptions) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof stream.pipe === 'function'); // is-stream
|
||||
const chunks: Buffer[] = [];
|
||||
@@ -37,7 +45,7 @@ export default class FileBlob implements File {
|
||||
});
|
||||
|
||||
const data = Buffer.concat(chunks);
|
||||
return new FileBlob({ mode, data });
|
||||
return new FileBlob({ mode, contentType, data });
|
||||
}
|
||||
|
||||
toStream(): NodeJS.ReadableStream {
|
||||
|
||||
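
With the change above, `FileBlob` can carry an explicit `contentType` alongside its data. A small sketch, assuming the class is imported from the package root (the mode defaults to 0o100644 as in the constructor):

import { FileBlob } from '@now/build-utils';

// In-memory file with an explicit content type.
const page = new FileBlob({
  data: '<h1>hello</h1>',
  contentType: 'text/html',
});

// Collect a readable stream into a FileBlob while preserving the content type.
async function blobFromStream(stream: NodeJS.ReadableStream) {
  return FileBlob.fromStream({ stream, contentType: 'application/json' });
}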
@@ -9,11 +9,13 @@ const semaToPreventEMFILE = new Sema(20);
|
||||
|
||||
interface FileFsRefOptions {
|
||||
mode?: number;
|
||||
contentType?: string;
|
||||
fsPath: string;
|
||||
}
|
||||
|
||||
interface FromStreamOptions {
|
||||
mode: number;
|
||||
contentType?: string;
|
||||
stream: NodeJS.ReadableStream;
|
||||
fsPath: string;
|
||||
}
|
||||
@@ -22,17 +24,20 @@ class FileFsRef implements File {
|
||||
public type: 'FileFsRef';
|
||||
public mode: number;
|
||||
public fsPath: string;
|
||||
public contentType: string | undefined;
|
||||
|
||||
constructor({ mode = 0o100644, fsPath }: FileFsRefOptions) {
|
||||
constructor({ mode = 0o100644, contentType, fsPath }: FileFsRefOptions) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof fsPath === 'string');
|
||||
this.type = 'FileFsRef';
|
||||
this.mode = mode;
|
||||
this.contentType = contentType;
|
||||
this.fsPath = fsPath;
|
||||
}
|
||||
|
||||
static async fromFsPath({
|
||||
mode,
|
||||
contentType,
|
||||
fsPath,
|
||||
}: FileFsRefOptions): Promise<FileFsRef> {
|
||||
let m = mode;
|
||||
@@ -40,11 +45,12 @@ class FileFsRef implements File {
|
||||
const stat = await fs.lstat(fsPath);
|
||||
m = stat.mode;
|
||||
}
|
||||
return new FileFsRef({ mode: m, fsPath });
|
||||
return new FileFsRef({ mode: m, contentType, fsPath });
|
||||
}
|
||||
|
||||
static async fromStream({
|
||||
mode = 0o100644,
|
||||
contentType,
|
||||
stream,
|
||||
fsPath,
|
||||
}: FromStreamOptions): Promise<FileFsRef> {
|
||||
@@ -63,7 +69,7 @@ class FileFsRef implements File {
|
||||
dest.on('error', reject);
|
||||
});
|
||||
|
||||
return new FileFsRef({ mode, fsPath });
|
||||
return new FileFsRef({ mode, contentType, fsPath });
|
||||
}
|
||||
|
||||
async toStreamAsync(): Promise<NodeJS.ReadableStream> {
|
||||
|
||||
@@ -8,6 +8,7 @@ import { File } from './types';
|
||||
interface FileRefOptions {
|
||||
mode?: number;
|
||||
digest: string;
|
||||
contentType?: string;
|
||||
mutable?: boolean;
|
||||
}
|
||||
|
||||
@@ -26,14 +27,21 @@ export default class FileRef implements File {
|
||||
public type: 'FileRef';
|
||||
public mode: number;
|
||||
public digest: string;
|
||||
public contentType: string | undefined;
|
||||
private mutable: boolean;
|
||||
|
||||
constructor({ mode = 0o100644, digest, mutable = false }: FileRefOptions) {
|
||||
constructor({
|
||||
mode = 0o100644,
|
||||
digest,
|
||||
contentType,
|
||||
mutable = false,
|
||||
}: FileRefOptions) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof digest === 'string');
|
||||
this.type = 'FileRef';
|
||||
this.mode = mode;
|
||||
this.digest = digest;
|
||||
this.contentType = contentType;
|
||||
this.mutable = mutable;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { intersects } from 'semver';
|
||||
import { NodeVersion } from '../types';
|
||||
import debug from '../debug';
|
||||
|
||||
const supportedOptions: NodeVersion[] = [
|
||||
{ major: 10, range: '10.x', runtime: 'nodejs10.x' },
|
||||
@@ -20,7 +21,7 @@ export async function getSupportedNodeVersion(
|
||||
|
||||
if (!engineRange) {
|
||||
if (!silent) {
|
||||
console.log(
|
||||
debug(
|
||||
'missing `engines` in `package.json`, using default range: ' +
|
||||
selection.range
|
||||
);
|
||||
@@ -34,7 +35,7 @@ export async function getSupportedNodeVersion(
|
||||
});
|
||||
if (found) {
|
||||
if (!silent) {
|
||||
console.log(
|
||||
debug(
|
||||
'Found `engines` in `package.json`, selecting range: ' +
|
||||
selection.range
|
||||
);
|
||||
|
||||
@@ -1,21 +1,22 @@
|
||||
import assert from 'assert';
|
||||
import fs from 'fs-extra';
|
||||
import path from 'path';
|
||||
import debug from '../debug';
|
||||
import spawn from 'cross-spawn';
|
||||
import { SpawnOptions } from 'child_process';
|
||||
import { deprecate } from 'util';
|
||||
import { cpus } from 'os';
|
||||
import { Meta, PackageJson, NodeVersion, Config } from '../types';
|
||||
import { getSupportedNodeVersion } from './node-version';
|
||||
|
||||
function spawnAsync(
|
||||
export function spawnAsync(
|
||||
command: string,
|
||||
args: string[],
|
||||
cwd: string,
|
||||
opts: SpawnOptions = {}
|
||||
) {
|
||||
return new Promise<void>((resolve, reject) => {
|
||||
const stderrLogs: Buffer[] = [];
|
||||
opts = { stdio: 'inherit', cwd, ...opts };
|
||||
opts = { stdio: 'inherit', ...opts };
|
||||
const child = spawn(command, args, opts);
|
||||
|
||||
if (opts.stdio === 'pipe' && child.stderr) {
|
||||
@@ -54,7 +55,10 @@ export async function runShellScript(
|
||||
assert(path.isAbsolute(fsPath));
|
||||
const destPath = path.dirname(fsPath);
|
||||
await chmodPlusX(fsPath);
|
||||
await spawnAsync(`./${path.basename(fsPath)}`, args, destPath, spawnOpts);
|
||||
await spawnAsync(`./${path.basename(fsPath)}`, args, {
|
||||
cwd: destPath,
|
||||
...spawnOpts,
|
||||
});
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -128,38 +132,100 @@ async function scanParentDirs(destPath: string, readPackageJson = false) {
|
||||
export async function runNpmInstall(
|
||||
destPath: string,
|
||||
args: string[] = [],
|
||||
spawnOpts?: SpawnOptions
|
||||
spawnOpts?: SpawnOptions,
|
||||
meta?: Meta
|
||||
) {
|
||||
if (meta && meta.isDev) {
|
||||
debug('Skipping dependency installation because dev mode is enabled');
|
||||
return;
|
||||
}
|
||||
|
||||
assert(path.isAbsolute(destPath));
|
||||
|
||||
let commandArgs = args;
|
||||
console.log(`installing to ${destPath}`);
|
||||
debug(`Installing to ${destPath}`);
|
||||
const { hasPackageLockJson } = await scanParentDirs(destPath);
|
||||
|
||||
const opts = spawnOpts || { env: process.env };
|
||||
const opts = { cwd: destPath, ...spawnOpts } || {
|
||||
cwd: destPath,
|
||||
env: process.env,
|
||||
};
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||
await spawnAsync(
|
||||
'npm',
|
||||
commandArgs.concat(['install', '--unsafe-perm']),
|
||||
destPath,
|
||||
opts
|
||||
);
|
||||
} else {
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
commandArgs.concat(['--ignore-engines', '--cwd', destPath]),
|
||||
destPath,
|
||||
opts
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function runBundleInstall(
|
||||
destPath: string,
|
||||
args: string[] = [],
|
||||
spawnOpts?: SpawnOptions,
|
||||
meta?: Meta
|
||||
) {
|
||||
if (meta && meta.isDev) {
|
||||
debug('Skipping dependency installation because dev mode is enabled');
|
||||
return;
|
||||
}
|
||||
|
||||
assert(path.isAbsolute(destPath));
|
||||
const opts = { cwd: destPath, ...spawnOpts } || {
|
||||
cwd: destPath,
|
||||
env: process.env,
|
||||
};
|
||||
|
||||
await spawnAsync(
|
||||
'bundle',
|
||||
args.concat([
|
||||
'install',
|
||||
'--no-prune',
|
||||
'--retry',
|
||||
'3',
|
||||
'--jobs',
|
||||
String(cpus().length || 1),
|
||||
]),
|
||||
opts
|
||||
);
|
||||
}
|
||||
|
||||
export async function runPipInstall(
|
||||
destPath: string,
|
||||
args: string[] = [],
|
||||
spawnOpts?: SpawnOptions,
|
||||
meta?: Meta
|
||||
) {
|
||||
if (meta && meta.isDev) {
|
||||
debug('Skipping dependency installation because dev mode is enabled');
|
||||
return;
|
||||
}
|
||||
|
||||
assert(path.isAbsolute(destPath));
|
||||
const opts = { cwd: destPath, ...spawnOpts } || {
|
||||
cwd: destPath,
|
||||
env: process.env,
|
||||
};
|
||||
|
||||
await spawnAsync(
|
||||
'pip3',
|
||||
['install', '--disable-pip-version-check', ...args],
|
||||
opts
|
||||
);
|
||||
}
|
||||
|
||||
export async function runPackageJsonScript(
|
||||
destPath: string,
|
||||
scriptName: string,
|
||||
opts?: SpawnOptions
|
||||
spawnOpts?: SpawnOptions
|
||||
) {
|
||||
assert(path.isAbsolute(destPath));
|
||||
const { packageJson, hasPackageLockJson } = await scanParentDirs(
|
||||
@@ -174,17 +240,14 @@ export async function runPackageJsonScript(
|
||||
);
|
||||
if (!hasScript) return false;
|
||||
|
||||
const opts = { cwd: destPath, ...spawnOpts };
|
||||
|
||||
if (hasPackageLockJson) {
|
||||
console.log(`running "npm run ${scriptName}"`);
|
||||
await spawnAsync('npm', ['run', scriptName], destPath, opts);
|
||||
console.log(`Running "npm run ${scriptName}"`);
|
||||
await spawnAsync('npm', ['run', scriptName], opts);
|
||||
} else {
|
||||
console.log(`running "yarn run ${scriptName}"`);
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
['--cwd', destPath, 'run', scriptName],
|
||||
destPath,
|
||||
opts
|
||||
);
|
||||
console.log(`Running "yarn run ${scriptName}"`);
|
||||
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], opts);
|
||||
}
|
||||
|
||||
return true;
|
||||
|
||||
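
A sketch of how a builder could use the `meta`-aware install helpers above so that `now dev` skips dependency installation; the function name and option values are illustrative, not part of this diff:

import { runNpmInstall, runPackageJsonScript, Meta } from '@now/build-utils';

// `runNpmInstall` returns early when `meta.isDev` is set, so this is safe to
// call on every rebuild in dev mode.
async function prepare(workPath: string, meta?: Meta) {
  await runNpmInstall(workPath, ['--prefer-offline'], { env: process.env }, meta);
  await runPackageJsonScript(workPath, 'build', { env: process.env });
}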
@@ -1,15 +1,19 @@
|
||||
import FileBlob from './file-blob';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
import FileRef from './file-ref';
|
||||
import { Lambda, createLambda } from './lambda';
|
||||
import { Lambda, createLambda, getLambdaOptionsFromFunction } from './lambda';
|
||||
import { Prerender } from './prerender';
|
||||
import download, { DownloadedFiles } from './fs/download';
|
||||
import getWriteableDirectory from './fs/get-writable-directory';
|
||||
import glob from './fs/glob';
|
||||
import rename from './fs/rename';
|
||||
import {
|
||||
spawnAsync,
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runBundleInstall,
|
||||
runPipInstall,
|
||||
runShellScript,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
@@ -26,14 +30,18 @@ export {
|
||||
FileRef,
|
||||
Lambda,
|
||||
createLambda,
|
||||
Prerender,
|
||||
download,
|
||||
DownloadedFiles,
|
||||
getWriteableDirectory,
|
||||
glob,
|
||||
rename,
|
||||
spawnAsync,
|
||||
installDependencies,
|
||||
runPackageJsonScript,
|
||||
runNpmInstall,
|
||||
runBundleInstall,
|
||||
runPipInstall,
|
||||
runShellScript,
|
||||
getNodeVersion,
|
||||
getSpawnOptions,
|
||||
@@ -42,6 +50,7 @@ export {
|
||||
detectBuilders,
|
||||
detectRoutes,
|
||||
debug,
|
||||
getLambdaOptionsFromFunction,
|
||||
};
|
||||
|
||||
export * from './types';
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import assert from 'assert';
|
||||
import Sema from 'async-sema';
|
||||
import { ZipFile } from 'yazl';
|
||||
import minimatch from 'minimatch';
|
||||
import { readlink } from 'fs-extra';
|
||||
import { Files } from './types';
|
||||
import { Files, Config } from './types';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
import { isSymbolicLink } from './fs/download';
|
||||
import streamToBuffer from './fs/stream-to-buffer';
|
||||
@@ -15,6 +16,8 @@ interface LambdaOptions {
|
||||
zipBuffer: Buffer;
|
||||
handler: string;
|
||||
runtime: string;
|
||||
memory?: number;
|
||||
maxDuration?: number;
|
||||
environment: Environment;
|
||||
}
|
||||
|
||||
@@ -22,21 +25,39 @@ interface CreateLambdaOptions {
|
||||
files: Files;
|
||||
handler: string;
|
||||
runtime: string;
|
||||
memory?: number;
|
||||
maxDuration?: number;
|
||||
environment?: Environment;
|
||||
}
|
||||
|
||||
interface GetLambdaOptionsFromFunctionOptions {
|
||||
sourceFile: string;
|
||||
config?: Config;
|
||||
}
|
||||
|
||||
export class Lambda {
|
||||
public type: 'Lambda';
|
||||
public zipBuffer: Buffer;
|
||||
public handler: string;
|
||||
public runtime: string;
|
||||
public memory?: number;
|
||||
public maxDuration?: number;
|
||||
public environment: Environment;
|
||||
|
||||
constructor({ zipBuffer, handler, runtime, environment }: LambdaOptions) {
|
||||
constructor({
|
||||
zipBuffer,
|
||||
handler,
|
||||
runtime,
|
||||
maxDuration,
|
||||
memory,
|
||||
environment,
|
||||
}: LambdaOptions) {
|
||||
this.type = 'Lambda';
|
||||
this.zipBuffer = zipBuffer;
|
||||
this.handler = handler;
|
||||
this.runtime = runtime;
|
||||
this.memory = memory;
|
||||
this.maxDuration = maxDuration;
|
||||
this.environment = environment;
|
||||
}
|
||||
}
|
||||
@@ -48,6 +69,8 @@ export async function createLambda({
|
||||
files,
|
||||
handler,
|
||||
runtime,
|
||||
memory,
|
||||
maxDuration,
|
||||
environment = {},
|
||||
}: CreateLambdaOptions): Promise<Lambda> {
|
||||
assert(typeof files === 'object', '"files" must be an object');
|
||||
@@ -55,6 +78,14 @@ export async function createLambda({
|
||||
assert(typeof runtime === 'string', '"runtime" is not a string');
|
||||
assert(typeof environment === 'object', '"environment" is not an object');
|
||||
|
||||
if (memory !== undefined) {
|
||||
assert(typeof memory === 'number', '"memory" is not a number');
|
||||
}
|
||||
|
||||
if (maxDuration !== undefined) {
|
||||
assert(typeof maxDuration === 'number', '"maxDuration" is not a number');
|
||||
}
|
||||
|
||||
await sema.acquire();
|
||||
|
||||
try {
|
||||
@@ -63,6 +94,8 @@ export async function createLambda({
|
||||
zipBuffer,
|
||||
handler,
|
||||
runtime,
|
||||
memory,
|
||||
maxDuration,
|
||||
environment,
|
||||
});
|
||||
} finally {
|
||||
@@ -105,3 +138,23 @@ export async function createZip(files: Files): Promise<Buffer> {

  return zipBuffer;
}

export async function getLambdaOptionsFromFunction({
  sourceFile,
  config,
}: GetLambdaOptionsFromFunctionOptions): Promise<
  Pick<LambdaOptions, 'memory' | 'maxDuration'>
> {
  if (config && config.functions) {
    for (const [pattern, fn] of Object.entries(config.functions)) {
      if (sourceFile === pattern || minimatch(sourceFile, pattern)) {
        return {
          memory: fn.memory,
          maxDuration: fn.maxDuration,
        };
      }
    }
  }

  return {};
}
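
A hedged sketch of how `getLambdaOptionsFromFunction` can feed the new `memory` / `maxDuration` options into `createLambda`; the handler and runtime values are placeholders:

import {
  createLambda,
  getLambdaOptionsFromFunction,
  Files,
  Config,
} from '@now/build-utils';

async function buildEndpoint(files: Files, entrypoint: string, config: Config) {
  // Picks up `memory` / `maxDuration` from a matching `functions` entry, if any.
  const lambdaOptions = await getLambdaOptionsFromFunction({
    sourceFile: entrypoint,
    config,
  });

  return createLambda({
    files,
    handler: 'index.handler',
    runtime: 'nodejs10.x',
    environment: {},
    ...lambdaOptions,
  });
}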
42
packages/now-build-utils/src/prerender.ts
Normal file
@@ -0,0 +1,42 @@
import FileBlob from './file-blob';
import FileFsRef from './file-fs-ref';
import FileRef from './file-ref';
import { Lambda } from './lambda';

interface PrerenderOptions {
  expiration: number;
  lambda: Lambda;
  fallback: FileBlob | FileFsRef | FileRef | null;
  group?: number;
}

export class Prerender {
  public type: 'Prerender';
  public expiration: number;
  public lambda: Lambda;
  public fallback: FileBlob | FileFsRef | FileRef | null;
  public group?: number;

  constructor({ expiration, lambda, fallback, group }: PrerenderOptions) {
    this.type = 'Prerender';
    this.expiration = expiration;
    this.lambda = lambda;

    if (
      typeof group !== 'undefined' &&
      (group <= 0 || !Number.isInteger(group))
    ) {
      throw new Error(
        'The `group` argument for `Prerender` needs to be a natural number.'
      );
    }
    this.group = group;

    if (typeof fallback === 'undefined') {
      throw new Error(
        'The `fallback` argument for `Prerender` needs to be a `FileBlob`, `FileFsRef`, `FileRef`, or null.'
      );
    }
    this.fallback = fallback;
  }
}
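
A short, assumed usage of the `Prerender` class above; the lambda and fallback values are placeholders:

import { FileBlob, Lambda, Prerender } from '@now/build-utils';

function makePrerender(renderLambda: Lambda) {
  // `fallback` must always be passed explicitly; `null` means no static fallback.
  return new Prerender({
    expiration: 60, // expiration period (assumed to be seconds)
    lambda: renderLambda,
    fallback: new FileBlob({ data: '<!-- placeholder -->' }),
    group: 1, // optional, must be a positive integer
  });
}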
@@ -1,9 +1,14 @@
|
||||
import FileRef from './file-ref';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
|
||||
export interface Env {
|
||||
[name: string]: string | undefined;
|
||||
}
|
||||
|
||||
export interface File {
|
||||
type: string;
|
||||
mode: number;
|
||||
contentType?: string;
|
||||
toStream: () => NodeJS.ReadableStream;
|
||||
/**
|
||||
* The absolute path to the file in the filesystem
|
||||
@@ -34,6 +39,7 @@ export interface Config {
|
||||
| boolean
|
||||
| number
|
||||
| { [key: string]: string }
|
||||
| BuilderFunctions
|
||||
| undefined;
|
||||
maxLambdaSize?: string;
|
||||
includeFiles?: string | string[];
|
||||
@@ -44,6 +50,7 @@ export interface Config {
|
||||
debug?: boolean;
|
||||
zeroConfig?: boolean;
|
||||
import?: { [key: string]: string };
|
||||
functions?: BuilderFunctions;
|
||||
}
|
||||
|
||||
export interface Meta {
|
||||
@@ -52,6 +59,8 @@ export interface Meta {
|
||||
requestPath?: string;
|
||||
filesChanged?: string[];
|
||||
filesRemoved?: string[];
|
||||
env?: Env;
|
||||
buildEnv?: Env;
|
||||
}
|
||||
|
||||
export interface AnalyzeOptions {
|
||||
@@ -184,23 +193,110 @@ export interface ShouldServeOptions {
|
||||
config: Config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Credit to Iain Reid, MIT license.
|
||||
* Source: https://gist.github.com/iainreid820/5c1cc527fe6b5b7dba41fec7fe54bf6e
|
||||
*/
|
||||
// eslint-disable-next-line @typescript-eslint/no-namespace
|
||||
namespace PackageJson {
|
||||
/**
|
||||
* An author or contributor
|
||||
*/
|
||||
export interface Author {
|
||||
name: string;
|
||||
email?: string;
|
||||
homepage?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A map of exposed bin commands
|
||||
*/
|
||||
export interface BinMap {
|
||||
[commandName: string]: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A bugs link
|
||||
*/
|
||||
export interface Bugs {
|
||||
email: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface Config {
|
||||
name?: string;
|
||||
config?: unknown;
|
||||
}
|
||||
|
||||
/**
|
||||
* A map of dependencies
|
||||
*/
|
||||
export interface DependencyMap {
|
||||
[dependencyName: string]: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* CommonJS package structure
|
||||
*/
|
||||
export interface Directories {
|
||||
lib?: string;
|
||||
bin?: string;
|
||||
man?: string;
|
||||
doc?: string;
|
||||
example?: string;
|
||||
}
|
||||
|
||||
export interface Engines {
|
||||
node?: string;
|
||||
npm?: string;
|
||||
}
|
||||
|
||||
export interface PublishConfig {
|
||||
registry?: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* A project repository
|
||||
*/
|
||||
export interface Repository {
|
||||
type: string;
|
||||
url: string;
|
||||
}
|
||||
|
||||
export interface ScriptsMap {
|
||||
[scriptName: string]: string;
|
||||
}
|
||||
}
|
||||
|
||||
export interface PackageJson {
|
||||
name: string;
|
||||
version: string;
|
||||
engines?: {
|
||||
[key: string]: string;
|
||||
node: string;
|
||||
npm: string;
|
||||
};
|
||||
scripts?: {
|
||||
[key: string]: string;
|
||||
};
|
||||
dependencies?: {
|
||||
[key: string]: string;
|
||||
};
|
||||
devDependencies?: {
|
||||
[key: string]: string;
|
||||
};
|
||||
readonly name?: string;
|
||||
readonly version?: string;
|
||||
readonly description?: string;
|
||||
readonly keywords?: string[];
|
||||
readonly homepage?: string;
|
||||
readonly bugs?: string | PackageJson.Bugs;
|
||||
readonly license?: string;
|
||||
readonly author?: string | PackageJson.Author;
|
||||
readonly contributors?: string[] | PackageJson.Author[];
|
||||
readonly files?: string[];
|
||||
readonly main?: string;
|
||||
readonly bin?: string | PackageJson.BinMap;
|
||||
readonly man?: string | string[];
|
||||
readonly directories?: PackageJson.Directories;
|
||||
readonly repository?: string | PackageJson.Repository;
|
||||
readonly scripts?: PackageJson.ScriptsMap;
|
||||
readonly config?: PackageJson.Config;
|
||||
readonly dependencies?: PackageJson.DependencyMap;
|
||||
readonly devDependencies?: PackageJson.DependencyMap;
|
||||
readonly peerDependencies?: PackageJson.DependencyMap;
|
||||
readonly optionalDependencies?: PackageJson.DependencyMap;
|
||||
readonly bundledDependencies?: string[];
|
||||
readonly engines?: PackageJson.Engines;
|
||||
readonly os?: string[];
|
||||
readonly cpu?: string[];
|
||||
readonly preferGlobal?: boolean;
|
||||
readonly private?: boolean;
|
||||
readonly publishConfig?: PackageJson.PublishConfig;
|
||||
}
|
||||
|
||||
export interface NodeVersion {
|
||||
@@ -214,3 +310,11 @@ export interface Builder {
|
||||
src: string;
|
||||
config?: Config;
|
||||
}
|
||||
|
||||
export interface BuilderFunctions {
|
||||
[key: string]: {
|
||||
memory?: number;
|
||||
maxDuration?: number;
|
||||
runtime?: string;
|
||||
};
|
||||
}
|
||||
|
||||
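
The `functions` field added to `Config` above maps a glob pattern to per-function settings. A minimal sketch using hypothetical paths:

import { BuilderFunctions } from '@now/build-utils';

const functions: BuilderFunctions = {
  // Tune resources for a group of Next.js API routes.
  'pages/api/heavy/**': { memory: 1024, maxDuration: 30 },
  // Route a single file through a custom runtime package.
  'api/legacy.php': { runtime: 'now-php@0.0.5' },
};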
209
packages/now-build-utils/test/integration.test.js
vendored
Normal file
@@ -0,0 +1,209 @@
|
||||
const path = require('path');
|
||||
const fs = require('fs-extra');
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment,
|
||||
} = require('../../../test/lib/deployment/test-deployment');
|
||||
const { glob, detectBuilders, detectRoutes } = require('../');
|
||||
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
|
||||
const builderUrl = '@canary';
|
||||
let buildUtilsUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const buildUtilsPath = path.resolve(__dirname, '..');
|
||||
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
|
||||
console.log('buildUtilsUrl', buildUtilsUrl);
|
||||
});
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
if (fixture.includes('zero-config')) {
|
||||
// Those have separate tests
|
||||
continue; // eslint-disable-line no-continue
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
|
||||
// few foreign tests
|
||||
|
||||
const buildersToTestWith = ['now-next', 'now-node', 'now-static-build'];
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const builder of buildersToTestWith) {
|
||||
const fixturesPath2 = path.resolve(
|
||||
__dirname,
|
||||
`../../${builder}/test/fixtures`
|
||||
);
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
||||
// don't run all foreign fixtures, just some
|
||||
if (['01-cowsay', '01-cache-headers', '03-env-vars'].includes(fixture)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${builder}/${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath2, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes`', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '01-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/my-endpoint',
|
||||
mustContain: 'my-endpoint',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/other-endpoint',
|
||||
mustContain: 'other-endpoint',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/team/zeit',
|
||||
mustContain: 'team/zeit',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/user/myself',
|
||||
mustContain: 'user/myself',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/not-okay/',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt',
|
||||
},
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();
|
||||
});
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes` with `index` files', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/not-okay',
|
||||
status: 404,
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/index',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/index.js',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date.js',
|
||||
mustContain: 'hello from api/date.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
// Someone might expect this to be `date.js`,
|
||||
// but I doubt that there is any case were both
|
||||
// `date/index.js` and `date.js` exists,
|
||||
// so it is not special cased
|
||||
path: '/api/date',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date/',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date/index',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/api/date/index.js',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200,
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt',
|
||||
},
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();
|
||||
});
|
||||
@@ -6,25 +6,8 @@ const { createZip } = require('../dist/lambda');
|
||||
const { glob, download, detectBuilders, detectRoutes } = require('../');
|
||||
const {
|
||||
getSupportedNodeVersion,
|
||||
defaultSelection
|
||||
defaultSelection,
|
||||
} = require('../dist/fs/node-version');
|
||||
const {
|
||||
packAndDeploy,
|
||||
testDeployment
|
||||
} = require('../../../test/lib/deployment/test-deployment');
|
||||
|
||||
jest.setTimeout(4 * 60 * 1000);
|
||||
|
||||
const builderUrl = '@canary';
|
||||
let buildUtilsUrl;
|
||||
|
||||
beforeAll(async () => {
|
||||
const buildUtilsPath = path.resolve(__dirname, '..');
|
||||
buildUtilsUrl = await packAndDeploy(buildUtilsPath);
|
||||
console.log('buildUtilsUrl', buildUtilsUrl);
|
||||
});
|
||||
|
||||
// unit tests
|
||||
|
||||
it('should re-create symlinks properly', async () => {
|
||||
const files = await glob('**', path.join(__dirname, 'symlinks'));
|
||||
@@ -38,7 +21,7 @@ it('should re-create symlinks properly', async () => {
|
||||
|
||||
const [linkStat, aStat] = await Promise.all([
|
||||
fs.lstat(path.join(outDir, 'link.txt')),
|
||||
fs.lstat(path.join(outDir, 'a.txt'))
|
||||
fs.lstat(path.join(outDir, 'a.txt')),
|
||||
]);
|
||||
assert(linkStat.isSymbolicLink());
|
||||
assert(aStat.isFile());
|
||||
@@ -60,7 +43,7 @@ it('should create zip files with symlinks properly', async () => {
|
||||
|
||||
const [linkStat, aStat] = await Promise.all([
|
||||
fs.lstat(path.join(outDir, 'link.txt')),
|
||||
fs.lstat(path.join(outDir, 'a.txt'))
|
||||
fs.lstat(path.join(outDir, 'a.txt')),
|
||||
]);
|
||||
assert(linkStat.isSymbolicLink());
|
||||
assert(aStat.isFile());
|
||||
@@ -120,7 +103,7 @@ it('should support require by path for legacy builders', () => {
|
||||
const glob2 = require('@now/build-utils/fs/glob.js');
|
||||
const rename2 = require('@now/build-utils/fs/rename.js');
|
||||
const {
|
||||
runNpmInstall: runNpmInstall2
|
||||
runNpmInstall: runNpmInstall2,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
const streamToBuffer2 = require('@now/build-utils/fs/stream-to-buffer.js');
|
||||
|
||||
@@ -142,160 +125,98 @@ it('should support require by path for legacy builders', () => {
|
||||
expect(Lambda2).toBe(index.Lambda);
|
||||
});
|
||||
|
||||
// own fixtures
|
||||
|
||||
const fixturesPath = path.resolve(__dirname, 'fixtures');
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath)) {
|
||||
if (fixture.includes('zero-config')) {
|
||||
// Those have separate tests
|
||||
continue; // eslint-disable-line no-continue
|
||||
}
|
||||
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
|
||||
// few foreign tests
|
||||
|
||||
const buildersToTestWith = ['now-next', 'now-node', 'now-static-build'];
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const builder of buildersToTestWith) {
|
||||
const fixturesPath2 = path.resolve(
|
||||
__dirname,
|
||||
`../../${builder}/test/fixtures`
|
||||
);
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const fixture of fs.readdirSync(fixturesPath2)) {
|
||||
// don't run all foreign fixtures, just some
|
||||
if (['01-cowsay', '01-cache-headers', '03-env-vars'].includes(fixture)) {
|
||||
// eslint-disable-next-line no-loop-func
|
||||
it(`should build ${builder}/${fixture}`, async () => {
|
||||
await expect(
|
||||
testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
path.join(fixturesPath2, fixture)
|
||||
)
|
||||
).resolves.toBeDefined();
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
it('Test `detectBuilders`', async () => {
|
||||
{
|
||||
// package.json + no build
|
||||
describe('Test `detectBuilders`', () => {
|
||||
it('package.json + no build', async () => {
|
||||
const pkg = { dependencies: { next: '9.0.0' } };
|
||||
const files = ['package.json', 'pages/index.js', 'public/index.html'];
|
||||
const { builders, errors } = await detectBuilders(files, pkg);
|
||||
expect(builders).toBe(null);
|
||||
expect(errors.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json + no build + next
|
||||
it('package.json + no build + next', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'pages/index.js'];
|
||||
const { builders, errors } = await detectBuilders(files, pkg);
|
||||
expect(builders[0].use).toBe('@now/next');
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json + no build + next
|
||||
it('package.json + no build + next', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'pages/index.js'];
|
||||
const { builders, errors } = await detectBuilders(files, pkg);
|
||||
expect(builders[0].use).toBe('@now/next');
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json + no build
|
||||
it('package.json + no build', async () => {
|
||||
const pkg = {};
|
||||
const files = ['package.json'];
|
||||
const { builders, errors } = await detectBuilders(files, pkg);
|
||||
expect(builders).toBe(null);
|
||||
expect(errors.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// static file
|
||||
it('static file', async () => {
|
||||
const files = ['index.html'];
|
||||
const { builders, errors } = await detectBuilders(files);
|
||||
expect(builders).toBe(null);
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// no package.json + public
|
||||
it('no package.json + public', async () => {
|
||||
const files = ['api/users.js', 'public/index.html'];
|
||||
const { builders, errors } = await detectBuilders(files);
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// no package.json + no build + raw static + api
|
||||
it('no package.json + no build + raw static + api', async () => {
|
||||
const files = ['api/users.js', 'index.html'];
|
||||
const { builders, errors } = await detectBuilders(files);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/users.js');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('index.html');
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json + no build + root + api
|
||||
it('package.json + no build + root + api', async () => {
|
||||
const files = ['index.html', 'api/[endpoint].js', 'static/image.png'];
|
||||
const { builders, errors } = await detectBuilders(files);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/[endpoint].js');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('index.html');
|
||||
expect(builders[2].use).toBe('@now/static');
|
||||
expect(builders[2].src).toBe('static/image.png');
|
||||
expect(builders.length).toBe(3);
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders.length).toBe(2);
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// api + ignore files
|
||||
it('api + ignore files', async () => {
|
||||
const files = [
|
||||
'api/_utils/handler.js',
|
||||
'api/[endpoint]/.helper.js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/[endpoint]/[id].js');
|
||||
expect(builders.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// api + next + public
|
||||
it('api + next + public', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'api/endpoint.js', 'public/index.html'];
|
||||
|
||||
@@ -305,13 +226,12 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[1].use).toBe('@now/next');
|
||||
expect(builders[1].src).toBe('package.json');
|
||||
expect(builders.length).toBe(2);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// api + next + raw static
|
||||
it('api + next + raw static', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'api/endpoint.js', 'index.html'];
|
||||
|
||||
@@ -321,29 +241,25 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[1].use).toBe('@now/next');
|
||||
expect(builders[1].src).toBe('package.json');
|
||||
expect(builders.length).toBe(2);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// api + raw static
|
||||
it('api + raw static', async () => {
|
||||
const files = ['api/endpoint.js', 'index.html', 'favicon.ico'];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/endpoint.js');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('favicon.ico');
|
||||
expect(builders[2].use).toBe('@now/static');
|
||||
expect(builders[2].src).toBe('index.html');
|
||||
expect(builders.length).toBe(3);
|
||||
}
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
expect(builders.length).toBe(2);
|
||||
});
|
||||
|
||||
{
|
||||
// api + public
|
||||
it('api + public', async () => {
|
||||
const files = [
|
||||
'api/endpoint.js',
|
||||
'public/index.html',
|
||||
'public/favicon.ico',
|
||||
'README.md'
|
||||
'README.md',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
@@ -352,22 +268,20 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('public/**/*');
|
||||
expect(builders.length).toBe(2);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// just public
|
||||
it('just public', async () => {
|
||||
const files = ['public/index.html', 'public/favicon.ico', 'README.md'];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
expect(builders[0].src).toBe('public/**/*');
|
||||
expect(builders.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// next + public
|
||||
it('next + public', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['package.json', 'public/index.html', 'README.md'];
|
||||
|
||||
@@ -375,13 +289,12 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[0].use).toBe('@now/next');
|
||||
expect(builders[0].src).toBe('package.json');
|
||||
expect(builders.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// nuxt
|
||||
it('nuxt', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'nuxt build' },
|
||||
dependencies: { nuxt: '2.8.1' }
|
||||
dependencies: { nuxt: '2.8.1' },
|
||||
};
|
||||
const files = ['package.json', 'pages/index.js'];
|
||||
|
||||
@@ -389,10 +302,9 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[0].use).toBe('@now/static-build');
|
||||
expect(builders[0].src).toBe('package.json');
|
||||
expect(builders.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json with no build + api
|
||||
it('package.json with no build + api', async () => {
|
||||
const pkg = { dependencies: { next: '9.0.0' } };
|
||||
const files = ['package.json', 'api/[endpoint].js'];
|
||||
|
||||
@@ -400,45 +312,41 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/[endpoint].js');
|
||||
expect(builders.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json with no build + public directory
|
||||
it('package.json with no build + public directory', async () => {
|
||||
const pkg = { dependencies: { next: '9.0.0' } };
|
||||
const files = ['package.json', 'public/index.html'];
|
||||
|
||||
const { builders, errors } = await detectBuilders(files, pkg);
|
||||
expect(builders).toBe(null);
|
||||
expect(errors.length).toBe(1);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// no package.json + api
|
||||
it('no package.json + api', async () => {
|
||||
const files = ['api/[endpoint].js', 'api/[endpoint]/[id].js'];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
expect(builders.length).toBe(2);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// no package.json + no api
|
||||
it('no package.json + no api', async () => {
|
||||
const files = ['index.html'];
|
||||
|
||||
const { builders, errors } = await detectBuilders(files);
|
||||
expect(builders).toBe(null);
|
||||
expect(errors).toBe(null);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json + api + canary
|
||||
it('package.json + api + canary', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = [
|
||||
'pages/index.js',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg, { tag: 'canary' });
|
||||
@@ -446,18 +354,17 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[1].use).toBe('@now/node@canary');
|
||||
expect(builders[2].use).toBe('@now/next@canary');
|
||||
expect(builders.length).toBe(3);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json + api + latest
|
||||
it('package.json + api + latest', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = [
|
||||
'pages/index.js',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg, { tag: 'latest' });
|
||||
@@ -465,18 +372,17 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[1].use).toBe('@now/node@latest');
|
||||
expect(builders[2].use).toBe('@now/next@latest');
|
||||
expect(builders.length).toBe(3);
|
||||
}
|
||||
});
|
||||
|
||||
{
|
||||
// package.json + api + random tag
|
||||
it('package.json + api + random tag', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' }
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = [
|
||||
'pages/index.js',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg, { tag: 'haha' });
|
||||
@@ -484,7 +390,238 @@ it('Test `detectBuilders`', async () => {
|
||||
expect(builders[1].use).toBe('@now/node@haha');
|
||||
expect(builders[2].use).toBe('@now/next@haha');
|
||||
expect(builders.length).toBe(3);
|
||||
}
|
||||
});
|
||||
|
||||
it('next.js pages/api + api', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['api/user.js', 'pages/api/user.js'];
|
||||
|
||||
const { warnings, errors, builders } = await detectBuilders(files, pkg);
|
||||
|
||||
expect(errors).toBe(null);
|
||||
expect(warnings[0]).toBeDefined();
|
||||
expect(warnings[0].code).toBe('conflicting_files');
|
||||
expect(builders).toBeDefined();
|
||||
expect(builders.length).toBe(2);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[1].use).toBe('@now/next');
|
||||
});
|
||||
|
||||
it('many static files + one api file', async () => {
|
||||
const files = Array.from({ length: 5000 }).map((_, i) => `file${i}.html`);
|
||||
files.push('api/index.ts');
|
||||
const { builders } = await detectBuilders(files);
|
||||
|
||||
expect(builders.length).toBe(2);
|
||||
expect(builders[0].use).toBe('@now/node');
|
||||
expect(builders[0].src).toBe('api/index.ts');
|
||||
expect(builders[1].use).toBe('@now/static');
|
||||
expect(builders[1].src).toBe('!{api/**,package.json}');
|
||||
});
|
||||
|
||||
it('functions with nextjs', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const functions = {
|
||||
'pages/api/teams/**': {
|
||||
memory: 128,
|
||||
maxDuration: 10,
|
||||
},
|
||||
};
|
||||
const files = [
|
||||
'package.json',
|
||||
'pages/index.js',
|
||||
'pages/api/teams/members.ts',
|
||||
];
|
||||
const { builders, errors } = await detectBuilders(files, pkg, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(errors).toBe(null);
|
||||
expect(builders.length).toBe(1);
|
||||
expect(builders[0]).toEqual({
|
||||
src: 'package.json',
|
||||
use: '@now/next',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
functions: {
|
||||
'pages/api/teams/**': {
|
||||
memory: 128,
|
||||
maxDuration: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('extend with functions', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const functions = {
|
||||
'api/users/*.ts': {
|
||||
runtime: 'my-custom-runtime-package@1.0.0',
|
||||
},
|
||||
'api/teams/members.ts': {
|
||||
memory: 128,
|
||||
maxDuration: 10,
|
||||
},
|
||||
};
|
||||
const files = [
|
||||
'package.json',
|
||||
'pages/index.js',
|
||||
'api/users/[id].ts',
|
||||
'api/teams/members.ts',
|
||||
];
|
||||
const { builders } = await detectBuilders(files, pkg, { functions });
|
||||
|
||||
expect(builders.length).toBe(3);
|
||||
expect(builders[0]).toEqual({
|
||||
src: 'api/teams/members.ts',
|
||||
use: '@now/node',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
functions: {
|
||||
'api/teams/members.ts': {
|
||||
memory: 128,
|
||||
maxDuration: 10,
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(builders[1]).toEqual({
|
||||
src: 'api/users/[id].ts',
|
||||
use: 'my-custom-runtime-package@1.0.0',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
functions: {
|
||||
'api/users/*.ts': {
|
||||
runtime: 'my-custom-runtime-package@1.0.0',
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
expect(builders[2]).toEqual({
|
||||
src: 'package.json',
|
||||
use: '@now/next',
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('invalid function key', async () => {
|
||||
const functions = { ['a'.repeat(1000)]: { memory: 128 } };
|
||||
const files = ['pages/index.ts'];
|
||||
const { builders, errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(builders).toBe(null);
|
||||
expect(errors.length).toBe(1);
|
||||
expect(errors[0].code).toBe('invalid_function_glob');
|
||||
});
|
||||
|
||||
it('invalid function maxDuration', async () => {
|
||||
const functions = { 'pages/index.ts': { maxDuration: -1 } };
|
||||
const files = ['pages/index.ts'];
|
||||
const { builders, errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(builders).toBe(null);
|
||||
expect(errors.length).toBe(1);
|
||||
expect(errors[0].code).toBe('invalid_function_duration');
|
||||
});
|
||||
|
||||
it('invalid function memory', async () => {
|
||||
const functions = { 'pages/index.ts': { memory: 200 } };
|
||||
const files = ['pages/index.ts'];
|
||||
const { builders, errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(builders).toBe(null);
|
||||
expect(errors.length).toBe(1);
|
||||
expect(errors[0].code).toBe('invalid_function_memory');
|
||||
});
|
||||
|
||||
it('missing runtime version', async () => {
|
||||
const functions = { 'pages/index.ts': { runtime: 'haha' } };
|
||||
const files = ['pages/index.ts'];
|
||||
const { builders, errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(builders).toBe(null);
|
||||
expect(errors.length).toBe(1);
|
||||
expect(errors[0].code).toBe('invalid_function_runtime');
|
||||
});
|
||||
|
||||
it('use a custom runtime', async () => {
|
||||
const functions = { 'api/user.php': { runtime: 'now-php@0.0.5' } };
|
||||
const files = ['api/user.php'];
|
||||
const { builders, errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(errors).toBe(null);
|
||||
expect(builders.length).toBe(1);
|
||||
expect(builders[0].use).toBe('now-php@0.0.5');
|
||||
});
|
||||
|
||||
it('use a custom runtime but without a source', async () => {
|
||||
const functions = { 'api/user.php': { runtime: 'now-php@0.0.5' } };
|
||||
const files = ['api/team.js'];
|
||||
const { errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(errors.length).toBe(1);
|
||||
expect(errors[0].code).toBe('invalid_function_source');
|
||||
});
|
||||
|
||||
it('do not allow empty functions', async () => {
|
||||
const functions = { 'api/user.php': {} };
|
||||
const files = ['api/user.php'];
|
||||
const { errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(errors.length).toBe(1);
|
||||
expect(errors[0].code).toBe('invalid_function');
|
||||
});
|
||||
|
||||
it('do not allow null functions', async () => {
|
||||
const functions = { 'api/user.php': null };
|
||||
const files = ['api/user.php'];
|
||||
const { errors } = await detectBuilders(files, null, {
|
||||
functions,
|
||||
});
|
||||
|
||||
expect(errors.length).toBe(1);
|
||||
expect(errors[0].code).toBe('invalid_function');
|
||||
});
|
||||
|
||||
it('Do not allow functions that are not used by @now/next', async () => {
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
dependencies: { next: '9.0.0' },
|
||||
};
|
||||
const functions = { 'test.js': { memory: 1024 } };
|
||||
const files = ['pages/index.js', 'test.js'];
|
||||
|
||||
const { errors } = await detectBuilders(files, pkg, { functions });
|
||||
|
||||
expect(errors).toBeDefined();
|
||||
expect(errors[0].code).toBe('unused_function');
|
||||
});
|
||||
});
|
||||
|
||||
it('Test `detectRoutes`', async () => {
|
||||
@@ -545,7 +682,7 @@ it('Test `detectRoutes`', async () => {
|
||||
const files = [
|
||||
'public/index.html',
|
||||
'api/[endpoint].js',
|
||||
'api/[endpoint]/[id].js'
|
||||
'api/[endpoint]/[id].js',
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files);
|
||||
@@ -560,7 +697,7 @@ it('Test `detectRoutes`', async () => {
|
||||
{
|
||||
const pkg = {
|
||||
scripts: { build: 'next build' },
|
||||
devDependencies: { next: '9.0.0' }
|
||||
devDependencies: { next: '9.0.0' },
|
||||
};
|
||||
const files = ['public/index.html', 'api/[endpoint].js'];
|
||||
|
||||
@@ -591,7 +728,7 @@ it('Test `detectRoutes`', async () => {
|
||||
'^/api/date(\\/|\\/index|\\/index\\.js)?$'
|
||||
);
|
||||
expect(defaultRoutes[0].dest).toBe('/api/date/index.js');
|
||||
expect(defaultRoutes[1].src).toBe('^/api/(date|date\\.js)$');
|
||||
expect(defaultRoutes[1].src).toBe('^/api/(date\\/|date|date\\.js)$');
|
||||
expect(defaultRoutes[1].dest).toBe('/api/date.js');
|
||||
}
|
||||
|
||||
@@ -606,7 +743,7 @@ it('Test `detectRoutes`', async () => {
|
||||
'^/api/([^\\/]+)(\\/|\\/index|\\/index\\.js)?$'
|
||||
);
|
||||
expect(defaultRoutes[0].dest).toBe('/api/[date]/index.js?date=$1');
|
||||
expect(defaultRoutes[1].src).toBe('^/api/(date|date\\.js)$');
|
||||
expect(defaultRoutes[1].src).toBe('^/api/(date\\/|date|date\\.js)$');
|
||||
expect(defaultRoutes[1].dest).toBe('/api/date.js');
|
||||
}
|
||||
|
||||
@@ -617,7 +754,7 @@ it('Test `detectRoutes`', async () => {
|
||||
'api/users/index.ts',
|
||||
'api/users/index.d.ts',
|
||||
'api/food.ts',
|
||||
'api/ts/gold.ts'
|
||||
'api/ts/gold.ts',
|
||||
];
|
||||
const { builders } = await detectBuilders(files);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
@@ -629,147 +766,16 @@ it('Test `detectRoutes`', async () => {
|
||||
expect(builders[3].use).toBe('@now/node');
|
||||
expect(defaultRoutes.length).toBe(5);
|
||||
}
|
||||
});
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes`', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '01-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/my-endpoint',
|
||||
mustContain: 'my-endpoint',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/other-endpoint',
|
||||
mustContain: 'other-endpoint',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/team/zeit',
|
||||
mustContain: 'team/zeit',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/user/myself',
|
||||
mustContain: 'user/myself',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/not-okay/',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt'
|
||||
}
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();
|
||||
});
|
||||
|
||||
it('Test `detectBuilders` and `detectRoutes` with `index` files', async () => {
|
||||
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
|
||||
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
|
||||
const fileList = await glob('**', fixture);
|
||||
const files = Object.keys(fileList);
|
||||
|
||||
const probes = [
|
||||
{
|
||||
path: '/api/not-okay',
|
||||
status: 404
|
||||
},
|
||||
{
|
||||
path: '/api',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/index',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/index.js',
|
||||
mustContain: 'hello from api/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date.js',
|
||||
mustContain: 'hello from api/date.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
// Someone might expect this to be `date.js`,
// but I doubt that there is any case where both
// `date/index.js` and `date.js` exist,
// so it is not special-cased
|
||||
path: '/api/date',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date/',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date/index',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/api/date/index.js',
|
||||
mustContain: 'hello from api/date/index.js',
|
||||
status: 200
|
||||
},
|
||||
{
|
||||
path: '/',
|
||||
mustContain: 'hello from index.txt'
|
||||
}
|
||||
];
|
||||
|
||||
const { builders } = await detectBuilders(files, pkg);
|
||||
const { defaultRoutes } = await detectRoutes(files, builders);
|
||||
|
||||
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
|
||||
await fs.writeFile(
|
||||
path.join(fixture, 'now.json'),
|
||||
JSON.stringify(nowConfig, null, 2)
|
||||
);
|
||||
|
||||
const deployment = await testDeployment(
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixture
|
||||
);
|
||||
expect(deployment).toBeDefined();

  {
    // use a custom runtime
    const functions = { 'api/user.php': { runtime: 'now-php@0.0.5' } };
    const files = ['api/user.php'];

    const { builders } = await detectBuilders(files, null, { functions });
    const { defaultRoutes } = await detectRoutes(files, builders);

    expect(defaultRoutes.length).toBe(2);
    expect(defaultRoutes[0].dest).toBe('/api/user.php');
  }
});
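The `functions` object handed to `detectBuilders` in the block above mirrors what a user would declare in `now.json`. A minimal sketch of that configuration, reusing the `api/user.php` entrypoint and `now-php@0.0.5` runtime from the test (everything else is an assumption):

```js
// Sketch of the user-facing config the test simulates; only the file name
// and runtime value are taken from the test above.
const nowJson = {
  functions: {
    'api/user.php': { runtime: 'now-php@0.0.5' },
  },
};
```
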
@@ -1,15 +1,18 @@
const path = require('path');
const { mkdirp, copyFile } = require('fs-extra');

const glob = require('@now/build-utils/fs/glob');
const download = require('@now/build-utils/fs/download');
const { createLambda } = require('@now/build-utils/lambda');
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory');
const { shouldServe } = require('@now/build-utils');
const {
  glob,
  download,
  shouldServe,
  createLambda,
  getWritableDirectory,
  getLambdaOptionsFromFunction,
} = require('@now/build-utils');

exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;

exports.build = async ({ workPath, files, entrypoint, meta }) => {
exports.build = async ({ workPath, files, entrypoint, meta, config }) => {
  console.log('downloading files...');
  const outDir = await getWritableDirectory();

@@ -27,17 +30,23 @@ exports.build = async ({ workPath, files, entrypoint, meta }) => {
    path.join(outDir, entrypoint)
  );

  const lambdaOptions = await getLambdaOptionsFromFunction({
    sourceFile: entrypoint,
    config,
  });

  const lambda = await createLambda({
    files: await glob('**', outDir),
    handler: 'handler',
    runtime: 'go1.x',
    environment: {
      SCRIPT_FILENAME: entrypoint
    }
      SCRIPT_FILENAME: entrypoint,
    },
    ...lambdaOptions,
  });

  return {
    [entrypoint]: lambda
    [entrypoint]: lambda,
  };
};

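As a rough illustration of what feeds the new `getLambdaOptionsFromFunction` call above: a `functions` entry in the deployment config can carry per-function Lambda options. The `memory` key appears in the tests earlier in this compare; `maxDuration` and the `api/env.cgi` file name are assumptions rather than anything confirmed by this diff.

```js
// Assumed config shape — only `memory` is seen elsewhere in this compare.
const config = {
  functions: {
    'api/env.cgi': { memory: 1024, maxDuration: 10 },
  },
};

// The builder above resolves these and spreads them into createLambda():
// await getLambdaOptionsFromFunction({ sourceFile: entrypoint, config });
```
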
@@ -1,6 +1,6 @@
{
  "name": "@now/cgi",
  "version": "0.1.5-canary.1",
  "version": "0.1.5-canary.2",
  "license": "MIT",
  "repository": {
    "type": "git",

@@ -1,3 +0,0 @@
declare module 'cache-or-tmp-directory' {
  export default function (appName: string) : string | null
}
@@ -1,3 +0,0 @@
declare module 'pcre-to-regexp' {
  export default function (pattern: string, keys?: string[]): RegExp
}
@@ -2,11 +2,11 @@

[](https://spectrum.chat/zeit)

## Usage
## Usages

To install the latest version of Now CLI, visit [zeit.co/download](https://zeit.co/download) or run this command:

```
```bash
npm i -g now
```

@@ -1,6 +1,6 @@
{
  "name": "now",
  "version": "16.1.3-canary.3",
  "version": "16.1.4-canary.35",
  "preferGlobal": true,
  "license": "Apache-2.0",
  "description": "The command-line interface for Now",
@@ -14,6 +14,7 @@
    "preinstall": "node ./scripts/preinstall.js",
    "test-unit": "nyc ava test/*unit.js --serial --fail-fast --verbose",
    "test-integration": "ava test/integration.js --serial --fail-fast",
    "test-integration-v1": "ava test/integration-v1.js --serial --fail-fast",
    "test-integration-now-dev": "ava test/dev/integration.js --serial --fail-fast --verbose",
    "prepublishOnly": "yarn build",
    "coverage": "nyc report --reporter=text-lcov > coverage.lcov && codecov",
@@ -58,15 +59,9 @@
    ]
  },
  "engines": {
    "node": ">= 8.11"
    "node": ">= 8"
  },
  "devDependencies": {
    "@now/build-utils": "0.9.14-canary.2",
    "@now/go": "latest",
    "@now/next": "latest",
    "@now/node": "latest",
    "@now/routing-utils": "1.2.3-canary.1",
    "@now/static-build": "latest",
    "@sentry/node": "5.5.0",
    "@types/ansi-escapes": "3.0.0",
    "@types/ansi-regex": "4.0.0",
@@ -98,7 +93,7 @@
    "@types/which": "1.3.1",
    "@types/write-json-file": "2.2.1",
    "@zeit/dockerignore": "0.0.5",
    "@zeit/fun": "0.9.1",
    "@zeit/fun": "0.10.2",
    "@zeit/ncc": "0.18.5",
    "@zeit/source-map-support": "0.6.2",
    "ajv": "6.10.2",
@@ -111,7 +106,6 @@
|
||||
"async-sema": "2.1.4",
|
||||
"ava": "2.2.0",
|
||||
"bytes": "3.0.0",
|
||||
"cache-or-tmp-directory": "1.0.0",
|
||||
"chalk": "2.4.2",
|
||||
"chokidar": "2.1.6",
|
||||
"clipboardy": "2.1.0",
|
||||
@@ -132,10 +126,10 @@
|
||||
"escape-html": "1.0.3",
|
||||
"esm": "3.1.4",
|
||||
"execa": "1.0.0",
|
||||
"fetch-h2": "2.0.3",
|
||||
"fs-extra": "7.0.1",
|
||||
"glob": "7.1.2",
|
||||
"http-proxy": "1.17.0",
|
||||
"ignore": "4.0.6",
|
||||
"ini": "1.3.4",
|
||||
"inquirer": "3.3.0",
|
||||
"is-url": "1.2.2",
|
||||
@@ -146,12 +140,13 @@
|
||||
"mime-types": "2.1.24",
|
||||
"minimatch": "3.0.4",
|
||||
"mri": "1.1.0",
|
||||
"ms": "2.1.1",
|
||||
"ms": "2.1.2",
|
||||
"node-fetch": "1.7.3",
|
||||
"now-client": "./packages/now-client",
|
||||
"npm-package-arg": "6.1.0",
|
||||
"nyc": "13.2.0",
|
||||
"ora": "3.4.0",
|
||||
"pcre-to-regexp": "0.0.5",
|
||||
"pcre-to-regexp": "1.0.0",
|
||||
"pluralize": "7.0.0",
|
||||
"pre-commit": "1.2.2",
|
||||
"printf": "0.2.5",
|
||||
@@ -174,6 +169,7 @@
|
||||
"through2": "2.0.3",
|
||||
"title": "3.4.1",
|
||||
"tmp-promise": "1.0.3",
|
||||
"tree-kill": "1.2.1",
|
||||
"ts-node": "8.3.0",
|
||||
"typescript": "3.2.4",
|
||||
"universal-analytics": "0.4.20",
|
||||
|
||||
@@ -8,16 +8,13 @@ import { createWriteStream, mkdirp, remove, writeJSON } from 'fs-extra';
|
||||
|
||||
import { getDistTag } from '../src/util/get-dist-tag';
|
||||
import pkg from '../package.json';
|
||||
import { getBundledBuilders } from '../src/util/dev/get-bundled-builders';
|
||||
|
||||
const dirRoot = join(__dirname, '..');
|
||||
|
||||
const bundledBuilders = Object.keys(pkg.devDependencies).filter(d =>
|
||||
d.startsWith('@now/')
|
||||
);
|
||||
|
||||
async function createBuildersTarball() {
|
||||
const distTag = getDistTag(pkg.version);
|
||||
const builders = Array.from(bundledBuilders).map(b => `${b}@${distTag}`);
|
||||
const builders = Array.from(getBundledBuilders()).map(b => `${b}@${distTag}`);
|
||||
console.log(`Creating builders tarball with: ${builders.join(', ')}`);
|
||||
|
||||
const buildersDir = join(dirRoot, '.builders');
|
||||
@@ -39,7 +36,7 @@ async function createBuildersTarball() {
|
||||
const yarn = join(dirRoot, '../../node_modules/yarn/bin/yarn.js');
|
||||
await execa(process.execPath, [yarn, 'add', '--no-lockfile', ...builders], {
|
||||
cwd: buildersDir,
|
||||
stdio: 'inherit'
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
const packer = tar.pack(buildersDir);
|
||||
@@ -66,7 +63,7 @@ async function main() {
|
||||
// Compile the `doT.js` template files for `now dev`
|
||||
console.log();
|
||||
await execa(process.execPath, [join(__dirname, 'compile-templates.js')], {
|
||||
stdio: 'inherit'
|
||||
stdio: 'inherit',
|
||||
});
|
||||
|
||||
// Do the initial `ncc` build
|
||||
@@ -92,20 +89,22 @@ async function main() {
|
||||
// get compiled into the final ncc bundle file, however, we want them to be
|
||||
// present in the npm package because the contents of those files are involved
|
||||
// with `fun`'s cache invalidation mechanism and they need to be shasum'd.
|
||||
const runtimes = join(dirRoot, '../../node_modules/@zeit/fun/dist/src/runtimes');
|
||||
const runtimes = join(
|
||||
dirRoot,
|
||||
'../../node_modules/@zeit/fun/dist/src/runtimes'
|
||||
);
|
||||
const dest = join(dirRoot, 'dist/runtimes');
|
||||
await cpy('**/*', dest, { parents: true, cwd: runtimes });
|
||||
|
||||
console.log('Finished building `now-cli`');
|
||||
}
|
||||
|
||||
process.on('unhandledRejection', (err: any) => {
|
||||
console.error('Unhandled Rejection:');
|
||||
console.error(err);
|
||||
process.on('unhandledRejection', (reason: any, promise: Promise<any>) => {
|
||||
console.error('Unhandled Rejection at:', promise, 'reason:', reason);
|
||||
process.exit(1);
|
||||
});
|
||||
|
||||
process.on('uncaughtException', (err: any) => {
|
||||
process.on('uncaughtException', err => {
|
||||
console.error('Uncaught Exception:');
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
|
||||
@@ -11,7 +11,10 @@ import strlen from '../../util/strlen.ts';
|
||||
import wait from '../../util/output/wait';
|
||||
|
||||
export default async function ls(ctx, opts, args, output) {
|
||||
const { authConfig: { token }, config } = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config
|
||||
} = ctx;
|
||||
const { currentTeam } = config;
|
||||
const { apiUrl } = ctx;
|
||||
const { '--debug': debugEnabled } = opts;
|
||||
@@ -48,15 +51,13 @@ export default async function ls(ctx, opts, args, output) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!opts['--json']) {
|
||||
cancelWait = wait(
|
||||
args[0]
|
||||
? `Fetching alias details for "${args[0]}" under ${chalk.bold(
|
||||
contextName
|
||||
)}`
|
||||
: `Fetching aliases under ${chalk.bold(contextName)}`
|
||||
);
|
||||
}
|
||||
cancelWait = wait(
|
||||
args[0]
|
||||
? `Fetching alias details for "${args[0]}" under ${chalk.bold(
|
||||
contextName
|
||||
)}`
|
||||
: `Fetching aliases under ${chalk.bold(contextName)}`
|
||||
);
|
||||
|
||||
const aliases = await getAliases(now);
|
||||
if (cancelWait) cancelWait();
|
||||
@@ -72,7 +73,7 @@ export default async function ls(ctx, opts, args, output) {
|
||||
}
|
||||
|
||||
if (opts['--json']) {
|
||||
output.print(JSON.stringify({ rules: alias.rules }, null, 2));
|
||||
console.log(JSON.stringify({ rules: alias.rules }, null, 2));
|
||||
} else {
|
||||
const rules = alias.rules || [];
|
||||
output.log(
|
||||
@@ -105,11 +106,11 @@ function printAliasTable(aliases) {
|
||||
a.rules && a.rules.length
|
||||
? chalk.cyan(`[${plural('rule', a.rules.length, true)}]`)
|
||||
: // for legacy reasons, we might have situations
|
||||
// where the deployment was deleted and the alias
|
||||
// not collected appropriately, and we need to handle it
|
||||
a.deployment && a.deployment.url
|
||||
? a.deployment.url
|
||||
: chalk.gray('–'),
|
||||
// where the deployment was deleted and the alias
|
||||
// not collected appropriately, and we need to handle it
|
||||
a.deployment && a.deployment.url
|
||||
? a.deployment.url
|
||||
: chalk.gray('–'),
|
||||
a.alias,
|
||||
ms(Date.now() - new Date(a.created))
|
||||
])
|
||||
|
||||
@@ -38,7 +38,7 @@ export default async function set(
|
||||
const {
|
||||
authConfig: { token },
|
||||
config,
|
||||
localConfig
|
||||
localConfig,
|
||||
} = ctx;
|
||||
|
||||
const { currentTeam } = config;
|
||||
@@ -48,14 +48,14 @@ export default async function set(
|
||||
const {
|
||||
'--debug': debugEnabled,
|
||||
'--no-verify': noVerify,
|
||||
'--rules': rulesPath
|
||||
'--rules': rulesPath,
|
||||
} = opts;
|
||||
|
||||
const client = new Client({
|
||||
apiUrl,
|
||||
token,
|
||||
currentTeam,
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
});
|
||||
let contextName = null;
|
||||
let user = null;
|
||||
@@ -79,12 +79,14 @@ export default async function set(
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!isValidName(args[0])) {
|
||||
output.error(`The provided argument "${args[0]}" is not a valid deployment`);
|
||||
if (args.length >= 1 && !isValidName(args[0])) {
|
||||
output.error(
|
||||
`The provided argument "${args[0]}" is not a valid deployment`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (!isValidName(args[1])) {
|
||||
if (args.length >= 2 && !isValidName(args[1])) {
|
||||
output.error(`The provided argument "${args[1]}" is not a valid domain`);
|
||||
return 1;
|
||||
}
|
||||
@@ -212,6 +214,7 @@ export default async function set(
|
||||
for (const target of targets) {
|
||||
output.log(`Assigning alias ${target} to deployment ${deployment.url}`);
|
||||
|
||||
const isWildcard = isWildcardAlias(target);
|
||||
const record = await assignAlias(
|
||||
output,
|
||||
client,
|
||||
@@ -222,13 +225,14 @@ export default async function set(
|
||||
);
|
||||
const handleResult = handleSetupDomainError(
|
||||
output,
|
||||
handleCreateAliasError(output, record)
|
||||
handleCreateAliasError(output, record),
|
||||
isWildcard
|
||||
);
|
||||
if (handleResult === 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
const prefix = isWildcardAlias(handleResult.alias) ? '' : 'https://';
|
||||
const prefix = isWildcard ? '' : 'https://';
|
||||
|
||||
console.log(
|
||||
`${chalk.cyan('> Success!')} ${chalk.bold(
|
||||
@@ -246,10 +250,15 @@ type SetupDomainError = Exclude<SetupDomainResolve, Domain>;
|
||||
|
||||
function handleSetupDomainError<T>(
|
||||
output: Output,
|
||||
error: SetupDomainError | T
|
||||
error: SetupDomainError | T,
|
||||
isWildcard: boolean = false
|
||||
): T | 1 {
|
||||
if (error instanceof ERRORS.DomainVerificationFailed) {
|
||||
const { nsVerification, txtVerification, domain } = error.meta;
|
||||
if (
|
||||
error instanceof ERRORS.DomainVerificationFailed ||
|
||||
error instanceof ERRORS.DomainNsNotVerifiedForWildcard
|
||||
) {
|
||||
const { nsVerification, domain } = error.meta;
|
||||
|
||||
output.error(
|
||||
`We could not alias since the domain ${domain} could not be verified due to the following reasons:\n`
|
||||
);
|
||||
@@ -265,25 +274,34 @@ function handleSetupDomainError<T>(
|
||||
{ extraSpace: ' ' }
|
||||
)}\n\n`
|
||||
);
|
||||
output.print(
|
||||
` ${chalk.gray(
|
||||
'b)'
|
||||
)} DNS TXT verification failed since found no matching records.`
|
||||
);
|
||||
output.print(
|
||||
`\n${formatDnsTable(
|
||||
[['_now', 'TXT', txtVerification.verificationRecord]],
|
||||
{ extraSpace: ' ' }
|
||||
)}\n\n`
|
||||
);
|
||||
output.print(
|
||||
` Once your domain uses either the nameservers or the TXT DNS record from above, run again ${cmd(
|
||||
'now domains verify <domain>'
|
||||
)}.\n`
|
||||
);
|
||||
output.print(
|
||||
` We will also periodically run a verification check for you and you will receive an email once your domain is verified.\n`
|
||||
);
|
||||
if (error instanceof ERRORS.DomainVerificationFailed && !isWildcard) {
|
||||
const { txtVerification } = error.meta;
|
||||
output.print(
|
||||
` ${chalk.gray(
|
||||
'b)'
|
||||
)} DNS TXT verification failed since found no matching records.`
|
||||
);
|
||||
output.print(
|
||||
`\n${formatDnsTable(
|
||||
[['_now', 'TXT', txtVerification.verificationRecord]],
|
||||
{ extraSpace: ' ' }
|
||||
)}\n\n`
|
||||
);
|
||||
output.print(
|
||||
` Once your domain uses either the nameservers or the TXT DNS record from above, run again ${cmd(
|
||||
'now domains verify <domain>'
|
||||
)}.\n`
|
||||
);
|
||||
output.print(
|
||||
` We will also periodically run a verification check for you and you will receive an email once your domain is verified.\n`
|
||||
);
|
||||
} else {
|
||||
output.print(
|
||||
` Once your domain uses the nameservers from above, run again ${cmd(
|
||||
'now domains verify <domain>'
|
||||
)}.\n`
|
||||
);
|
||||
}
|
||||
output.print(' Read more: https://err.sh/now/domain-verification\n');
|
||||
return 1;
|
||||
}
|
||||
@@ -349,9 +367,7 @@ function handleSetupDomainError<T>(
|
||||
|
||||
if (error instanceof ERRORS.DomainPurchasePending) {
|
||||
output.error(
|
||||
`The domain ${
|
||||
error.meta.domain
|
||||
} is processing and will be available once the order is completed.`
|
||||
`The domain ${error.meta.domain} is processing and will be available once the order is completed.`
|
||||
);
|
||||
output.print(
|
||||
` An email will be sent upon completion so you can alias to your new domain.\n`
|
||||
@@ -467,9 +483,7 @@ function handleCreateAliasError<T>(
|
||||
}
|
||||
if (error instanceof ERRORS.ForbiddenScaleMinInstances) {
|
||||
output.error(
|
||||
`You can't scale to more than ${
|
||||
error.meta.max
|
||||
} min instances with your current plan.`
|
||||
`You can't scale to more than ${error.meta.max} min instances with your current plan.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
@@ -490,9 +504,7 @@ function handleCreateAliasError<T>(
|
||||
|
||||
if (error instanceof ERRORS.CertMissing) {
|
||||
output.error(
|
||||
`There is no certificate for the domain ${
|
||||
error.meta.domain
|
||||
} and it could not be created.`
|
||||
`There is no certificate for the domain ${error.meta.domain} and it could not be created.`
|
||||
);
|
||||
output.log(
|
||||
`Please generate a new certificate manually with ${cmd(
|
||||
|
||||
@@ -1,12 +1,15 @@
|
||||
import chalk from 'chalk';
|
||||
|
||||
// @ts-ignore
|
||||
import Now from '../../util';
|
||||
import Client from '../../util/client.ts';
|
||||
import getScope from '../../util/get-scope.ts';
|
||||
import stamp from '../../util/output/stamp.ts';
|
||||
import Client from '../../util/client';
|
||||
import getScope from '../../util/get-scope';
|
||||
import stamp from '../../util/output/stamp';
|
||||
import wait from '../../util/output/wait';
|
||||
import createCertFromFile from '../../util/certs/create-cert-from-file';
|
||||
import createCertForCns from '../../util/certs/create-cert-for-cns';
|
||||
import { NowContext } from '../../types';
|
||||
import { Output } from '../../util/output';
|
||||
|
||||
import {
|
||||
DomainPermissionDenied,
|
||||
@@ -14,7 +17,20 @@ import {
|
||||
} from '../../util/errors-ts';
|
||||
import handleCertError from '../../util/certs/handle-cert-error';
|
||||
|
||||
async function add(ctx, opts, args, output) {
|
||||
interface Options {
|
||||
'--overwrite'?: boolean;
|
||||
'--debug'?: boolean;
|
||||
'--crt'?: string;
|
||||
'--key'?: string;
|
||||
'--ca'?: string;
|
||||
}
|
||||
|
||||
async function add(
|
||||
ctx: NowContext,
|
||||
opts: Options,
|
||||
args: string[],
|
||||
output: Output
|
||||
): Promise<number> {
|
||||
const {
|
||||
authConfig: { token },
|
||||
config
|
||||
@@ -77,10 +93,12 @@ async function add(ctx, opts, args, output) {
|
||||
|
||||
// Create a custom certificate from the given file paths
|
||||
cert = await createCertFromFile(now, keyPath, crtPath, caPath, contextName);
|
||||
|
||||
if (cert instanceof InvalidCert) {
|
||||
output.error(`The provided certificate is not valid and can't be added.`);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (cert instanceof DomainPermissionDenied) {
|
||||
output.error(
|
||||
`You don't have permissions over domain ${chalk.underline(
|
||||
@@ -97,6 +115,7 @@ async function add(ctx, opts, args, output) {
|
||||
'now certs issue <cn> <cns>'
|
||||
)} instead`
|
||||
);
|
||||
|
||||
if (args.length < 1) {
|
||||
output.error(
|
||||
`Invalid number of arguments to create a custom certificate entry. Usage:`
|
||||
@@ -107,34 +126,43 @@ async function add(ctx, opts, args, output) {
|
||||
}
|
||||
|
||||
// Create the certificate from the given array of CNs
|
||||
const cns = args.reduce((res, item) => [...res, ...item.split(',')], []);
|
||||
const cns = args.reduce<string[]>((res, item) => res.concat(item.split(',')), []);
|
||||
const cancelWait = wait(
|
||||
`Generating a certificate for ${chalk.bold(cns.join(', '))}`
|
||||
);
|
||||
|
||||
cert = await createCertForCns(now, cns, contextName);
|
||||
cancelWait();
|
||||
|
||||
const result = handleCertError(output, cert);
|
||||
if (result === 1) {
|
||||
return result
|
||||
}
|
||||
|
||||
if (cert instanceof DomainPermissionDenied) {
|
||||
output.error(
|
||||
`You don't have permissions over domain ${chalk.underline(
|
||||
cert.meta.domain
|
||||
)} under ${chalk.bold(cert.meta.context)}.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
// Print success message
|
||||
output.success(
|
||||
`Certificate entry for ${chalk.bold(
|
||||
cert.cns.join(', ')
|
||||
)} created ${addStamp()}`
|
||||
);
|
||||
const result = handleCertError(output, cert);
|
||||
|
||||
if (result === 1) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (cert instanceof DomainPermissionDenied) {
|
||||
output.error(
|
||||
`You don't have permissions over domain ${chalk.underline(
|
||||
cert.meta.domain
|
||||
)} under ${chalk.bold(cert.meta.context)}.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (cert instanceof Error) {
|
||||
// All cert errors are handled above,
|
||||
// so this is only for typescript
|
||||
throw cert;
|
||||
} else {
|
||||
// Print success message
|
||||
output.success(
|
||||
`Certificate entry for ${chalk.bold(
|
||||
cert.cns.join(', ')
|
||||
)} created ${addStamp()}`
|
||||
);
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
//
|
||||
import chalk from 'chalk';
|
||||
|
||||
// @ts-ignore
|
||||
import { handleError } from '../../util/error';
|
||||
|
||||
import createOutput from '../../util/output';
|
||||
@@ -12,6 +12,7 @@ import add from './add';
|
||||
import issue from './issue';
|
||||
import ls from './ls';
|
||||
import rm from './rm';
|
||||
import { NowContext } from '../../types';
|
||||
|
||||
const help = () => {
|
||||
console.log(`
|
||||
@@ -71,7 +72,7 @@ const COMMAND_CONFIG = {
|
||||
rm: ['rm', 'remove']
|
||||
};
|
||||
|
||||
export default async function main(ctx) {
|
||||
export default async function main(ctx: NowContext) {
|
||||
let argv;
|
||||
|
||||
try {
|
||||
@@ -3,16 +3,29 @@ import ms from 'ms';
|
||||
import plural from 'pluralize';
|
||||
import psl from 'psl';
|
||||
import table from 'text-table';
|
||||
// @ts-ignore
|
||||
import Now from '../../util';
|
||||
import cmd from '../../util/output/cmd';
|
||||
import Client from '../../util/client.ts';
|
||||
import getScope from '../../util/get-scope.ts';
|
||||
import stamp from '../../util/output/stamp.ts';
|
||||
import Client from '../../util/client';
|
||||
import getScope from '../../util/get-scope';
|
||||
import stamp from '../../util/output/stamp';
|
||||
import getCerts from '../../util/certs/get-certs';
|
||||
import { CertNotFound } from '../../util/errors-ts';
|
||||
import strlen from '../../util/strlen.ts';
|
||||
import strlen from '../../util/strlen';
|
||||
import { Output } from '../../util/output';
|
||||
import { NowContext, Cert } from '../../types';
|
||||
|
||||
async function ls(ctx, opts, args, output) {
|
||||
interface Options {
|
||||
'--debug'?: boolean;
|
||||
'--after'?: string;
|
||||
}
|
||||
|
||||
async function ls(
|
||||
ctx: NowContext,
|
||||
opts: Options,
|
||||
args: string[],
|
||||
output: Output
|
||||
): Promise<number> {
|
||||
const { authConfig: { token }, config } = ctx;
|
||||
const { currentTeam } = config;
|
||||
const { apiUrl } = ctx;
|
||||
@@ -32,7 +45,6 @@ async function ls(ctx, opts, args, output) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
// $FlowFixMe
|
||||
const now = new Now({ apiUrl, token, debug, currentTeam });
|
||||
const lsStamp = stamp();
|
||||
|
||||
@@ -55,7 +67,6 @@ async function ls(ctx, opts, args, output) {
|
||||
throw certificates;
|
||||
}
|
||||
|
||||
const { uid: lastCert } = certificates[certificates.length - 1];
|
||||
const certs = sortByCn(certificates);
|
||||
|
||||
output.log(
|
||||
@@ -65,7 +76,8 @@ async function ls(ctx, opts, args, output) {
|
||||
);
|
||||
|
||||
if (certs.length >= 100) {
|
||||
output.note(`There may be more certificates that can be retrieved with ${cmd(`now ${process.argv.slice(2).join(' ')} --after=${lastCert}`)}.`);
|
||||
const { uid: lastCert } = certificates[certificates.length - 1];
|
||||
output.note(`There may be more certificates that can be retrieved with ${cmd(`now ${process.argv.slice(2).join(' ')} --after=${lastCert}`)}.\n`);
|
||||
}
|
||||
|
||||
if (certs.length > 0) {
|
||||
@@ -75,7 +87,7 @@ async function ls(ctx, opts, args, output) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
function formatCertsTable(certsList) {
|
||||
function formatCertsTable(certsList: Cert[]) {
|
||||
return `${table(
|
||||
[formatCertsTableHead(), ...formatCertsTableBody(certsList)],
|
||||
{
|
||||
@@ -86,7 +98,7 @@ function formatCertsTable(certsList) {
|
||||
).replace(/^(.*)/gm, ' $1')}\n`;
|
||||
}
|
||||
|
||||
function formatCertsTableHead() {
|
||||
function formatCertsTableHead(): string[] {
|
||||
return [
|
||||
chalk.dim('id'),
|
||||
chalk.dim('cns'),
|
||||
@@ -96,15 +108,12 @@ function formatCertsTableHead() {
|
||||
];
|
||||
}
|
||||
|
||||
function formatCertsTableBody(certsList) {
|
||||
function formatCertsTableBody(certsList: Cert[]) {
|
||||
const now = new Date();
|
||||
return certsList.reduce(
|
||||
(result, cert) => [...result, ...formatCert(now, cert)],
|
||||
[]
|
||||
);
|
||||
return certsList.reduce<string[][]>((result, cert) => result.concat(formatCert(now, cert)), []);
|
||||
}
|
||||
|
||||
function formatCert(time, cert) {
|
||||
function formatCert(time: Date, cert: Cert) {
|
||||
return cert.cns.map(
|
||||
(cn, idx) =>
|
||||
idx === 0
|
||||
@@ -113,26 +122,26 @@ function formatCert(time, cert) {
|
||||
);
|
||||
}
|
||||
|
||||
function formatCertNonFirstCn(cn, multiple) {
|
||||
function formatCertNonFirstCn(cn: string, multiple: boolean): string[] {
|
||||
return ['', formatCertCn(cn, multiple), '', '', ''];
|
||||
}
|
||||
|
||||
function formatCertCn(cn, multiple) {
|
||||
function formatCertCn(cn: string, multiple: boolean) {
|
||||
return multiple ? `${chalk.gray('-')} ${chalk.bold(cn)}` : chalk.bold(cn);
|
||||
}
|
||||
|
||||
function formatCertFirstCn(time, cert, cn, multiple) {
|
||||
function formatCertFirstCn(time: Date, cert: Cert, cn: string, multiple: boolean): string[] {
|
||||
return [
|
||||
cert.uid,
|
||||
formatCertCn(cn, multiple),
|
||||
formatExpirationDate(new Date(cert.expiration)),
|
||||
cert.autoRenew ? 'yes' : 'no',
|
||||
chalk.gray(ms(time - new Date(cert.created)))
|
||||
chalk.gray(ms(time.getTime() - new Date(cert.created).getTime()))
|
||||
];
|
||||
}
|
||||
|
||||
function formatExpirationDate(date) {
|
||||
const diff = date - Date.now();
|
||||
function formatExpirationDate(date: Date) {
|
||||
const diff = date.getTime() - Date.now();
|
||||
return diff < 0
|
||||
? chalk.gray(`${ms(-diff)} ago`)
|
||||
: chalk.gray(`in ${ms(diff)}`);
|
||||
@@ -143,8 +152,8 @@ function formatExpirationDate(date) {
|
||||
* to 'wildcard' since that will allow psl get the root domain
|
||||
* properly to make the comparison.
|
||||
*/
|
||||
function sortByCn(certsList) {
|
||||
return certsList.concat().sort((a, b) => {
|
||||
function sortByCn(certsList: Cert[]) {
|
||||
return certsList.concat().sort((a: Cert, b: Cert) => {
|
||||
const domainA = psl.get(a.cns[0].replace('*', 'wildcard'));
|
||||
const domainB = psl.get(b.cns[0].replace('*', 'wildcard'));
|
||||
if (!domainA || !domainB) return 0;
|
||||
@@ -10,7 +10,9 @@ export const latestHelp = () => `
|
||||
|
||||
${chalk.dim('Basic')}
|
||||
|
||||
deploy [path] Performs a deployment ${chalk.bold('(default)')}
|
||||
deploy [path] Performs a deployment ${chalk.bold(
|
||||
'(default)'
|
||||
)}
|
||||
dev Start a local development server
|
||||
init [example] Initialize an example project
|
||||
ls | list [app] Lists deployments
|
||||
@@ -18,7 +20,6 @@ export const latestHelp = () => `
|
||||
login [email] Logs into your account or creates a new one
|
||||
logout Logs out of your account
|
||||
switch [scope] Switches between teams and your personal account
|
||||
update Updates Now CLI to the latest version
|
||||
help [cmd] Displays complete help for [cmd]
|
||||
|
||||
${chalk.dim('Advanced')}
|
||||
@@ -29,7 +30,6 @@ export const latestHelp = () => `
|
||||
certs [cmd] Manages your SSL certificates
|
||||
secrets [name] Manages your secret environment variables
|
||||
logs [url] Displays the logs for a deployment
|
||||
scale [args] Scales the instance count of a deployment
|
||||
teams Manages your teams
|
||||
whoami Shows the username of the currently logged in user
|
||||
|
||||
@@ -115,7 +115,7 @@ export const latestArgs = {
|
||||
'-e': '--env',
|
||||
'-b': '--build-env',
|
||||
'-C': '--no-clipboard',
|
||||
'-m': '--meta'
|
||||
'-m': '--meta',
|
||||
};
|
||||
|
||||
export const legacyArgsMri = {
|
||||
@@ -126,7 +126,8 @@ export const legacyArgsMri = {
|
||||
'meta',
|
||||
'session-affinity',
|
||||
'regions',
|
||||
'dotenv'
|
||||
'dotenv',
|
||||
'target',
|
||||
],
|
||||
boolean: [
|
||||
'help',
|
||||
@@ -143,11 +144,12 @@ export const legacyArgsMri = {
|
||||
'public',
|
||||
'no-scale',
|
||||
'no-verify',
|
||||
'dotenv'
|
||||
'dotenv',
|
||||
'prod',
|
||||
],
|
||||
default: {
|
||||
C: false,
|
||||
clipboard: true
|
||||
clipboard: true,
|
||||
},
|
||||
alias: {
|
||||
env: 'e',
|
||||
@@ -164,8 +166,8 @@ export const legacyArgsMri = {
|
||||
'session-affinity': 'S',
|
||||
name: 'n',
|
||||
project: 'P',
|
||||
alias: 'a'
|
||||
}
|
||||
alias: 'a',
|
||||
},
|
||||
};
|
||||
|
||||
// The following arg parsing is simply to make it compatible
|
||||
|
||||
@@ -3,18 +3,14 @@ import bytes from 'bytes';
|
||||
import { write as copy } from 'clipboardy';
|
||||
import chalk from 'chalk';
|
||||
import title from 'title';
|
||||
import Progress from 'progress';
|
||||
import Client from '../../util/client';
|
||||
import wait from '../../util/output/wait';
|
||||
import { handleError } from '../../util/error';
|
||||
import getArgs from '../../util/get-args';
|
||||
import toHumanPath from '../../util/humanize-path';
|
||||
import Now from '../../util';
|
||||
import stamp from '../../util/output/stamp.ts';
|
||||
import { isReady, isDone, isFailed } from '../../util/build-state';
|
||||
import createDeploy from '../../util/deploy/create-deploy';
|
||||
import getDeploymentByIdOrHost from '../../util/deploy/get-deployment-by-id-or-host';
|
||||
import sleep from '../../util/sleep';
|
||||
import parseMeta from '../../util/parse-meta';
|
||||
import code from '../../util/output/code';
|
||||
import param from '../../util/output/param';
|
||||
@@ -36,12 +32,15 @@ import {
|
||||
AliasDomainConfigured,
|
||||
MissingBuildScript,
|
||||
ConflictingFilePath,
|
||||
ConflictingPathSegment
|
||||
ConflictingPathSegment,
|
||||
BuildError,
|
||||
NotDomainOwner,
|
||||
} from '../../util/errors-ts';
|
||||
import { SchemaValidationFailed } from '../../util/errors';
|
||||
import purchaseDomainIfAvailable from '../../util/domains/purchase-domain-if-available';
|
||||
import handleCertError from '../../util/certs/handle-cert-error';
|
||||
import isWildcardAlias from '../../util/alias/is-wildcard-alias';
|
||||
import shouldDeployDir from '../../util/deploy/should-deploy-dir';
|
||||
|
||||
const addProcessEnv = async (log, env) => {
|
||||
let val;
|
||||
@@ -72,11 +71,12 @@ const addProcessEnv = async (log, env) => {
|
||||
};
|
||||
|
||||
const deploymentErrorMsg = `Your deployment failed. Please retry later. More: https://err.sh/now/deployment-error`;
|
||||
const prepareAlias = input => isWildcardAlias(input) ? input : `https://${input}`;
|
||||
const prepareAlias = input =>
|
||||
isWildcardAlias(input) ? input : `https://${input}`;
|
||||
|
||||
const printDeploymentStatus = async (
|
||||
output,
|
||||
{ url, readyState, alias: aliasList, aliasError },
|
||||
{ readyState, alias: aliasList, aliasError },
|
||||
deployStamp,
|
||||
clipboardEnabled,
|
||||
localConfig,
|
||||
@@ -94,10 +94,18 @@ const printDeploymentStatus = async (
|
||||
const preparedAlias = prepareAlias(firstAlias);
|
||||
try {
|
||||
await copy(`https://${firstAlias}`);
|
||||
output.ready(`Deployed to ${chalk.bold(chalk.cyan(preparedAlias))} ${chalk.gray('[in clipboard]')} ${deployStamp()}`);
|
||||
output.ready(
|
||||
`Deployed to ${chalk.bold(
|
||||
chalk.cyan(preparedAlias)
|
||||
)} ${chalk.gray('[in clipboard]')} ${deployStamp()}`
|
||||
);
|
||||
} catch (err) {
|
||||
output.debug(`Error copying to clipboard: ${err}`);
|
||||
output.ready(`Deployed to ${chalk.bold(chalk.cyan(preparedAlias))} ${deployStamp()}`);
|
||||
output.ready(
|
||||
`Deployed to ${chalk.bold(
|
||||
chalk.cyan(preparedAlias)
|
||||
)} ${deployStamp()}`
|
||||
);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
@@ -109,13 +117,17 @@ const printDeploymentStatus = async (
|
||||
|
||||
for (const alias of aliasList) {
|
||||
const index = aliasList.indexOf(alias);
|
||||
const isLast = index === (aliasList.length - 1);
|
||||
const isLast = index === aliasList.length - 1;
|
||||
const shouldCopy = matching ? alias === matching : isLast;
|
||||
|
||||
if (shouldCopy && clipboardEnabled) {
|
||||
try {
|
||||
await copy(`https://${alias}`);
|
||||
output.print(`- ${chalk.bold(chalk.cyan(prepareAlias(alias)))} ${chalk.gray('[in clipboard]')}\n`);
|
||||
output.print(
|
||||
`- ${chalk.bold(chalk.cyan(prepareAlias(alias)))} ${chalk.gray(
|
||||
'[in clipboard]'
|
||||
)}\n`
|
||||
);
|
||||
|
||||
continue;
|
||||
} catch (err) {
|
||||
@@ -138,20 +150,6 @@ const printDeploymentStatus = async (
|
||||
return 1;
|
||||
}
|
||||
|
||||
const failedBuilds = builds.filter(isFailed);
|
||||
const amount = failedBuilds.length;
|
||||
|
||||
if (amount > 0) {
|
||||
output.error('Build failed');
|
||||
output.error(
|
||||
`Check your logs at https://${url}/_logs or run ${code(
|
||||
`now logs ${url}`
|
||||
)}`
|
||||
);
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
output.error(deploymentErrorMsg);
|
||||
return 1;
|
||||
};
|
||||
@@ -206,7 +204,15 @@ export default async function main(
|
||||
return 1;
|
||||
}
|
||||
|
||||
const { apiUrl, authConfig: { token }, config: { currentTeam } } = ctx;
|
||||
if (!(await shouldDeployDir(argv._[0], output))) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
const {
|
||||
apiUrl,
|
||||
authConfig: { token },
|
||||
config: { currentTeam },
|
||||
} = ctx;
|
||||
const { log, debug, error, warn } = output;
|
||||
const paths = Object.keys(stats);
|
||||
const debugEnabled = argv['--debug'];
|
||||
@@ -236,7 +242,6 @@ export default async function main(
|
||||
parseMeta(argv['--meta'])
|
||||
);
|
||||
|
||||
let syncCount;
|
||||
let deployStamp = stamp();
|
||||
let deployment = null;
|
||||
|
||||
@@ -289,11 +294,15 @@ export default async function main(
|
||||
parseEnv(argv['--env'])
|
||||
);
|
||||
|
||||
// Enable debug mode for builders
|
||||
const buildDebugEnv = debugEnabled ? { NOW_BUILDER_DEBUG: '1' } : {};
|
||||
|
||||
// Merge build env out of `build.env` from now.json, and `--build-env` args
|
||||
const deploymentBuildEnv = Object.assign(
|
||||
{},
|
||||
parseEnv(localConfig.build && localConfig.build.env),
|
||||
parseEnv(argv['--build-env'])
|
||||
parseEnv(argv['--build-env']),
|
||||
buildDebugEnv
|
||||
);
|
||||
|
||||
// If there's any undefined values, then inherit them from this process
|
||||
@@ -313,33 +322,45 @@ export default async function main(
|
||||
|
||||
try {
|
||||
// $FlowFixMe
|
||||
const project = getProjectName({argv, nowConfig: localConfig, isFile, paths});
|
||||
const project = getProjectName({
|
||||
argv,
|
||||
nowConfig: localConfig,
|
||||
isFile,
|
||||
paths,
|
||||
});
|
||||
log(`Using project ${chalk.bold(project)}`);
|
||||
|
||||
const createArgs = {
|
||||
name: project,
|
||||
env: deploymentEnv,
|
||||
build: { env: deploymentBuildEnv },
|
||||
forceNew: argv['--force'],
|
||||
quiet,
|
||||
wantsPublic: argv['--public'] || localConfig.public,
|
||||
isFile,
|
||||
type: null,
|
||||
nowConfig: localConfig,
|
||||
regions,
|
||||
meta
|
||||
name: project,
|
||||
env: deploymentEnv,
|
||||
build: { env: deploymentBuildEnv },
|
||||
forceNew: argv['--force'],
|
||||
quiet,
|
||||
wantsPublic: argv['--public'] || localConfig.public,
|
||||
isFile,
|
||||
type: null,
|
||||
nowConfig: localConfig,
|
||||
regions,
|
||||
meta,
|
||||
deployStamp,
|
||||
};
|
||||
|
||||
if (argv['--target']) {
|
||||
const deprecatedTarget = argv['--target'];
|
||||
|
||||
if (!['staging', 'production'].includes(deprecatedTarget)) {
|
||||
error(`The specified ${param('--target')} ${code(deprecatedTarget)} is not valid`);
|
||||
error(
|
||||
`The specified ${param('--target')} ${code(
|
||||
deprecatedTarget
|
||||
)} is not valid`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (deprecatedTarget === 'production') {
|
||||
warn('We recommend using the much shorter `--prod` option instead of `--target production` (deprecated)');
|
||||
warn(
|
||||
'We recommend using the much shorter `--prod` option instead of `--target production` (deprecated)'
|
||||
);
|
||||
}
|
||||
|
||||
output.debug(`Setting target to ${deprecatedTarget}`);
|
||||
@@ -351,7 +372,7 @@ export default async function main(
|
||||
|
||||
deployStamp = stamp();
|
||||
|
||||
const firstDeployCall = await createDeploy(
|
||||
deployment = await createDeploy(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
@@ -360,13 +381,49 @@ export default async function main(
|
||||
ctx
|
||||
);
|
||||
|
||||
if (deployment instanceof NotDomainOwner) {
|
||||
output.error(deployment);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const deploymentResponse = handleCertError(
|
||||
output,
|
||||
await getDeploymentByIdOrHost(now, contextName, deployment.id, 'v10')
|
||||
);
|
||||
|
||||
if (deploymentResponse === 1) {
|
||||
return deploymentResponse;
|
||||
}
|
||||
|
||||
if (
|
||||
firstDeployCall instanceof DomainNotFound &&
|
||||
firstDeployCall.meta && firstDeployCall.meta.domain
|
||||
deploymentResponse instanceof DeploymentNotFound ||
|
||||
deploymentResponse instanceof DeploymentPermissionDenied ||
|
||||
deploymentResponse instanceof InvalidDeploymentId
|
||||
) {
|
||||
output.debug(`The domain ${
|
||||
firstDeployCall.meta.domain
|
||||
} was not found, trying to purchase it`);
|
||||
output.error(deploymentResponse.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (handleCertError(output, deployment) === 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (deployment === null) {
|
||||
error('Uploading failed. Please try again.');
|
||||
return 1;
|
||||
}
|
||||
} catch (err) {
|
||||
debug(`Error: ${err}\n${err.stack}`);
|
||||
|
||||
if (err instanceof NotDomainOwner) {
|
||||
output.error(err.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (err instanceof DomainNotFound && err.meta && err.meta.domain) {
|
||||
output.debug(
|
||||
`The domain ${err.meta.domain} was not found, trying to purchase it`
|
||||
);
|
||||
|
||||
const purchase = await purchaseDomainIfAvailable(
|
||||
output,
|
||||
@@ -374,16 +431,14 @@ export default async function main(
|
||||
apiUrl: ctx.apiUrl,
|
||||
token: ctx.authConfig.token,
|
||||
currentTeam: ctx.config.currentTeam,
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
}),
|
||||
firstDeployCall.meta.domain,
|
||||
err.meta.domain,
|
||||
contextName
|
||||
);
|
||||
|
||||
if (purchase === true) {
|
||||
output.success(`Successfully purchased the domain ${
|
||||
firstDeployCall.meta.domain
|
||||
}!`);
|
||||
output.success(`Successfully purchased the domain ${err.meta.domain}!`);
|
||||
|
||||
// We exit if the purchase is completed since
|
||||
// the domain verification can take some time
|
||||
@@ -391,7 +446,7 @@ export default async function main(
|
||||
}
|
||||
|
||||
if (purchase === false || purchase instanceof UserAborted) {
|
||||
handleCreateDeployError(output, firstDeployCall);
|
||||
handleCreateDeployError(output, deployment);
|
||||
return 1;
|
||||
}
|
||||
|
||||
@@ -399,120 +454,36 @@ export default async function main(
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (handleCertError(output, firstDeployCall) === 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (
|
||||
firstDeployCall instanceof DomainNotFound ||
|
||||
firstDeployCall instanceof DomainNotVerified ||
|
||||
firstDeployCall instanceof DomainPermissionDenied ||
|
||||
firstDeployCall instanceof DomainVerificationFailed ||
|
||||
firstDeployCall instanceof SchemaValidationFailed ||
|
||||
firstDeployCall instanceof InvalidDomain ||
|
||||
firstDeployCall instanceof DeploymentNotFound ||
|
||||
firstDeployCall instanceof BuildsRateLimited ||
|
||||
firstDeployCall instanceof DeploymentsRateLimited ||
|
||||
firstDeployCall instanceof AliasDomainConfigured ||
|
||||
firstDeployCall instanceof MissingBuildScript ||
|
||||
firstDeployCall instanceof ConflictingFilePath ||
|
||||
firstDeployCall instanceof ConflictingPathSegment
|
||||
err instanceof DomainNotFound ||
|
||||
err instanceof DomainNotVerified ||
|
||||
err instanceof NotDomainOwner ||
|
||||
err instanceof DomainPermissionDenied ||
|
||||
err instanceof DomainVerificationFailed ||
|
||||
err instanceof SchemaValidationFailed ||
|
||||
err instanceof InvalidDomain ||
|
||||
err instanceof DeploymentNotFound ||
|
||||
err instanceof BuildsRateLimited ||
|
||||
err instanceof DeploymentsRateLimited ||
|
||||
err instanceof AliasDomainConfigured ||
|
||||
err instanceof MissingBuildScript ||
|
||||
err instanceof ConflictingFilePath ||
|
||||
err instanceof ConflictingPathSegment
|
||||
) {
|
||||
handleCreateDeployError(output, firstDeployCall);
|
||||
handleCreateDeployError(output, err);
|
||||
return 1;
|
||||
}
|
||||
|
||||
deployment = firstDeployCall;
|
||||
if (err instanceof BuildError) {
|
||||
output.error('Build failed');
|
||||
output.error(
|
||||
`Check your logs at ${now.url}/_logs or run ${code(
|
||||
`now logs ${now.url}`
|
||||
)}`
|
||||
);
|
||||
|
||||
if (now.syncFileCount > 0) {
|
||||
const uploadStamp = stamp();
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
if (now.syncFileCount !== now.fileCount) {
|
||||
debug(`Total files ${now.fileCount}, ${now.syncFileCount} changed`);
|
||||
}
|
||||
|
||||
const size = bytes(now.syncAmount);
|
||||
syncCount = `${now.syncFileCount} file${now.syncFileCount > 1
|
||||
? 's'
|
||||
: ''}`;
|
||||
const bar = new Progress(
|
||||
`${chalk.gray(
|
||||
'>'
|
||||
)} Upload [:bar] :percent :etas (${size}) [${syncCount}]`,
|
||||
{
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: now.syncAmount,
|
||||
clear: true
|
||||
}
|
||||
);
|
||||
|
||||
now.upload({ scale: {} });
|
||||
|
||||
now.on('upload', ({ names, data }) => {
|
||||
debug(`Uploaded: ${names.join(' ')} (${bytes(data.length)})`);
|
||||
});
|
||||
|
||||
now.on('uploadProgress', progress => {
|
||||
bar.tick(progress);
|
||||
});
|
||||
|
||||
now.on('complete', resolve);
|
||||
|
||||
now.on('error', err => {
|
||||
error('Upload failed');
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
if (!quiet && syncCount) {
|
||||
log(`Synced ${syncCount} (${bytes(now.syncAmount)}) ${uploadStamp()}`);
|
||||
}
|
||||
|
||||
for (let i = 0; i < 4; i += 1) {
|
||||
deployStamp = stamp();
|
||||
const secondDeployCall = await createDeploy(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
paths,
|
||||
createArgs
|
||||
);
|
||||
|
||||
if (handleCertError(output, secondDeployCall) === 1) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (
|
||||
secondDeployCall instanceof DomainPermissionDenied ||
|
||||
secondDeployCall instanceof DomainVerificationFailed ||
|
||||
secondDeployCall instanceof SchemaValidationFailed ||
|
||||
secondDeployCall instanceof DeploymentNotFound ||
|
||||
secondDeployCall instanceof DeploymentsRateLimited ||
|
||||
secondDeployCall instanceof AliasDomainConfigured ||
|
||||
secondDeployCall instanceof MissingBuildScript ||
|
||||
secondDeployCall instanceof ConflictingFilePath ||
|
||||
secondDeployCall instanceof ConflictingPathSegment
|
||||
) {
|
||||
handleCreateDeployError(output, secondDeployCall);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (now.syncFileCount === 0) {
|
||||
deployment = secondDeployCall;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (deployment === null) {
|
||||
error('Uploading failed. Please try again.');
|
||||
return 1;
|
||||
}
|
||||
return 1;
|
||||
}
|
||||
} catch (err) {
|
||||
debug(`Error: ${err}\n${err.stack}`);
|
||||
|
||||
if (err.keyword === 'additionalProperties' && err.dataPath === '.scale') {
|
||||
const { additionalProperty = '' } = err.params || {};
|
||||
@@ -531,114 +502,14 @@ export default async function main(
|
||||
return 1;
|
||||
}
|
||||
|
||||
const { url } = now;
|
||||
|
||||
if (isTTY) {
|
||||
log(`${url} ${chalk.gray(`[v2]`)} ${deployStamp()}`);
|
||||
} else {
|
||||
process.stdout.write(url);
|
||||
}
|
||||
|
||||
// If an error occurred, we want to let it fall down to rendering
|
||||
// builds so the user can see in which build the error occurred.
|
||||
if (isReady(deployment)) {
|
||||
return printDeploymentStatus(output, deployment, deployStamp, !argv['--no-clipboard'], localConfig);
|
||||
}
|
||||
|
||||
const sleepingTime = ms('1.5s');
|
||||
const allBuildsTime = stamp();
|
||||
const times = {};
|
||||
const buildsUrl = `/v1/now/deployments/${deployment.id}/builds`;
|
||||
|
||||
let builds = [];
|
||||
let buildsCompleted = false;
|
||||
let buildSpinner = null;
|
||||
|
||||
let deploymentSpinner = null;
|
||||
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
if (!buildsCompleted) {
|
||||
const { builds: freshBuilds } = await now.fetch(buildsUrl);
|
||||
|
||||
// If there are no builds, we need to exit.
|
||||
if (freshBuilds.length === 0 || freshBuilds.every(isDone)) {
|
||||
builds = freshBuilds;
|
||||
buildsCompleted = true;
|
||||
} else {
|
||||
for (const build of freshBuilds) {
|
||||
const id = build.id;
|
||||
const done = isDone(build);
|
||||
|
||||
if (times[id]) {
|
||||
if (done && typeof times[id] === 'function') {
|
||||
times[id] = times[id]();
|
||||
}
|
||||
} else {
|
||||
times[id] = done ? allBuildsTime() : stamp();
|
||||
}
|
||||
}
|
||||
|
||||
if (JSON.stringify(builds) !== JSON.stringify(freshBuilds)) {
|
||||
builds = freshBuilds;
|
||||
|
||||
if (buildSpinner === null) {
|
||||
buildSpinner = wait('Building...');
|
||||
}
|
||||
|
||||
buildsCompleted = builds.every(isDone);
|
||||
|
||||
if (builds.some(isFailed)) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const deploymentResponse = handleCertError(
|
||||
output,
|
||||
await getDeploymentByIdOrHost(now, contextName, deployment.id, 'v9')
|
||||
)
|
||||
|
||||
if (deploymentResponse === 1) {
|
||||
return deploymentResponse;
|
||||
}
|
||||
|
||||
if (
|
||||
deploymentResponse instanceof DeploymentNotFound ||
|
||||
deploymentResponse instanceof DeploymentPermissionDenied ||
|
||||
deploymentResponse instanceof InvalidDeploymentId
|
||||
) {
|
||||
output.error(deploymentResponse.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (isReady(deploymentResponse) || isFailed(deploymentResponse)) {
|
||||
deployment = deploymentResponse;
|
||||
|
||||
if (typeof deploymentSpinner === 'function') {
|
||||
// This stops it
|
||||
deploymentSpinner();
|
||||
}
|
||||
|
||||
break;
|
||||
} else if (!deploymentSpinner) {
|
||||
if (typeof buildSpinner === 'function') {
|
||||
buildSpinner();
|
||||
}
|
||||
|
||||
deploymentSpinner = wait('Finalizing...');
|
||||
}
|
||||
}
|
||||
|
||||
await sleep(sleepingTime);
|
||||
}
|
||||
|
||||
if (typeof buildSpinner === 'function') {
|
||||
buildSpinner();
|
||||
}
|
||||
|
||||
return printDeploymentStatus(output, deployment, deployStamp, !argv['--no-clipboard'], localConfig, builds);
|
||||
};
|
||||
return printDeploymentStatus(
|
||||
output,
|
||||
deployment,
|
||||
deployStamp,
|
||||
!argv['--no-clipboard'],
|
||||
localConfig
|
||||
);
|
||||
}
|
||||
|
||||
function handleCreateDeployError(output, error) {
|
||||
if (error instanceof InvalidDomain) {
|
||||
@@ -708,18 +579,20 @@ function handleCreateDeployError(output, error) {
|
||||
}
|
||||
if (error instanceof TooManyRequests) {
|
||||
output.error(
|
||||
`Too many requests detected for ${error.meta
|
||||
.api} API. Try again in ${ms(error.meta.retryAfter * 1000, {
|
||||
long: true
|
||||
})}.`
|
||||
`Too many requests detected for ${error.meta.api} API. Try again in ${ms(
|
||||
error.meta.retryAfter * 1000,
|
||||
{
|
||||
long: true,
|
||||
}
|
||||
)}.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
if (error instanceof DomainNotVerified) {
|
||||
output.error(
|
||||
`The domain used as an alias ${
|
||||
chalk.underline(error.meta.domain)
|
||||
} is not verified yet. Please verify it.`
|
||||
`The domain used as an alias ${chalk.underline(
|
||||
error.meta.domain
|
||||
)} is not verified yet. Please verify it.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
@@ -730,6 +603,7 @@ function handleCreateDeployError(output, error) {
|
||||
}
|
||||
if (
|
||||
error instanceof DeploymentNotFound ||
|
||||
error instanceof NotDomainOwner ||
|
||||
error instanceof DeploymentsRateLimited ||
|
||||
error instanceof AliasDomainConfigured ||
|
||||
error instanceof MissingBuildScript ||
|
||||
|
||||
@@ -2,7 +2,6 @@ import { resolve, basename, join } from 'path';
|
||||
import { eraseLines } from 'ansi-escapes';
|
||||
// @ts-ignore
|
||||
import { write as copy } from 'clipboardy';
|
||||
import bytes from 'bytes';
|
||||
import chalk from 'chalk';
|
||||
import dotenv from 'dotenv';
|
||||
import fs from 'fs-extra';
|
||||
@@ -13,7 +12,6 @@ import ms from 'ms';
|
||||
// @ts-ignore
|
||||
import title from 'title';
|
||||
import plural from 'pluralize';
|
||||
import Progress from 'progress';
|
||||
// @ts-ignore
|
||||
import { handleError } from '../../util/error';
|
||||
import chars from '../../util/output/chars';
|
||||
@@ -34,19 +32,16 @@ import promptOptions from '../../util/prompt-options';
|
||||
// @ts-ignore
|
||||
import readMetaData from '../../util/read-metadata';
|
||||
import toHumanPath from '../../util/humanize-path';
|
||||
import combineAsyncGenerators from '../../util/combine-async-generators';
|
||||
// @ts-ignore
|
||||
import createDeploy from '../../util/deploy/create-deploy';
|
||||
import eventListenerToGenerator from '../../util/event-listener-to-generator';
|
||||
// @ts-ignore
|
||||
import formatLogCmd from '../../util/output/format-log-cmd';
|
||||
// @ts-ignore
|
||||
import formatLogOutput from '../../util/output/format-log-output';
|
||||
// @ts-ignore
|
||||
import getEventsStream from '../../util/deploy/get-events-stream';
|
||||
import shouldDeployDir from '../../util/deploy/should-deploy-dir';
|
||||
// @ts-ignore
|
||||
import getInstanceIndex from '../../util/deploy/get-instance-index';
|
||||
import getStateChangeFromPolling from '../../util/deploy/get-state-change-from-polling';
|
||||
import joinWords from '../../util/output/join-words';
|
||||
// @ts-ignore
|
||||
import normalizeRegionsList from '../../util/scale/normalize-regions-list';
|
||||
@@ -67,11 +62,12 @@ import {
|
||||
DomainVerificationFailed,
|
||||
TooManyRequests,
|
||||
VerifyScaleTimeout,
|
||||
DeploymentsRateLimited
|
||||
DeploymentsRateLimited,
|
||||
NotDomainOwner,
|
||||
} from '../../util/errors-ts';
|
||||
import {
|
||||
InvalidAllForScale,
|
||||
InvalidRegionOrDCForScale
|
||||
InvalidRegionOrDCForScale,
|
||||
} from '../../util/errors';
|
||||
import { SchemaValidationFailed } from '../../util/errors';
|
||||
import handleCertError from '../../util/certs/handle-cert-error';
|
||||
@@ -198,7 +194,7 @@ const promptForEnvFields = async (list: string[]) => {
|
||||
for (const field of list) {
|
||||
questions.push({
|
||||
name: field,
|
||||
message: field
|
||||
message: field,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -220,7 +216,7 @@ const promptForEnvFields = async (list: string[]) => {
|
||||
|
||||
async function canUseZeroConfig(cwd: string): Promise<boolean> {
|
||||
try {
|
||||
const pkg = (await readPackage(join(cwd, 'package.json')));
|
||||
const pkg = await readPackage(join(cwd, 'package.json'));
|
||||
|
||||
if (!pkg || pkg instanceof Error) {
|
||||
return false;
|
||||
@@ -250,7 +246,7 @@ async function canUseZeroConfig(cwd: string): Promise<boolean> {
|
||||
) {
|
||||
return true;
|
||||
}
|
||||
} catch(_) {}
|
||||
} catch (_) {}
|
||||
|
||||
return false;
|
||||
}
|
||||
@@ -275,6 +271,10 @@ export default async function main(
|
||||
paths = [process.cwd()];
|
||||
}
|
||||
|
||||
if (!(await shouldDeployDir(argv._[0], output))) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Options
|
||||
forceNew = argv.force;
|
||||
deploymentName = argv.name;
|
||||
@@ -296,15 +296,27 @@ export default async function main(
|
||||
quiet = !isTTY;
|
||||
({ log, error, note, debug, warn } = output);
|
||||
|
||||
const infoUrl = await canUseZeroConfig(paths[0])
|
||||
? 'https://zeit.co/guides/migrate-to-zeit-now'
|
||||
: 'https://zeit.co/docs/v2/advanced/platform/changes-in-now-2-0'
|
||||
const infoUrl = 'https://zeit.co/guides/migrate-to-zeit-now';
|
||||
|
||||
warn(`You are using an old version of the Now Platform. More: ${link(infoUrl)}`);
|
||||
warn(
|
||||
`You are using an old version of the Now Platform. More: ${link(infoUrl)}`
|
||||
);
|
||||
|
||||
if (argv.prod || argv.target) {
|
||||
error(
|
||||
`The option ${cmd(
|
||||
argv.prod ? '--prod' : '--target'
|
||||
)} is not supported for Now 1.0 deployments. To manually alias a deployment, use ${cmd(
|
||||
'now alias'
|
||||
)} instead.`
|
||||
);
|
||||
await exit(1);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const {
|
||||
authConfig: { token },
|
||||
config
|
||||
config,
|
||||
} = ctx;
|
||||
|
||||
try {
|
||||
@@ -314,7 +326,7 @@ export default async function main(
|
||||
token,
|
||||
config,
|
||||
firstRun: true,
|
||||
deploymentType: undefined
|
||||
deploymentType: undefined,
|
||||
});
|
||||
} catch (err) {
|
||||
await stopDeployment(err);
|
||||
@@ -327,7 +339,7 @@ async function sync({
|
||||
token,
|
||||
config: { currentTeam },
|
||||
firstRun,
|
||||
deploymentType
|
||||
deploymentType,
|
||||
}: SyncOptions): Promise<void> {
|
||||
return new Promise(async (_resolve, reject) => {
|
||||
let deployStamp = stamp();
|
||||
@@ -476,7 +488,7 @@ async function sync({
|
||||
|
||||
// XXX: legacy
|
||||
deploymentType,
|
||||
sessionAffinity
|
||||
sessionAffinity,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -486,7 +498,7 @@ async function sync({
meta,
deploymentName,
deploymentType,
sessionAffinity
sessionAffinity,
} = await readMeta(
paths[0],
deploymentName,
@@ -499,7 +511,7 @@ async function sync({
'dockerfile_missing',
'no_dockerfile_commands',
'unsupported_deployment_type',
'multiple_manifests'
'multiple_manifests',
];

if (
@@ -537,7 +549,7 @@ async function sync({
// Read scale and fail if we have both regions and scale
if (regions.length > 0 && Object.keys(scaleFromConfig).length > 0) {
error(
'Can\'t set both `regions` and `scale` options simultaneously',
"Can't set both `regions` and `scale` options simultaneously",
'regions-and-scale-at-once'
);
await exit(1);
@@ -548,9 +560,7 @@ async function sync({
dcIds = normalizeRegionsList(regions);
if (dcIds instanceof InvalidRegionOrDCForScale) {
error(
`The value "${
dcIds.meta.regionOrDC
}" is not a valid region or DC identifier`
`The value "${dcIds.meta.regionOrDC}" is not a valid region or DC identifier`
);
await exit(1);
return 1;
@@ -565,7 +575,7 @@ async function sync({
scale = dcIds.reduce(
(result: DcScale, dcId: string) => ({
...result,
[dcId]: { min: 0, max: 1 }
[dcId]: { min: 0, max: 1 },
}),
{}
);
@@ -661,8 +671,9 @@ async function sync({
}

const hasSecrets = Object.keys(deploymentEnv).some(key =>
deploymentEnv[key].startsWith('@')
(deploymentEnv[key] || '').startsWith('@')
);

const secretsPromise = hasSecrets ? now.listSecrets() : null;

const findSecret = async (uidOrName: string) => {
@@ -754,15 +765,13 @@ async function sync({
parseMeta(argv.meta)
);

let syncCount;

try {
meta.name = getProjectName({
argv,
nowConfig,
isFile,
paths,
pre: meta.name
pre: meta.name,
});
log(`Using project ${chalk.bold(meta.name)}`);
const createArgs = Object.assign(
@@ -776,13 +785,15 @@ async function sync({
scale,
wantsPublic,
sessionAffinity,
isFile
isFile,
nowConfig,
deployStamp,
},
meta
);

deployStamp = stamp();
const firstDeployCall = await createDeploy(
deployment = await createDeploy(
output,
now,
contextName,
@@ -790,118 +801,24 @@ async function sync({
|
||||
createArgs
|
||||
);
|
||||
|
||||
const handledResult = handleCertError(output, firstDeployCall);
|
||||
const handledResult = handleCertError(output, deployment);
|
||||
if (handledResult === 1) {
|
||||
return handledResult;
|
||||
}
|
||||
|
||||
if (
|
||||
firstDeployCall instanceof DomainNotFound ||
|
||||
firstDeployCall instanceof DomainPermissionDenied ||
|
||||
firstDeployCall instanceof DomainVerificationFailed ||
|
||||
firstDeployCall instanceof SchemaValidationFailed ||
|
||||
firstDeployCall instanceof DeploymentNotFound ||
|
||||
firstDeployCall instanceof DeploymentsRateLimited
|
||||
deployment instanceof DomainNotFound ||
|
||||
deployment instanceof NotDomainOwner ||
|
||||
deployment instanceof DomainPermissionDenied ||
|
||||
deployment instanceof DomainVerificationFailed ||
|
||||
deployment instanceof SchemaValidationFailed ||
|
||||
deployment instanceof DeploymentNotFound ||
|
||||
deployment instanceof DeploymentsRateLimited
|
||||
) {
|
||||
handleCreateDeployError(output, firstDeployCall);
|
||||
handleCreateDeployError(output, deployment);
|
||||
await exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
deployment = firstDeployCall;
|
||||
|
||||
if (now.syncFileCount > 0) {
|
||||
const uploadStamp = stamp();
|
||||
await new Promise(resolve => {
|
||||
if (now.syncFileCount !== now.fileCount) {
|
||||
debug(`Total files ${now.fileCount}, ${now.syncFileCount} changed`);
|
||||
}
|
||||
|
||||
const size = bytes(now.syncAmount);
|
||||
syncCount = `${now.syncFileCount} file${
|
||||
now.syncFileCount > 1 ? 's' : ''
|
||||
}`;
|
||||
const bar = new Progress(
|
||||
`${chalk.gray(
|
||||
'>'
|
||||
)} Upload [:bar] :percent :etas (${size}) [${syncCount}]`,
|
||||
{
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: now.syncAmount,
|
||||
clear: true
|
||||
}
|
||||
);
|
||||
|
||||
now.upload({ scale });
|
||||
|
||||
now.on(
|
||||
'upload',
|
||||
({ names, data }: { names: string[]; data: Buffer }) => {
|
||||
debug(`Uploaded: ${names.join(' ')} (${bytes(data.length)})`);
|
||||
}
|
||||
);
|
||||
|
||||
now.on('uploadProgress', (progress: number) => {
|
||||
bar.tick(progress);
|
||||
});
|
||||
|
||||
now.on('complete', resolve);
|
||||
|
||||
now.on('error', (err: Error) => {
|
||||
error('Upload failed');
|
||||
reject(err);
|
||||
});
|
||||
});
|
||||
|
||||
if (!quiet && syncCount) {
|
||||
log(
|
||||
`Synced ${syncCount} (${bytes(now.syncAmount)}) ${uploadStamp()}`
|
||||
);
|
||||
}
|
||||
|
||||
for (let i = 0; i < 4; i += 1) {
|
||||
deployStamp = stamp();
|
||||
const secondDeployCall = await createDeploy(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
paths,
|
||||
createArgs
|
||||
);
|
||||
|
||||
const handledResult = handleCertError(output, secondDeployCall);
|
||||
if (handledResult === 1) {
|
||||
return handledResult;
|
||||
}
|
||||
|
||||
if (
|
||||
secondDeployCall instanceof DomainNotFound ||
|
||||
secondDeployCall instanceof DomainPermissionDenied ||
|
||||
secondDeployCall instanceof DomainVerificationFailed ||
|
||||
secondDeployCall instanceof SchemaValidationFailed ||
|
||||
secondDeployCall instanceof TooManyRequests ||
|
||||
secondDeployCall instanceof DeploymentNotFound ||
|
||||
secondDeployCall instanceof DeploymentsRateLimited
|
||||
) {
|
||||
handleCreateDeployError(output, secondDeployCall);
|
||||
await exit(1);
|
||||
return;
|
||||
}
|
||||
|
||||
if (now.syncFileCount === 0) {
|
||||
deployment = secondDeployCall;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (deployment === null) {
|
||||
error('Uploading failed. Please try again.');
|
||||
await exit(1);
|
||||
return;
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
if (err.code === 'plan_requires_public') {
|
||||
if (!wantsPublic) {
|
||||
@@ -914,7 +831,7 @@ async function sync({
|
||||
|
||||
if (isTTY) {
|
||||
proceed = await promptBool('Are you sure you want to proceed?', {
|
||||
trailing: eraseLines(1)
|
||||
trailing: eraseLines(1),
|
||||
});
|
||||
}
|
||||
|
||||
@@ -954,10 +871,10 @@ async function sync({
|
||||
output,
|
||||
token,
|
||||
config: {
|
||||
currentTeam
|
||||
currentTeam,
|
||||
},
|
||||
firstRun: false,
|
||||
deploymentType
|
||||
deploymentType,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1002,8 +919,6 @@ async function sync({
|
||||
} else {
|
||||
log(`${chalk.bold(chalk.cyan(url))}${dcs} ${deployStamp()}`);
|
||||
}
|
||||
} else {
|
||||
process.stdout.write(url);
|
||||
}
|
||||
|
||||
if (deploymentType === 'static') {
|
||||
@@ -1022,96 +937,52 @@ async function sync({
|
||||
// Show build logs
|
||||
// (We have to add this check for flow but it will never happen)
|
||||
if (deployment !== null) {
|
||||
// If the created deployment is ready it was a deduping and we should exit
|
||||
if (deployment.readyState !== 'READY') {
|
||||
require('assert')(deployment); // mute linter
|
||||
const instanceIndex = getInstanceIndex();
|
||||
const eventsStream = await maybeGetEventsStream(now, deployment);
|
||||
const eventsGenerator = getEventsGenerator(
|
||||
const instanceIndex = getInstanceIndex();
|
||||
const eventsStream = await maybeGetEventsStream(now, deployment);
|
||||
|
||||
if (!noVerify) {
|
||||
output.log(
|
||||
`Verifying instantiation in ${joinWords(
|
||||
Object.keys(deployment.scale).map(dc => chalk.bold(dc))
|
||||
)}`
|
||||
);
|
||||
const verifyStamp = stamp();
|
||||
const verifyDCsGenerator = getVerifyDCsGenerator(
|
||||
output,
|
||||
now,
|
||||
contextName,
|
||||
deployment,
|
||||
eventsStream
|
||||
);
|
||||
|
||||
for await (const _event of eventsGenerator) {
|
||||
const event = _event as any;
|
||||
// Stop when the deployment is ready
|
||||
if (
|
||||
event.type === 'state-change' &&
|
||||
event.payload.value === 'READY'
|
||||
) {
|
||||
output.log(`Build completed`);
|
||||
break;
|
||||
}
|
||||
|
||||
// Stop when there is an error state
|
||||
if (
|
||||
event.type === 'state-change' &&
|
||||
event.payload.value === 'ERROR'
|
||||
) {
|
||||
output.error(`Build failed`);
|
||||
await exit(1);
|
||||
}
|
||||
|
||||
// For any relevant event we receive, print the result
|
||||
if (event.type === 'build-start') {
|
||||
output.log('Building…');
|
||||
} else if (event.type === 'command') {
|
||||
output.log(formatLogCmd(event.payload.text));
|
||||
} else if (event.type === 'stdout' || event.type === 'stderr') {
|
||||
formatLogOutput(event.payload.text).forEach((msg: string) =>
|
||||
output.log(msg)
|
||||
for await (const _dcOrEvent of verifyDCsGenerator) {
|
||||
const dcOrEvent = _dcOrEvent as any;
|
||||
if (dcOrEvent instanceof VerifyScaleTimeout) {
|
||||
output.error(
|
||||
`Instance verification timed out (${ms(dcOrEvent.meta.timeout)})`
|
||||
);
|
||||
output.log(
|
||||
'Read more: https://err.sh/now-cli/verification-timeout'
|
||||
);
|
||||
await exit(1);
|
||||
} else if (Array.isArray(dcOrEvent)) {
|
||||
const [dc, instances] = dcOrEvent;
|
||||
output.log(
|
||||
`${chalk.cyan(chars.tick)} Scaled ${plural(
|
||||
'instance',
|
||||
instances,
|
||||
true
|
||||
)} in ${chalk.bold(dc)} ${verifyStamp()}`
|
||||
);
|
||||
} else if (
|
||||
dcOrEvent &&
|
||||
(dcOrEvent.type === 'stdout' || dcOrEvent.type === 'stderr')
|
||||
) {
|
||||
const prefix = chalk.gray(
|
||||
`[${instanceIndex(dcOrEvent.payload.instanceId)}] `
|
||||
);
|
||||
formatLogOutput(dcOrEvent.payload.text, prefix).forEach(
|
||||
(msg: string) => output.log(msg)
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (!noVerify) {
|
||||
output.log(
|
||||
`Verifying instantiation in ${joinWords(
|
||||
Object.keys(deployment.scale).map(dc => chalk.bold(dc))
|
||||
)}`
|
||||
);
|
||||
const verifyStamp = stamp();
|
||||
const verifyDCsGenerator = getVerifyDCsGenerator(
|
||||
output,
|
||||
now,
|
||||
deployment,
|
||||
eventsStream
|
||||
);
|
||||
|
||||
for await (const _dcOrEvent of verifyDCsGenerator) {
|
||||
const dcOrEvent = _dcOrEvent as any;
|
||||
if (dcOrEvent instanceof VerifyScaleTimeout) {
|
||||
output.error(
|
||||
`Instance verification timed out (${ms(
|
||||
dcOrEvent.meta.timeout
|
||||
)})`
|
||||
);
|
||||
output.log(
|
||||
'Read more: https://err.sh/now/verification-timeout'
|
||||
);
|
||||
await exit(1);
|
||||
} else if (Array.isArray(dcOrEvent)) {
|
||||
const [dc, instances] = dcOrEvent;
|
||||
output.log(
|
||||
`${chalk.cyan(chars.tick)} Scaled ${plural(
|
||||
'instance',
|
||||
instances,
|
||||
true
|
||||
)} in ${chalk.bold(dc)} ${verifyStamp()}`
|
||||
);
|
||||
} else if (
|
||||
dcOrEvent &&
|
||||
(dcOrEvent.type === 'stdout' || dcOrEvent.type === 'stderr')
|
||||
) {
|
||||
const prefix = chalk.gray(
|
||||
`[${instanceIndex(dcOrEvent.payload.instanceId)}] `
|
||||
);
|
||||
formatLogOutput(dcOrEvent.payload.text, prefix).forEach(
|
||||
(msg: string) => output.log(msg)
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1133,7 +1004,7 @@ async function readMeta(
|
||||
deploymentType,
|
||||
deploymentName: _deploymentName,
|
||||
quiet: true,
|
||||
sessionAffinity: _sessionAffinity
|
||||
sessionAffinity: _sessionAffinity,
|
||||
});
|
||||
|
||||
if (!deploymentType) {
|
||||
@@ -1150,7 +1021,7 @@ async function readMeta(
|
||||
meta,
|
||||
deploymentName: _deploymentName,
|
||||
deploymentType,
|
||||
sessionAffinity: _sessionAffinity
|
||||
sessionAffinity: _sessionAffinity,
|
||||
};
|
||||
} catch (err) {
|
||||
if (isTTY && err.code === 'multiple_manifests') {
|
||||
@@ -1164,7 +1035,7 @@ async function readMeta(
|
||||
try {
|
||||
deploymentType = await promptOptions([
|
||||
['npm', `${chalk.bold('package.json')}\t${chalk.gray(' --npm')} `],
|
||||
['docker', `${chalk.bold('Dockerfile')}\t${chalk.gray('--docker')} `]
|
||||
['docker', `${chalk.bold('Dockerfile')}\t${chalk.gray('--docker')} `],
|
||||
]);
|
||||
} catch (_) {
|
||||
throw err;
|
||||
@@ -1190,35 +1061,13 @@ async function maybeGetEventsStream(now: Now, deployment: any) {
|
||||
try {
|
||||
return await getEventsStream(now, deployment.deploymentId, {
|
||||
direction: 'forward',
|
||||
follow: true
|
||||
follow: true,
|
||||
});
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
function getEventsGenerator(
|
||||
now: Now,
|
||||
contextName: string,
|
||||
deployment: any,
|
||||
eventsStream: any
|
||||
) {
|
||||
const stateChangeFromPollingGenerator = getStateChangeFromPolling(
|
||||
now,
|
||||
contextName,
|
||||
deployment.deploymentId,
|
||||
deployment.readyState
|
||||
);
|
||||
if (eventsStream !== null) {
|
||||
return combineAsyncGenerators(
|
||||
eventListenerToGenerator('data', eventsStream),
|
||||
stateChangeFromPollingGenerator
|
||||
);
|
||||
}
|
||||
|
||||
return stateChangeFromPollingGenerator;
|
||||
}
|
||||
|
||||
function getVerifyDCsGenerator(
|
||||
output: Output,
|
||||
now: Now,
|
||||
@@ -1228,7 +1077,7 @@ function getVerifyDCsGenerator(
|
||||
const verifyDeployment = verifyDeploymentScale(
|
||||
output,
|
||||
now,
|
||||
deployment.deploymentId,
|
||||
deployment.deploymentId || deployment.uid,
|
||||
deployment.scale
|
||||
);
|
||||
|
||||
@@ -1295,9 +1144,9 @@ function handleCreateDeployError(output: Output, error: Error) {
|
||||
output.error(
|
||||
`Failed to validate ${highlight(
|
||||
'now.json'
|
||||
)}: ${message}\nDocumentation: ${
|
||||
link('https://zeit.co/docs/v2/advanced/configuration')
|
||||
}`
|
||||
)}: ${message}\nDocumentation: ${link(
|
||||
'https://zeit.co/docs/v2/advanced/configuration'
|
||||
)}`
|
||||
);
|
||||
|
||||
return 1;
|
||||
@@ -1307,7 +1156,7 @@ function handleCreateDeployError(output: Output, error: Error) {
|
||||
`Too many requests detected for ${error.meta.api} API. Try again in ${ms(
|
||||
error.meta.retryAfter * 1000,
|
||||
{
|
||||
long: true
|
||||
long: true,
|
||||
}
|
||||
)}.`
|
||||
);
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import path from 'path';
|
||||
import chalk from 'chalk';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
|
||||
import getArgs from '../../util/get-args';
|
||||
import getSubcommand from '../../util/get-subcommand';
|
||||
@@ -11,11 +12,10 @@ import logo from '../../util/output/logo';
|
||||
import cmd from '../../util/output/cmd';
|
||||
import dev from './dev';
|
||||
import readPackage from '../../util/read-package';
|
||||
import { Package } from '../../util/dev/types';
|
||||
import readConfig from '../../util/config/read-config';
|
||||
|
||||
const COMMAND_CONFIG = {
|
||||
dev: ['dev']
|
||||
dev: ['dev'],
|
||||
};
|
||||
|
||||
const help = () => {
|
||||
@@ -54,18 +54,12 @@ export default async function main(ctx: NowContext) {
|
||||
|
||||
// Deprecated
|
||||
'--port': Number,
|
||||
'-p': '--port'
|
||||
'-p': '--port',
|
||||
});
|
||||
const debug = argv['--debug'];
|
||||
args = getSubcommand(argv._.slice(1), COMMAND_CONFIG).args;
|
||||
output = createOutput({ debug });
|
||||
|
||||
// Builders won't show debug logs by default
|
||||
// the `NOW_BUILDER_DEBUG` env variable will enable them
|
||||
if (debug) {
|
||||
process.env.NOW_BUILDER_DEBUG = '1';
|
||||
}
|
||||
|
||||
if ('--port' in argv) {
|
||||
output.warn('`--port` is deprecated, please use `--listen` instead');
|
||||
argv['--listen'] = String(argv['--port']);
|
||||
@@ -90,7 +84,7 @@ export default async function main(ctx: NowContext) {
|
||||
const pkg = await readPackage(path.join(dir, 'package.json'));
|
||||
|
||||
if (pkg) {
|
||||
const { scripts } = pkg as Package;
|
||||
const { scripts } = pkg as PackageJson;
|
||||
|
||||
if (scripts && scripts.dev && /\bnow\b\W+\bdev\b/.test(scripts.dev)) {
|
||||
output.error(
|
||||
@@ -98,9 +92,7 @@ export default async function main(ctx: NowContext) {
|
||||
'package.json'
|
||||
)} must not contain ${cmd('now dev')}`
|
||||
);
|
||||
output.error(
|
||||
`More details: http://err.sh/now/now-dev-as-dev-script`
|
||||
);
|
||||
output.error(`More details: http://err.sh/now/now-dev-as-dev-script`);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ export default async function verify(
|
||||
) {
|
||||
const {
|
||||
authConfig: { token },
|
||||
config
|
||||
config,
|
||||
} = ctx;
|
||||
const { currentTeam } = config;
|
||||
const { apiUrl } = ctx;
|
||||
@@ -122,16 +122,11 @@ export default async function verify(
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (result.txtVerifiedAt) {
|
||||
if (result.nsVerifiedAt) {
|
||||
console.log(
|
||||
`${chalk.cyan('> Success!')} Domain ${chalk.bold(
|
||||
domain.name
|
||||
)} was verified using DNS TXT record. ${verifyStamp()}`
|
||||
);
|
||||
output.print(
|
||||
` You can verify with nameservers too. Run ${cmd(
|
||||
`now domains inspect ${domain.name}`
|
||||
)} to find out the intended set.\n`
|
||||
)} was verified using nameservers. ${verifyStamp()}`
|
||||
);
|
||||
return 0;
|
||||
}
|
||||
@@ -139,7 +134,12 @@ export default async function verify(
|
||||
console.log(
|
||||
`${chalk.cyan('> Success!')} Domain ${chalk.bold(
|
||||
domain.name
|
||||
)} was verified using nameservers. ${verifyStamp()}`
|
||||
)} was verified using DNS TXT record. ${verifyStamp()}`
|
||||
);
|
||||
output.print(
|
||||
` You can verify with nameservers too. Run ${cmd(
|
||||
`now domains inspect ${domain.name}`
|
||||
)} to find out the intended set.\n`
|
||||
);
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -1,39 +1,38 @@
|
||||
export default new Map([
|
||||
[ 'alias', 'alias'],
|
||||
[ 'aliases', 'alias'],
|
||||
[ 'billing', 'billing'],
|
||||
[ 'cc', 'billing'],
|
||||
[ 'cert', 'certs'],
|
||||
[ 'certs', 'certs'],
|
||||
[ 'deploy', 'deploy'],
|
||||
[ 'deploy-v1', 'deploy'],
|
||||
[ 'deploy-v2', 'deploy'],
|
||||
[ 'dev', 'dev'],
|
||||
[ 'dns', 'dns'],
|
||||
[ 'domain', 'domains'],
|
||||
[ 'domains', 'domains'],
|
||||
[ 'downgrade', 'upgrade'],
|
||||
[ 'help', 'help'],
|
||||
[ 'init', 'init'],
|
||||
[ 'inspect', 'inspect'],
|
||||
[ 'list', 'list'],
|
||||
[ 'ln', 'alias'],
|
||||
[ 'log', 'logs'],
|
||||
[ 'login', 'login'],
|
||||
[ 'logout', 'logout'],
|
||||
[ 'logs', 'logs'],
|
||||
[ 'ls', 'list'],
|
||||
[ 'project', 'projects'],
|
||||
[ 'projects', 'projects'],
|
||||
[ 'remove', 'remove'],
|
||||
[ 'rm', 'remove'],
|
||||
[ 'scale', 'scale'],
|
||||
[ 'secret', 'secrets'],
|
||||
[ 'secrets', 'secrets'],
|
||||
[ 'switch', 'teams'],
|
||||
[ 'team', 'teams'],
|
||||
[ 'teams', 'teams'],
|
||||
[ 'update', 'update'],
|
||||
[ 'upgrade', 'upgrade'],
|
||||
[ 'whoami', 'whoami']
|
||||
['alias', 'alias'],
|
||||
['aliases', 'alias'],
|
||||
['billing', 'billing'],
|
||||
['cc', 'billing'],
|
||||
['cert', 'certs'],
|
||||
['certs', 'certs'],
|
||||
['deploy', 'deploy'],
|
||||
['deploy-v1', 'deploy'],
|
||||
['deploy-v2', 'deploy'],
|
||||
['dev', 'dev'],
|
||||
['dns', 'dns'],
|
||||
['domain', 'domains'],
|
||||
['domains', 'domains'],
|
||||
['downgrade', 'upgrade'],
|
||||
['help', 'help'],
|
||||
['init', 'init'],
|
||||
['inspect', 'inspect'],
|
||||
['list', 'list'],
|
||||
['ln', 'alias'],
|
||||
['log', 'logs'],
|
||||
['login', 'login'],
|
||||
['logout', 'logout'],
|
||||
['logs', 'logs'],
|
||||
['ls', 'list'],
|
||||
['project', 'projects'],
|
||||
['projects', 'projects'],
|
||||
['remove', 'remove'],
|
||||
['rm', 'remove'],
|
||||
['scale', 'scale'],
|
||||
['secret', 'secrets'],
|
||||
['secrets', 'secrets'],
|
||||
['switch', 'teams'],
|
||||
['team', 'teams'],
|
||||
['teams', 'teams'],
|
||||
['update', 'update'],
|
||||
['whoami', 'whoami'],
|
||||
]);
|
||||
|
||||
@@ -74,7 +74,7 @@ export default async function main(ctx) {
|
||||
'--all': Boolean,
|
||||
'--meta': [String],
|
||||
'-a': '--all',
|
||||
'-m': '--meta'
|
||||
'-m': '--meta',
|
||||
});
|
||||
} catch (err) {
|
||||
handleError(err);
|
||||
@@ -84,7 +84,7 @@ export default async function main(ctx) {
|
||||
const debugEnabled = argv['--debug'];
|
||||
|
||||
const { print, log, error, note, debug } = createOutput({
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
});
|
||||
|
||||
if (argv._.length > 2) {
|
||||
@@ -103,13 +103,16 @@ export default async function main(ctx) {
|
||||
}
|
||||
|
||||
const meta = parseMeta(argv['--meta']);
|
||||
const { authConfig: { token }, config } = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config,
|
||||
} = ctx;
|
||||
const { currentTeam, includeScheme } = config;
|
||||
const client = new Client({
|
||||
apiUrl,
|
||||
token,
|
||||
currentTeam,
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
});
|
||||
let contextName = null;
|
||||
|
||||
@@ -165,7 +168,7 @@ export default async function main(ctx) {
|
||||
|
||||
try {
|
||||
debug('Fetching deployments');
|
||||
deployments = await now.list(app, { version: 4, meta });
|
||||
deployments = await now.list(app, { version: 5, meta });
|
||||
} catch (err) {
|
||||
stopSpinner();
|
||||
throw err;
|
||||
@@ -202,7 +205,16 @@ export default async function main(ctx) {
|
||||
const item = aliases.find(e => e.uid === app || e.alias === app);
|
||||
|
||||
if (item) {
|
||||
debug('Found alias that matches app name');
|
||||
debug(`Found alias that matches app name: ${item.alias}`);
|
||||
|
||||
if (Array.isArray(item.rules)) {
|
||||
now.close();
|
||||
stopSpinner();
|
||||
log(`Found matching path alias: ${chalk.cyan(item.alias)}`);
|
||||
log(`Please run ${cmd(`now alias ls ${item.alias}`)} instead`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
const match = await now.findDeployment(item.deploymentId);
|
||||
const instances = await getDeploymentInstances(
|
||||
now,
|
||||
@@ -250,7 +262,9 @@ export default async function main(ctx) {
|
||||
|
||||
// information to help the user find other deployments or instances
|
||||
if (app == null) {
|
||||
log(`To list more deployments for a project run ${cmd('now ls [project]')}`);
|
||||
log(
|
||||
`To list more deployments for a project run ${cmd('now ls [project]')}`
|
||||
);
|
||||
} else if (!argv['--all']) {
|
||||
log(`To list deployment instances run ${cmd('now ls --all [project]')}`);
|
||||
}
|
||||
@@ -260,19 +274,18 @@ export default async function main(ctx) {
|
||||
console.log(
|
||||
`${table(
|
||||
[
|
||||
['project', 'latest deployment', 'inst #', 'type', 'state', 'age'].map(s => chalk.dim(s)),
|
||||
['project', 'latest deployment', 'state', 'age', 'username'].map(s =>
|
||||
chalk.dim(s)
|
||||
),
|
||||
...deployments
|
||||
.sort(sortRecent())
|
||||
.map(dep => [
|
||||
[
|
||||
getProjectName(dep),
|
||||
chalk.bold((includeScheme ? 'https://' : '') + dep.url),
|
||||
dep.instanceCount == null || dep.type === 'LAMBDAS'
|
||||
? chalk.gray('-')
|
||||
: dep.instanceCount,
|
||||
dep.type === 'LAMBDAS' ? chalk.gray('-') : dep.type,
|
||||
stateString(dep.state),
|
||||
chalk.gray(ms(Date.now() - new Date(dep.created)))
|
||||
chalk.gray(ms(Date.now() - new Date(dep.created))),
|
||||
dep.creator.username,
|
||||
],
|
||||
...(argv['--all']
|
||||
? dep.instances.map(i => [
|
||||
@@ -280,9 +293,9 @@ export default async function main(ctx) {
|
||||
` ${chalk.gray('-')} ${i.url} `,
|
||||
'',
|
||||
'',
|
||||
''
|
||||
'',
|
||||
])
|
||||
: [])
|
||||
: []),
|
||||
])
|
||||
// flatten since the previous step returns a nested
|
||||
// array of the deployment and (optionally) its instances
|
||||
@@ -293,12 +306,12 @@ export default async function main(ctx) {
|
||||
// we only want to render one deployment per app
|
||||
filterUniqueApps()
|
||||
: () => true
|
||||
)
|
||||
),
|
||||
],
|
||||
{
|
||||
align: ['l', 'l', 'r', 'l', 'b'],
|
||||
hsep: ' '.repeat(4),
|
||||
stringLength: strlen
|
||||
stringLength: strlen,
|
||||
}
|
||||
).replace(/^/gm, ' ')}\n\n`
|
||||
);
|
||||
@@ -310,7 +323,7 @@ function getProjectName(d) {
|
||||
return 'files';
|
||||
}
|
||||
|
||||
return d.name
|
||||
return d.name;
|
||||
}
|
||||
|
||||
// renders the state string
|
||||
|
||||
@@ -84,8 +84,8 @@ export default async function main(ctx) {
|
||||
debug: 'd',
|
||||
query: 'q',
|
||||
follow: 'f',
|
||||
output: 'o'
|
||||
}
|
||||
output: 'o',
|
||||
},
|
||||
});
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
@@ -136,14 +136,17 @@ export default async function main(ctx) {
|
||||
types = argv.all ? [] : ['command', 'stdout', 'stderr', 'exit'];
|
||||
outputMode = argv.output in logPrinters ? argv.output : 'short';
|
||||
|
||||
const { authConfig: { token }, config } = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config,
|
||||
} = ctx;
|
||||
const { currentTeam } = config;
|
||||
const now = new Now({ apiUrl, token, debug, currentTeam });
|
||||
const client = new Client({
|
||||
apiUrl,
|
||||
token,
|
||||
currentTeam,
|
||||
debug: debugEnabled
|
||||
debug: debugEnabled,
|
||||
});
|
||||
let contextName = null;
|
||||
|
||||
@@ -206,7 +209,7 @@ export default async function main(ctx) {
|
||||
types,
|
||||
instanceId,
|
||||
since,
|
||||
until
|
||||
until,
|
||||
}; // no follow
|
||||
const storage = [];
|
||||
const storeEvent = event => storage.push(event);
|
||||
@@ -216,7 +219,7 @@ export default async function main(ctx) {
|
||||
onEvent: storeEvent,
|
||||
quiet: false,
|
||||
debug,
|
||||
findOpts: findOpts1
|
||||
findOpts: findOpts1,
|
||||
});
|
||||
|
||||
const printedEventIds = new Set();
|
||||
@@ -238,14 +241,14 @@ export default async function main(ctx) {
|
||||
types,
|
||||
instanceId,
|
||||
since: since2,
|
||||
follow: true
|
||||
follow: true,
|
||||
};
|
||||
await printEvents(now, deployment.uid || deployment.id, currentTeam, {
|
||||
mode: 'logs',
|
||||
onEvent: printEvent,
|
||||
quiet: false,
|
||||
debug,
|
||||
findOpts: findOpts2
|
||||
findOpts: findOpts2,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -280,25 +283,53 @@ function printLogShort(log) {
|
||||
` ${obj.status} ${obj.bodyBytesSent}`;
|
||||
} else if (log.type === 'event') {
|
||||
data = `EVENT ${log.event} ${JSON.stringify(log.payload)}`;
|
||||
} else if (obj) {
|
||||
data = JSON.stringify(obj, null, 2);
|
||||
} else {
|
||||
data = obj
|
||||
? JSON.stringify(obj, null, 2)
|
||||
: (log.text || '')
|
||||
data = (log.text || '')
|
||||
.replace(/\n$/, '')
|
||||
.replace(/^\n/, '')
|
||||
// eslint-disable-next-line no-control-regex
|
||||
.replace(/\x1b\[1000D/g, '')
|
||||
.replace(/\x1b\[0K/g, '')
|
||||
.replace(/\x1b\[1A/g, '');
|
||||
if (/warning/i.test(data)) {
|
||||
data = chalk.yellow(data);
|
||||
} else if (log.type === 'stderr') {
|
||||
data = chalk.red(data);
|
||||
}
|
||||
}
|
||||
|
||||
const date = new Date(log.created).toISOString();
|
||||
|
||||
data.split('\n').forEach((line, i) => {
|
||||
if (
|
||||
line.includes('START RequestId:') ||
|
||||
line.includes('END RequestId:') ||
|
||||
line.includes('XRAY TraceId:')
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (line.includes('REPORT RequestId:')) {
|
||||
line = line.substring(line.indexOf('Duration:'), line.length);
|
||||
|
||||
if (line.includes('Init Duration:')) {
|
||||
line = line.substring(0, line.indexOf('Init Duration:'));
|
||||
}
|
||||
}
|
||||
|
||||
if (i === 0) {
|
||||
console.log(`${chalk.dim(date)} ${line}`);
|
||||
console.log(
|
||||
`${chalk.dim(date)} ${line.replace('[now-builder-debug] ', '')}`
|
||||
);
|
||||
} else {
|
||||
console.log(`${' '.repeat(date.length)} ${line}`);
|
||||
console.log(
|
||||
`${' '.repeat(date.length)} ${line.replace(
|
||||
'[now-builder-debug] ',
|
||||
''
|
||||
)}`
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -327,7 +358,7 @@ function printLogRaw(log) {
|
||||
|
||||
const logPrinters = {
|
||||
short: printLogShort,
|
||||
raw: printLogRaw
|
||||
raw: printLogRaw,
|
||||
};
|
||||
|
||||
function toTimestamp(datestr) {
|
||||
|
||||
@@ -10,7 +10,7 @@ import Client from '../util/client.ts';
|
||||
import logo from '../util/output/logo';
|
||||
import getScope from '../util/get-scope';
|
||||
|
||||
const e = encodeURIComponent
|
||||
const e = encodeURIComponent;
|
||||
|
||||
const help = () => {
|
||||
console.log(`
|
||||
@@ -48,8 +48,8 @@ const main = async ctx => {
|
||||
argv = mri(ctx.argv.slice(2), {
|
||||
boolean: ['help'],
|
||||
alias: {
|
||||
help: 'h'
|
||||
}
|
||||
help: 'h',
|
||||
},
|
||||
});
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
@@ -63,7 +63,10 @@ const main = async ctx => {
|
||||
await exit(0);
|
||||
}
|
||||
|
||||
const { authConfig: { token }, config: { currentTeam }} = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config: { currentTeam },
|
||||
} = ctx;
|
||||
const client = new Client({ apiUrl, token, currentTeam, debug });
|
||||
|
||||
const { contextName } = await getScope(client);
|
||||
@@ -93,17 +96,21 @@ async function run({ client, contextName }) {
|
||||
if (args.length !== 0) {
|
||||
console.error(
|
||||
error(
|
||||
`Invalid number of arguments. Usage: ${chalk.cyan('`now projects ls`')}`
|
||||
`Invalid number of arguments. Usage: ${chalk.cyan(
|
||||
'`now projects ls`'
|
||||
)}`
|
||||
)
|
||||
);
|
||||
return exit(1);
|
||||
}
|
||||
|
||||
const list = await client.fetch('/projects/list', {method: 'GET'});
|
||||
const list = await client.fetch('/v2/projects/', { method: 'GET' });
|
||||
const elapsed = ms(new Date() - start);
|
||||
|
||||
console.log(
|
||||
`> ${plural('project', list.length, true)} found under ${chalk.bold(contextName)} ${chalk.gray(`[${elapsed}]`)}`
|
||||
`> ${plural('project', list.length, true)} found under ${chalk.bold(
|
||||
contextName
|
||||
)} ${chalk.gray(`[${elapsed}]`)}`
|
||||
);
|
||||
|
||||
if (list.length > 0) {
|
||||
@@ -114,19 +121,19 @@ async function run({ client, contextName }) {
|
||||
header.concat(
|
||||
list.map(secret => [
|
||||
'',
|
||||
chalk.bold(secret.name),
|
||||
chalk.gray(`${ms(cur - new Date(secret.updatedAt)) } ago`)
|
||||
])
|
||||
chalk.bold(secret.name),
|
||||
chalk.gray(`${ms(cur - new Date(secret.updatedAt))} ago`),
|
||||
])
|
||||
),
|
||||
{
|
||||
align: ['l', 'l', 'l'],
|
||||
hsep: ' '.repeat(2),
|
||||
stringLength: strlen
|
||||
stringLength: strlen,
|
||||
}
|
||||
);
|
||||
|
||||
if (out) {
|
||||
console.log(`\n${ out }\n`);
|
||||
console.log(`\n${out}\n`);
|
||||
}
|
||||
}
|
||||
return;
|
||||
@@ -148,11 +155,11 @@ async function run({ client, contextName }) {
|
||||
|
||||
// Check the existence of the project
|
||||
try {
|
||||
await client.fetch(`/projects/info/${e(name)}`)
|
||||
} catch(err) {
|
||||
await client.fetch(`/projects/info/${e(name)}`);
|
||||
} catch (err) {
|
||||
if (err.status === 404) {
|
||||
console.error(error('No such project exists'))
|
||||
return exit(1)
|
||||
console.error(error('No such project exists'));
|
||||
return exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -162,7 +169,9 @@ async function run({ client, contextName }) {
|
||||
return exit(0);
|
||||
}
|
||||
|
||||
await client.fetch('/projects/remove', {method: 'DELETE', body: {name}});
|
||||
await client.fetch(`/v2/projects/${name}`, {
|
||||
method: 'DELETE',
|
||||
});
|
||||
const elapsed = ms(new Date() - start);
|
||||
console.log(
|
||||
`${chalk.cyan('> Success!')} Project ${chalk.bold(
|
||||
@@ -193,7 +202,10 @@ async function run({ client, contextName }) {
|
||||
}
|
||||
|
||||
const [name] = args;
|
||||
await client.fetch('/projects/ensure-project', {method: 'POST', body: {name}});
|
||||
await client.fetch('/projects/ensure-project', {
|
||||
method: 'POST',
|
||||
body: { name },
|
||||
});
|
||||
const elapsed = ms(new Date() - start);
|
||||
|
||||
console.log(
|
||||
@@ -204,9 +216,7 @@ async function run({ client, contextName }) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.error(
|
||||
error('Please specify a valid subcommand: ls | add | rm')
|
||||
);
|
||||
console.error(error('Please specify a valid subcommand: ls | add | rm'));
|
||||
help();
|
||||
exit(1);
|
||||
}
|
||||
@@ -220,7 +230,7 @@ function readConfirmation(projectName) {
|
||||
return new Promise(resolve => {
|
||||
process.stdout.write(
|
||||
`The project: ${chalk.bold(projectName)} will be removed permanently.\n` +
|
||||
`It will also delete everything under the project including deployments.\n`
|
||||
`It will also delete everything under the project including deployments.\n`
|
||||
);
|
||||
|
||||
process.stdout.write(
|
||||
|
||||
@@ -70,11 +70,12 @@ let subcommand;
|
||||
|
||||
const main = async ctx => {
|
||||
argv = mri(ctx.argv.slice(2), {
|
||||
boolean: ['help', 'debug'],
|
||||
boolean: ['help', 'debug', 'yes'],
|
||||
alias: {
|
||||
help: 'h',
|
||||
debug: 'd'
|
||||
}
|
||||
debug: 'd',
|
||||
yes: 'y',
|
||||
},
|
||||
});
|
||||
|
||||
argv._ = argv._.slice(1);
|
||||
@@ -88,7 +89,10 @@ const main = async ctx => {
|
||||
await exit(0);
|
||||
}
|
||||
|
||||
const { authConfig: { token }, config: { currentTeam } } = ctx;
|
||||
const {
|
||||
authConfig: { token },
|
||||
config: { currentTeam },
|
||||
} = ctx;
|
||||
const output = createOutput({ debug });
|
||||
const client = new Client({ apiUrl, token, currentTeam, debug });
|
||||
let contextName = null;
|
||||
@@ -105,7 +109,7 @@ const main = async ctx => {
|
||||
}
|
||||
|
||||
try {
|
||||
await run({ token, contextName, currentTeam });
|
||||
await run({ output, token, contextName, currentTeam });
|
||||
} catch (err) {
|
||||
handleError(err);
|
||||
exit(1);
|
||||
@@ -121,7 +125,7 @@ export default async ctx => {
|
||||
}
|
||||
};
|
||||
|
||||
async function run({ token, contextName, currentTeam }) {
|
||||
async function run({ output, token, contextName, currentTeam }) {
|
||||
const secrets = new NowSecrets({ apiUrl, token, debug, currentTeam });
|
||||
const args = argv._.slice(1);
|
||||
const start = Date.now();
|
||||
@@ -153,13 +157,13 @@ async function run({ token, contextName, currentTeam }) {
|
||||
list.map(secret => [
|
||||
'',
|
||||
chalk.bold(secret.name),
|
||||
chalk.gray(`${ms(cur - new Date(secret.created))} ago`)
|
||||
chalk.gray(`${ms(cur - new Date(secret.created))} ago`),
|
||||
])
|
||||
),
|
||||
{
|
||||
align: ['l', 'l', 'l'],
|
||||
hsep: ' '.repeat(2),
|
||||
stringLength: strlen
|
||||
stringLength: strlen,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -185,7 +189,7 @@ async function run({ token, contextName, currentTeam }) {
|
||||
const theSecret = list.find(secret => secret.name === args[0]);
|
||||
|
||||
if (theSecret) {
|
||||
const yes = await readConfirmation(theSecret);
|
||||
const yes = argv.yes || (await readConfirmation(theSecret));
|
||||
if (!yes) {
|
||||
console.error(error('User abort'));
|
||||
return exit(0);
|
||||
@@ -250,6 +254,10 @@ async function run({ token, contextName, currentTeam }) {
|
||||
await secrets.add(name, value);
|
||||
const elapsed = ms(new Date() - start);
|
||||
|
||||
if (name !== name.toLowerCase()) {
|
||||
output.warn(`Your secret name was converted to lower-case`);
|
||||
}
|
||||
|
||||
console.log(
|
||||
`${chalk.cyan('> Success!')} Secret ${chalk.bold(
|
||||
name.toLowerCase()
|
||||
@@ -275,7 +283,7 @@ function readConfirmation(secret) {
|
||||
const time = chalk.gray(`${ms(new Date() - new Date(secret.created))} ago`);
|
||||
const tbl = table([[chalk.bold(secret.name), time]], {
|
||||
align: ['r', 'l'],
|
||||
hsep: ' '.repeat(6)
|
||||
hsep: ' '.repeat(6),
|
||||
});
|
||||
|
||||
process.stdout.write(
|
||||
|
||||
@@ -1,323 +0,0 @@
|
||||
import chalk from 'chalk';
|
||||
import createOutput from '../util/output';
|
||||
import cmd from '../util/output/cmd.ts';
|
||||
import logo from '../util/output/logo';
|
||||
import { handleError } from '../util/error';
|
||||
import Client from '../util/client.ts';
|
||||
import getScope from '../util/get-scope.ts';
|
||||
import getArgs from '../util/get-args';
|
||||
import promptBool from '../util/prompt-bool';
|
||||
import Now from '../util';
|
||||
import wait from '../util/output/wait';
|
||||
import plans from '../util/plans';
|
||||
|
||||
const help = type => {
|
||||
console.log(`
|
||||
${chalk.bold(`${logo} now ${type}`)} [options]
|
||||
|
||||
${chalk.dim('Options:')}
|
||||
|
||||
-h, --help Output usage information
|
||||
-A ${chalk.bold.underline('FILE')}, --local-config=${chalk.bold.underline(
|
||||
'FILE'
|
||||
)} Path to the local ${'`now.json`'} file
|
||||
-Q ${chalk.bold.underline('DIR')}, --global-config=${chalk.bold.underline(
|
||||
'DIR'
|
||||
)} Path to the global ${'`.now`'} directory
|
||||
-d, --debug Debug mode [off]
|
||||
-t ${chalk.bold.underline('TOKEN')}, --token=${chalk.bold.underline(
|
||||
'TOKEN'
|
||||
)} Login token
|
||||
-S, --scope Set a custom scope
|
||||
-y, --yes Skip the confirmation prompt
|
||||
|
||||
${chalk.dim('Examples:')}
|
||||
|
||||
${chalk.gray('–')} ${type === 'upgrade'
|
||||
? 'Upgrade to the Unlimited plan'
|
||||
: 'Downgrade to the Free plan'}
|
||||
|
||||
${chalk.cyan(`$ now ${type}`)}
|
||||
${type === 'upgrade'
|
||||
? `
|
||||
${chalk.yellow('NOTE:')} ${chalk.gray(
|
||||
'Make sure you have a payment method, or add one:'
|
||||
)}
|
||||
|
||||
${chalk.cyan(`$ now billing add`)}
|
||||
`
|
||||
: ''}
|
||||
${chalk.gray('–')} ${type === 'upgrade'
|
||||
? 'Upgrade to the Unlimited plan without confirming'
|
||||
: 'Downgrade to the Free plan without confirming'}
|
||||
|
||||
${chalk.cyan(`$ now ${type} --yes`)}
|
||||
`);
|
||||
};
|
||||
|
||||
const upgradeToUnlimited = async ({ error }, now, reactivation = false) => {
|
||||
const cancelWait = wait(reactivation ? 'Re-activating' : 'Upgrading');
|
||||
|
||||
try {
|
||||
await now.fetch(`/plan`, {
|
||||
method: 'PUT',
|
||||
body: {
|
||||
plan: 'unlimited',
|
||||
reactivation
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
cancelWait();
|
||||
|
||||
if (err.code === 'no_team_owner') {
|
||||
error(
|
||||
`You are not an owner of this team. Please ask an owner to upgrade your membership.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (err.code === 'customer_not_found') {
|
||||
error(
|
||||
`No payment method available. Please add one using ${cmd(
|
||||
'now billing add'
|
||||
)} before upgrading.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
error(`Not able to upgrade. Please try again later.`);
|
||||
return 1;
|
||||
}
|
||||
|
||||
cancelWait();
|
||||
};
|
||||
|
||||
const downgradeToFree = async ({ error }, now) => {
|
||||
const cancelWait = wait('Downgrading');
|
||||
|
||||
try {
|
||||
await now.fetch(`/plan`, {
|
||||
method: 'PUT',
|
||||
body: {
|
||||
plan: 'free'
|
||||
}
|
||||
});
|
||||
} catch (err) {
|
||||
cancelWait();
|
||||
|
||||
if (err.code === 'no_team_owner') {
|
||||
error(
|
||||
`You are not an owner of this team. Please ask an owner to upgrade your membership.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
error(`Not able to downgrade. Please try again later.`);
|
||||
return 1;
|
||||
}
|
||||
|
||||
cancelWait();
|
||||
};
|
||||
|
||||
export default async function main(ctx) {
|
||||
let argv;
|
||||
|
||||
try {
|
||||
argv = getArgs(ctx.argv.slice(2), {
|
||||
'--yes': Boolean,
|
||||
'-y': '--yes'
|
||||
});
|
||||
} catch (err) {
|
||||
handleError(err);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const type = argv._[0];
|
||||
const skipConfirmation = argv['--yes'];
|
||||
|
||||
if (argv['--help']) {
|
||||
help(type);
|
||||
return 2;
|
||||
}
|
||||
|
||||
const apiUrl = ctx.apiUrl;
|
||||
const debugEnabled = argv['--debug'];
|
||||
const output = createOutput({ debug: debugEnabled });
|
||||
const { log, success, warn } = output;
|
||||
if (type === 'upgrade') {
|
||||
log(`Are you trying to upgrade Now CLI? Run ${cmd('now update')}!`);
|
||||
}
|
||||
warn(`${cmd(`now ${type}`)} is deprecated and will soon be removed.`);
|
||||
log(`Change your plan here: ${chalk.cyan('https://zeit.co/account/plan')}\n`);
|
||||
|
||||
const { authConfig: { token }, config } = ctx;
|
||||
const { currentTeam } = config;
|
||||
const client = new Client({
|
||||
apiUrl,
|
||||
token,
|
||||
currentTeam,
|
||||
debug: debugEnabled
|
||||
});
|
||||
let user = null;
|
||||
let team = null;
|
||||
|
||||
try {
|
||||
({ user, team } = await getScope(client));
|
||||
} catch (err) {
|
||||
if (err.code === 'NOT_AUTHORIZED' || err.code === 'TEAM_DELETED') {
|
||||
output.error(err.message);
|
||||
return 1;
|
||||
}
|
||||
|
||||
throw err;
|
||||
}
|
||||
|
||||
const prefix = currentTeam
|
||||
? `Your team ${chalk.bold(team.name)} is`
|
||||
: 'You are';
|
||||
const now = new Now({ apiUrl, token, debug: debugEnabled, currentTeam });
|
||||
const billing = currentTeam ? team.billing : user.billing;
|
||||
const plan = (billing && billing.plan) || 'free';
|
||||
|
||||
if (billing && billing.cancelation) {
|
||||
const date = new Date(billing.cancelation).toLocaleString();
|
||||
|
||||
log(
|
||||
`Your subscription is set to ${chalk.bold('downgrade')} on ${chalk.bold(
|
||||
date
|
||||
)}.`
|
||||
);
|
||||
const confirmed =
|
||||
skipConfirmation ||
|
||||
(await promptBool(
|
||||
output,
|
||||
`Would you like to prevent this from happening?`
|
||||
));
|
||||
|
||||
if (!confirmed) {
|
||||
log(`No action taken`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
await upgradeToUnlimited(output, now, true);
|
||||
success(`${prefix} back on the ${chalk.bold(plans[plan])} plan. Enjoy!`);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
if (plan === 'unlimited') {
|
||||
if (type === 'upgrade') {
|
||||
log(
|
||||
`${prefix} already on the ${chalk.bold(
|
||||
'Unlimited'
|
||||
)} plan. This is the highest plan.`
|
||||
);
|
||||
log(
|
||||
`If you want to upgrade a different scope, switch to it by using ${cmd(
|
||||
'now switch'
|
||||
)} first.`
|
||||
);
|
||||
|
||||
return 0;
|
||||
}
|
||||
if (type === 'downgrade') {
|
||||
log(`${prefix} on the ${chalk.bold('Unlimited')} plan.`);
|
||||
const confirmed =
|
||||
skipConfirmation ||
|
||||
(await promptBool(
|
||||
output,
|
||||
`Would you like to downgrade to the ${chalk.bold('Free')} plan?`
|
||||
));
|
||||
|
||||
if (!confirmed) {
|
||||
log(`Aborted`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
await downgradeToFree(output, now);
|
||||
success(
|
||||
`${prefix} now on the ${chalk.bold(
|
||||
'Free'
|
||||
)} plan. We are sad to see you go!`
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if (plan === 'free' || plan === 'oss') {
|
||||
if (type === 'downgrade') {
|
||||
log(
|
||||
`${prefix} already on the ${chalk.bold(
|
||||
'Free'
|
||||
)} plan. This is the lowest plan.`
|
||||
);
|
||||
log(
|
||||
`If you want to downgrade a different scope, switch to it by using ${cmd(
|
||||
'now switch'
|
||||
)} first.`
|
||||
);
|
||||
|
||||
return 0;
|
||||
}
|
||||
if (type === 'upgrade') {
|
||||
log(`${prefix} on the ${chalk.bold('Free')} plan.`);
|
||||
const confirmed =
|
||||
skipConfirmation ||
|
||||
(await promptBool(
|
||||
output,
|
||||
`Would you like to upgrade to the ${chalk.bold(
|
||||
'Unlimited'
|
||||
)} plan (starting at ${chalk.bold('$0.99/month')})?`
|
||||
));
|
||||
|
||||
if (!confirmed) {
|
||||
log(`Aborted`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
await upgradeToUnlimited(output, now);
|
||||
success(`${prefix} now on the ${chalk.bold('Unlimited')} plan. Enjoy!`);
|
||||
}
|
||||
}
|
||||
|
||||
log(`${prefix} on the old ${chalk.bold(plans[plan])} plan (Now 1.0).`);
|
||||
|
||||
if (type === 'upgrade') {
|
||||
const confirmed =
|
||||
skipConfirmation ||
|
||||
(await promptBool(
|
||||
output,
|
||||
`Would you like to upgrade to the new ${chalk.bold(
|
||||
'Unlimited'
|
||||
)} plan (starting at ${chalk.bold('$0.99/month')})?`
|
||||
));
|
||||
|
||||
if (!confirmed) {
|
||||
log(`Aborted`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
await upgradeToUnlimited(output, now);
|
||||
success(`${prefix} now on the new ${chalk.bold('Unlimited')} plan. Enjoy!`);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
const confirmed =
|
||||
skipConfirmation ||
|
||||
(await promptBool(
|
||||
output,
|
||||
`Would you like to downgrade to the new ${chalk.bold('Free')} plan?`
|
||||
));
|
||||
|
||||
if (!confirmed) {
|
||||
log(`Aborted`);
|
||||
return 0;
|
||||
}
|
||||
|
||||
await downgradeToFree(output, now);
|
||||
success(
|
||||
`${prefix} now on the new ${chalk.bold(
|
||||
'Free'
|
||||
)} plan. We are sad to see you go!`
|
||||
);
|
||||
}
|
||||
@@ -17,7 +17,7 @@ import info from './util/output/info';
|
||||
import getNowDir from './util/config/global-path';
|
||||
import {
|
||||
getDefaultConfig,
|
||||
getDefaultAuthConfig
|
||||
getDefaultAuthConfig,
|
||||
} from './util/config/get-default';
|
||||
import hp from './util/humanize-path';
|
||||
import commands from './commands/index.ts';
|
||||
@@ -36,8 +36,8 @@ import getConfig from './util/get-config';
|
||||
import * as ERRORS from './util/errors-ts';
|
||||
import { NowError } from './util/now-error';
|
||||
import { SENTRY_DSN } from './util/constants.ts';
|
||||
import { metrics, shouldCollectMetrics } from './util/metrics.ts';
|
||||
import getUpdateCommand from './util/get-update-command';
|
||||
import { metrics, shouldCollectMetrics } from './util/metrics.ts';
|
||||
|
||||
const NOW_DIR = getNowDir();
|
||||
const NOW_CONFIG_PATH = configFiles.getConfigFilePath();
|
||||
@@ -53,7 +53,7 @@ sourceMap.install();
|
||||
Sentry.init({
|
||||
dsn: SENTRY_DSN,
|
||||
release: `now-cli@${pkg.version}`,
|
||||
environment: pkg.version.includes('canary') ? 'canary' : 'stable'
|
||||
environment: pkg.version.includes('canary') ? 'canary' : 'stable',
|
||||
});
|
||||
|
||||
let debug = () => {};
|
||||
@@ -71,7 +71,7 @@ const main = async argv_ => {
|
||||
'--version': Boolean,
|
||||
'-v': '--version',
|
||||
'--debug': Boolean,
|
||||
'-d': '--debug'
|
||||
'-d': '--debug',
|
||||
},
|
||||
{ permissive: true }
|
||||
);
|
||||
@@ -102,7 +102,10 @@ const main = async argv_ => {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (localConfig instanceof NowError && !(localConfig instanceof ERRORS.CantFindConfig)) {
|
||||
if (
|
||||
localConfig instanceof NowError &&
|
||||
!(localConfig instanceof ERRORS.CantFindConfig)
|
||||
) {
|
||||
output.error(`Failed to load local config file: ${localConfig.message}`);
|
||||
return 1;
|
||||
}
|
||||
@@ -118,7 +121,7 @@ const main = async argv_ => {
|
||||
if (targetOrSubcommand !== 'update') {
|
||||
update = await checkForUpdate(pkg, {
|
||||
interval: ms('1d'),
|
||||
distTag: pkg.version.includes('canary') ? 'canary' : 'latest'
|
||||
distTag: pkg.version.includes('canary') ? 'canary' : 'latest',
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
@@ -135,7 +138,15 @@ const main = async argv_ => {
|
||||
console.log(
|
||||
info(
|
||||
`${chalk.bgRed('UPDATE AVAILABLE')} ` +
|
||||
`Run ${cmd(await getUpdateCommand())} to install Now CLI ${update.latest}`
|
||||
`Run ${cmd(await getUpdateCommand())} to install Now CLI ${
|
||||
update.latest
|
||||
}`
|
||||
)
|
||||
);
|
||||
|
||||
console.log(
|
||||
info(
|
||||
`Changelog: https://github.com/zeit/now/releases/tag/now@${update.latest}`
|
||||
)
|
||||
);
|
||||
}
|
||||
@@ -307,9 +318,9 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error(
|
||||
`${'An unexpected error occurred while trying to write the ' +
|
||||
`default now config to "${hp(
|
||||
NOW_AUTH_CONFIG_PATH
|
||||
)}" `}${err.message}`
|
||||
`default now config to "${hp(NOW_AUTH_CONFIG_PATH)}" `}${
|
||||
err.message
|
||||
}`
|
||||
)
|
||||
);
|
||||
return 1;
|
||||
@@ -329,7 +340,7 @@ const main = async argv_ => {
|
||||
config,
|
||||
authConfig,
|
||||
localConfig,
|
||||
argv: argv_
|
||||
argv: argv_,
|
||||
};
|
||||
|
||||
let subcommand;
|
||||
@@ -339,7 +350,8 @@ const main = async argv_ => {
|
||||
const targetPath = join(process.cwd(), targetOrSubcommand);
|
||||
const targetPathExists = existsSync(targetPath);
|
||||
const subcommandExists =
|
||||
GLOBAL_COMMANDS.has(targetOrSubcommand) || commands.has(targetOrSubcommand);
|
||||
GLOBAL_COMMANDS.has(targetOrSubcommand) ||
|
||||
commands.has(targetOrSubcommand);
|
||||
|
||||
if (targetPathExists && subcommandExists) {
|
||||
console.error(
|
||||
@@ -412,7 +424,7 @@ const main = async argv_ => {
|
||||
message:
|
||||
'No existing credentials found. Please run ' +
|
||||
`${param('now login')} or pass ${param('--token')}`,
|
||||
slug: 'no-credentials-found'
|
||||
slug: 'no-credentials-found',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -426,7 +438,7 @@ const main = async argv_ => {
|
||||
message: `This command doesn't work with ${param(
|
||||
'--token'
|
||||
)}. Please use ${param('--scope')}.`,
|
||||
slug: 'no-token-allowed'
|
||||
slug: 'no-token-allowed',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -440,7 +452,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: `You defined ${param('--token')}, but it's missing a value`,
|
||||
slug: 'missing-token-value'
|
||||
slug: 'missing-token-value',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -459,11 +471,22 @@ const main = async argv_ => {
|
||||
const targetCommand = commands.get(subcommand);
|
||||
|
||||
if (argv['--team']) {
|
||||
output.warn(`The ${param('--team')} flag is deprecated. Please use ${param('--scope')} instead.`);
|
||||
output.warn(
|
||||
`The ${param('--team')} flag is deprecated. Please use ${param(
|
||||
'--scope'
|
||||
)} instead.`
|
||||
);
|
||||
}
|
||||
|
||||
if (typeof scope === 'string' && targetCommand !== 'login' && targetCommand !== 'dev' && !(targetCommand === 'teams' && argv._[3] !== 'invite')) {
|
||||
const { authConfig: { token } } = ctx;
|
||||
if (
|
||||
typeof scope === 'string' &&
|
||||
targetCommand !== 'login' &&
|
||||
targetCommand !== 'dev' &&
|
||||
!(targetCommand === 'teams' && argv._[3] !== 'invite')
|
||||
) {
|
||||
const {
|
||||
authConfig: { token },
|
||||
} = ctx;
|
||||
const client = new Client({ apiUrl, token });
|
||||
|
||||
let user = null;
|
||||
@@ -475,7 +498,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: `You do not have access to the specified account`,
|
||||
slug: 'scope-not-accessible'
|
||||
slug: 'scope-not-accessible',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -499,7 +522,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: `You do not have access to the specified team`,
|
||||
slug: 'scope-not-accessible'
|
||||
slug: 'scope-not-accessible',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -517,7 +540,7 @@ const main = async argv_ => {
|
||||
console.error(
|
||||
error({
|
||||
message: 'The specified scope does not exist',
|
||||
slug: 'scope-not-existent'
|
||||
slug: 'scope-not-existent',
|
||||
})
|
||||
);
|
||||
|
||||
@@ -577,7 +600,8 @@ const main = async argv_ => {
|
||||
if (shouldCollectMetrics) {
|
||||
metric
|
||||
.event(eventCategory, '1', pkg.version)
|
||||
.exception(err.message).send();
|
||||
.exception(err.message)
|
||||
.send();
|
||||
}
|
||||
|
||||
return 1;
|
||||
@@ -586,7 +610,8 @@ const main = async argv_ => {
|
||||
if (shouldCollectMetrics) {
|
||||
metric
|
||||
.event(eventCategory, '1', pkg.version)
|
||||
.exception(err.message).send();
|
||||
.exception(err.message)
|
||||
.send();
|
||||
}
|
||||
|
||||
// Otherwise it is an unexpected error and we should show the trace
|
||||
@@ -647,9 +672,7 @@ process.on('uncaughtException', handleUnexpected);
|
||||
// subcommands waiting for further data won't work (like `logs` and `logout`)!
|
||||
main(process.argv)
|
||||
.then(exitCode => {
|
||||
process.exitCode = exitCode;
|
||||
process.emit('nowExit');
|
||||
process.on('beforeExit', () => {
|
||||
process.exit(exitCode);
|
||||
});
|
||||
})
|
||||
.catch(handleUnexpected);
|
||||
|
||||
@@ -195,28 +195,31 @@ export type DNSRecord = {
|
||||
};
|
||||
|
||||
type SRVRecordData = {
|
||||
name: string,
|
||||
type: 'SRV',
|
||||
name: string;
|
||||
type: 'SRV';
|
||||
srv: {
|
||||
port: number,
|
||||
priority: number,
|
||||
target: string,
|
||||
weight: number,
|
||||
}
|
||||
}
|
||||
|
||||
type MXRecordData = {
|
||||
name: string,
|
||||
type: 'MX',
|
||||
value: string,
|
||||
mxPriority: number,
|
||||
port: number;
|
||||
priority: number;
|
||||
target: string;
|
||||
weight: number;
|
||||
};
|
||||
};
|
||||
|
||||
export type DNSRecordData = {
|
||||
name: string,
|
||||
type: string,
|
||||
value: string,
|
||||
} | SRVRecordData | MXRecordData;
|
||||
type MXRecordData = {
|
||||
name: string;
|
||||
type: 'MX';
|
||||
value: string;
|
||||
mxPriority: number;
|
||||
};
|
||||
|
||||
export type DNSRecordData =
|
||||
| {
|
||||
name: string;
|
||||
type: string;
|
||||
value: string;
|
||||
}
|
||||
| SRVRecordData
|
||||
| MXRecordData;
|
||||
|
||||
export interface Project {
|
||||
id: string;
|
||||
|
||||
@@ -1,142 +0,0 @@
|
||||
import { JsonBody, StreamBody, context } from 'fetch-h2';
|
||||
|
||||
// Packages
|
||||
import { parse } from 'url';
|
||||
import Sema from 'async-sema';
|
||||
import createOutput, { Output } from './output/create-output';
|
||||
|
||||
const MAX_REQUESTS_PER_CONNECTION = 1000;
|
||||
|
||||
type CurrentContext = ReturnType<typeof context> & {
|
||||
fetchesMade: number;
|
||||
ongoingFetches: number;
|
||||
};
|
||||
|
||||
export interface AgentFetchOptions {
|
||||
method?: 'GET' | 'POST' | 'PATCH' | 'PUT' | 'DELETE';
|
||||
body?: NodeJS.ReadableStream | string;
|
||||
headers: { [key: string]: string };
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a `fetch` version with a similar API to the browser's configured with a
|
||||
* HTTP2 agent. It encodes `body` automatically as JSON.
|
||||
*
|
||||
* @param {String} host
|
||||
* @return {Function} fetch
|
||||
*/
|
||||
export default class NowAgent {
|
||||
_contexts: ReturnType<typeof context>[];
|
||||
_currContext: CurrentContext;
|
||||
_output: Output;
|
||||
_protocol?: string;
|
||||
_sema: Sema;
|
||||
_url: string;
|
||||
|
||||
constructor(url: string, { debug = false } = {}) {
// We use multiple contexts because each context represents one connection
// With nginx, we're limited to 1000 requests before a connection is closed
// http://nginx.org/en/docs/http/ngx_http_v2_module.html#http2_max_requests
// To get around this, we keep track of requests made on a connection. When we're about to hit 1000
// we start up a new connection, and re-route all future traffic through the new connection
// and when the final request from the old connection resolves, we auto-close the old connection
this._contexts = [context()];
this._currContext = {
|
||||
...this._contexts[0],
|
||||
fetchesMade: 0,
|
||||
ongoingFetches: 0
|
||||
};
|
||||
|
||||
const parsed = parse(url);
|
||||
this._url = url;
|
||||
this._protocol = parsed.protocol;
|
||||
this._sema = new Sema(20);
|
||||
this._output = createOutput({ debug });
|
||||
}
|
||||
|
||||
setConcurrency({
|
||||
maxStreams,
|
||||
capacity
|
||||
}: {
|
||||
maxStreams: number;
|
||||
capacity: number;
|
||||
}) {
|
||||
this._sema = new Sema(maxStreams || 20, { capacity });
|
||||
}
|
||||
|
||||
async fetch(path: string, opts: AgentFetchOptions) {
|
||||
const { debug } = this._output;
|
||||
await this._sema.acquire();
|
||||
let currentContext: CurrentContext;
|
||||
this._currContext.fetchesMade++;
|
||||
if (this._currContext.fetchesMade >= MAX_REQUESTS_PER_CONNECTION) {
|
||||
const ctx = { ...context(), fetchesMade: 1, ongoingFetches: 0 };
|
||||
this._contexts.push(ctx);
|
||||
this._currContext = ctx;
|
||||
}
|
||||
|
||||
// If we're changing contexts, we don't want to record the ongoingFetch on the old context
|
||||
// That'll cause an off-by-one error when trying to close the old socket later
|
||||
this._currContext.ongoingFetches++;
|
||||
currentContext = this._currContext;
|
||||
|
||||
debug(
|
||||
`Total requests made on socket #${this._contexts.length}: ${this
|
||||
._currContext.fetchesMade}`
|
||||
);
|
||||
debug(
|
||||
`Concurrent requests on socket #${this._contexts.length}: ${this
|
||||
._currContext.ongoingFetches}`
|
||||
);
|
||||
|
||||
let body: JsonBody | StreamBody | string | undefined;
|
||||
if (opts.body && typeof opts.body === 'object') {
|
||||
if (typeof (<NodeJS.ReadableStream>opts.body).pipe === 'function') {
|
||||
body = new StreamBody(<NodeJS.ReadableStream>opts.body);
|
||||
} else {
|
||||
opts.headers['Content-Type'] = 'application/json';
|
||||
body = new JsonBody(opts.body);
|
||||
}
|
||||
} else {
|
||||
body = opts.body;
|
||||
}
|
||||
|
||||
const { host, protocol } = parse(path);
|
||||
const url = host ? `${protocol}//${host}` : this._url;
|
||||
const handleCompleted = async <T>(res: T) => {
|
||||
currentContext.ongoingFetches--;
|
||||
if (
|
||||
(currentContext !== this._currContext || host) &&
|
||||
currentContext.ongoingFetches <= 0
|
||||
) {
|
||||
// We've completely moved on to a new socket
|
||||
// close the old one
|
||||
|
||||
// TODO: Fix race condition:
|
||||
// If the response is a stream, and the server is still streaming data
|
||||
// we should check if the stream has closed before disconnecting
|
||||
// hasCompleted CAN technically be called before the res body stream is closed
|
||||
debug('Closing old socket');
|
||||
currentContext.disconnect(url);
|
||||
}
|
||||
|
||||
this._sema.release();
|
||||
return res;
|
||||
};
|
||||
|
||||
return currentContext
|
||||
.fetch((host ? '' : this._url) + path, { ...opts, body })
|
||||
.then(res => handleCompleted(res))
|
||||
.catch((err: Error) => {
|
||||
handleCompleted(null);
|
||||
throw err;
|
||||
});
|
||||
}
|
||||
|
||||
close() {
|
||||
const { debug } = this._output;
|
||||
debug('Closing agent');
|
||||
|
||||
this._currContext.disconnect(this._url);
|
||||
}
|
||||
}
|
||||
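As a side note on the constructor comment above, the rotation idea can be illustrated with a small self-contained sketch. The `makeContext` factory, the request cap constant, and the logging below are stand-ins for the real HTTP/2 context and its `disconnect` call, not the agent's actual API.

```ts
interface Ctx {
  id: number;
  fetchesMade: number;
  ongoingFetches: number;
}

const MAX_REQUESTS_PER_CONNECTION = 1000; // mirrors nginx's http2_max_requests default

function makeContext(id: number): Ctx {
  return { id, fetchesMade: 0, ongoingFetches: 0 };
}

class RotatingPool {
  private current = makeContext(0);
  private nextId = 1;

  async run<T>(task: () => Promise<T>): Promise<T> {
    this.current.fetchesMade++;
    if (this.current.fetchesMade >= MAX_REQUESTS_PER_CONNECTION) {
      // Route all future traffic through a fresh connection.
      this.current = makeContext(this.nextId++);
      this.current.fetchesMade = 1;
    }
    const ctx = this.current;
    ctx.ongoingFetches++;
    try {
      return await task();
    } finally {
      ctx.ongoingFetches--;
      if (ctx !== this.current && ctx.ongoingFetches <= 0) {
        // The old connection has drained; this is where it would be disconnected.
        console.log(`connection #${ctx.id} drained, closing`);
      }
    }
  }
}

// Usage: every request is funneled through the pool.
new RotatingPool().run(() => Promise.resolve('ok')).then(console.log);
```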
@@ -34,6 +34,11 @@ export default async function getDeploymentForAlias(
  }

  const appName = await getAppName(output, localConfig, localConfigPath);

  if (!appName) {
    return null;
  }

  const deployment = await getAppLastDeployment(
    output,
    client,
@@ -7,7 +7,11 @@ export default async function getInferredTargets(
  output: Output,
  config: Config
) {
  output.warn(`The ${cmd('now alias')} command (no arguments) was deprecated in favour of ${cmd('now --prod')}.`);
  output.warn(
    `The ${cmd(
      'now alias'
    )} command (no arguments) was deprecated in favor of ${cmd('now --prod')}.`
  );

  // This field is deprecated, warn about it
  if (config.aliases) {
@@ -1,9 +1,9 @@
import qs from 'querystring';
import { EventEmitter } from 'events';
import { parse as parseUrl } from 'url';
import fetch, { RequestInit } from 'node-fetch';
import retry, { RetryFunction, Options as RetryOptions } from 'async-retry';
import createOutput, { Output } from './output/create-output';
import Agent, { AgentFetchOptions } from './agent';
import responseError from './response-error';
import ua from './ua';

@@ -17,7 +17,6 @@ export type FetchOptions = {
};

export default class Client extends EventEmitter {
  _agent: Agent;
  _apiUrl: string;
  _debug: boolean;
  _forceNew: boolean;
@@ -30,7 +29,7 @@ export default class Client extends EventEmitter {
    token,
    currentTeam,
    forceNew = false,
    debug = false
    debug = false,
  }: {
    apiUrl: string;
    token: string;
@@ -44,30 +43,23 @@ export default class Client extends EventEmitter {
    this._forceNew = forceNew;
    this._output = createOutput({ debug });
    this._apiUrl = apiUrl;
    this._agent = new Agent(apiUrl, { debug });
    this._onRetry = this._onRetry.bind(this);
    this.currentTeam = currentTeam;

    const closeAgent = () => {
      this._agent.close();
      process.removeListener('nowExit', closeAgent);
    };

    // @ts-ignore
    process.on('nowExit', closeAgent);
  }

  retry<T>(fn: RetryFunction<T>, { retries = 3, maxTimeout = Infinity } = {}) {
    return retry(fn, {
      retries,
      maxTimeout,
      onRetry: this._onRetry
      onRetry: this._onRetry,
    });
  }

  _fetch(_url: string, opts: FetchOptions = {}) {
    const parsedUrl = parseUrl(_url, true);
    const apiUrl = parsedUrl.host ? `${parsedUrl.protocol}//${parsedUrl.host}` : '';
    const apiUrl = parsedUrl.host
      ? `${parsedUrl.protocol}//${parsedUrl.host}`
      : '';

    if (opts.useCurrentTeam !== false && this.currentTeam) {
      const query = parsedUrl.query;
@@ -80,20 +72,19 @@ export default class Client extends EventEmitter {
      Object.assign(opts, {
        body: JSON.stringify(opts.body),
        headers: Object.assign({}, opts.headers, {
          'Content-Type': 'application/json'
        })
          'Content-Type': 'application/json',
        }),
      });
    }

    opts.headers = opts.headers || {};
    opts.headers.authorization = `Bearer ${this._token}`;
    opts.headers.Authorization = `Bearer ${this._token}`;
    opts.headers['user-agent'] = ua;

    const url = `${apiUrl ? '' : this._apiUrl}${_url}`;
    return this._output.time(
      `${opts.method || 'GET'} ${apiUrl ? '' : this._apiUrl}${_url} ${JSON.stringify(
        opts.body
      ) || ''}`,
      this._agent.fetch(_url, opts as AgentFetchOptions)
      `${opts.method || 'GET'} ${url} ${JSON.stringify(opts.body) || ''}`,
      fetch(url, opts as RequestInit)
    );
  }

@@ -126,7 +117,5 @@ export default class Client extends EventEmitter {
    this._output.debug(`Retrying: ${error}\n${error.stack}`);
  }

  close() {
    this._agent.close();
  }
  close() {}
}

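The net effect of the client.ts hunks above is that `_fetch` now goes straight to `node-fetch` instead of routing through the custom HTTP/2 agent. A rough sketch of the resulting request flow is below; the API URL and token are placeholders, not the real CLI wiring.

```ts
import fetch from 'node-fetch';
import { parse as parseUrl } from 'url';

// Placeholder values; the real client reads these from its constructor options.
const API_URL = 'https://api.example.com';
const TOKEN = 'xxxxxxxx';

async function simpleFetch(path: string, opts: { method?: string; body?: object } = {}) {
  const parsed = parseUrl(path, true);
  // Absolute URLs pass through untouched; relative paths get the API URL prefix.
  const apiUrl = parsed.host ? `${parsed.protocol}//${parsed.host}` : '';
  const url = `${apiUrl ? '' : API_URL}${path}`;

  const headers: { [key: string]: string } = {
    Authorization: `Bearer ${TOKEN}`,
    'user-agent': 'example-cli',
  };

  let body: string | undefined;
  if (opts.body && typeof opts.body === 'object') {
    body = JSON.stringify(opts.body);
    headers['Content-Type'] = 'application/json';
  }

  return fetch(url, { method: opts.method || 'GET', headers, body });
}
```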
@@ -15,40 +15,45 @@ export default async function createDeploy(
    return await now.create(paths, createArgs);
  } catch (error) {
    if (error.code === 'rate_limited') {
      return new ERRORS_TS.DeploymentsRateLimited(error.message);
      throw new ERRORS_TS.DeploymentsRateLimited(error.message);
    }

    // Means that the domain used as a suffix no longer exists
    if (error.code === 'domain_missing') {
      return new ERRORS_TS.DomainNotFound(error.value);
      throw new ERRORS_TS.DomainNotFound(error.value);
    }

    if (error.code === 'domain_not_found' && error.domain) {
      return new ERRORS_TS.DomainNotFound(error.domain);
      throw new ERRORS_TS.DomainNotFound(error.domain);
    }

    // This error occurs when a domain used in the `alias`
    // is not yet verified
    if (error.code === 'domain_not_verified' && error.domain) {
      return new ERRORS_TS.DomainNotVerified(error.domain);
      throw new ERRORS_TS.DomainNotVerified(error.domain);
    }

    // If the domain used as a suffix is not verified, we fail
    if (error.code === 'domain_not_verified' && error.value) {
      return new ERRORS_TS.DomainVerificationFailed(error.value);
      throw new ERRORS_TS.DomainVerificationFailed(error.value);
    }

    // If the domain isn't owned by the user
    if (error.code === 'not_domain_owner') {
      throw new ERRORS_TS.NotDomainOwner(error.message);
    }

    if (error.code === 'builds_rate_limited') {
      return new ERRORS_TS.BuildsRateLimited(error.message);
      throw new ERRORS_TS.BuildsRateLimited(error.message);
    }

    // If the user doesn't have permissions over the domain used as a suffix we fail
    if (error.code === 'forbidden') {
      return new ERRORS_TS.DomainPermissionDenied(error.value, contextName);
      throw new ERRORS_TS.DomainPermissionDenied(error.value, contextName);
    }

    if (error.code === 'bad_request' && error.keyword) {
      return new ERRORS.SchemaValidationFailed(
      throw new ERRORS.SchemaValidationFailed(
        error.message,
        error.keyword,
        error.dataPath,
@@ -57,19 +62,19 @@ export default async function createDeploy(
    }

    if (error.code === 'domain_configured') {
      return new ERRORS_TS.AliasDomainConfigured(error);
      throw new ERRORS_TS.AliasDomainConfigured(error);
    }

    if (error.code === 'missing_build_script') {
      return new ERRORS_TS.MissingBuildScript(error);
      throw new ERRORS_TS.MissingBuildScript(error);
    }

    if (error.code === 'conflicting_file_path') {
      return new ERRORS_TS.ConflictingFilePath(error);
      throw new ERRORS_TS.ConflictingFilePath(error);
    }

    if (error.code === 'conflicting_path_segment') {
      return new ERRORS_TS.ConflictingPathSegment(error);
      throw new ERRORS_TS.ConflictingPathSegment(error);
    }

    // If the cert is missing we try to generate a new one and then retry
@@ -87,10 +92,10 @@ export default async function createDeploy(
    }

    if (error.code === 'not_found') {
      return new ERRORS_TS.DeploymentNotFound({ context: contextName });
      throw new ERRORS_TS.DeploymentNotFound({ context: contextName });
    }

    const certError = mapCertError(error)
    const certError = mapCertError(error);
    if (certError) {
      return certError;
    }

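Because this hunk changes the error mapping from returning error instances to throwing them, call sites shift from inspecting the return value to using try/catch. A minimal sketch of the difference, with a stand-in error class rather than the real `ERRORS_TS` exports:

```ts
class DomainNotFound extends Error {}

// Before: errors came back as return values and had to be sniffed by the caller.
async function deployReturning(): Promise<string | DomainNotFound> {
  return new DomainNotFound('example.com');
}

// After: errors are thrown, so normal control flow only sees successful results.
async function deployThrowing(): Promise<string> {
  throw new DomainNotFound('example.com');
}

async function main() {
  const result = await deployReturning();
  if (result instanceof DomainNotFound) {
    console.error('domain missing (returned)');
  }

  try {
    await deployThrowing();
  } catch (err) {
    if (err instanceof DomainNotFound) {
      console.error('domain missing (thrown)');
    }
  }
}

main();
```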
@@ -4,11 +4,11 @@ import { Deployment } from '../../types';
import {
  DeploymentNotFound,
  DeploymentPermissionDenied,
  InvalidDeploymentId
  InvalidDeploymentId,
} from '../errors-ts';
import mapCertError from '../certs/map-cert-error';

type APIVersion = 'v5' | 'v9';
type APIVersion = 'v5' | 'v10';

export default async function getDeploymentByIdOrHost(
  client: Client,

@@ -1,35 +0,0 @@
//
import sleep from '../sleep';

import createPollingFn from '../create-polling-fn';

import getDeploymentByIdOrThrow from './get-deployment-by-id-or-throw';

const POLLING_INTERVAL = 5000;

async function* getStatusChangeFromPolling(
  now: any,
  contextName: string,
  idOrHost: string,
  initialState: string
) {
  const pollDeployment = createPollingFn(
    getDeploymentByIdOrThrow,
    POLLING_INTERVAL
  );
  let prevState = initialState;
  for await (const deployment of pollDeployment(now, contextName, idOrHost)) {
    if (prevState !== deployment.state) {
      await sleep(5000);
      yield {
        type: 'state-change',
        created: Date.now(),
        payload: { value: deployment.state }
      };
    } else {
      prevState = deployment.state;
    }
  }
}

export default getStatusChangeFromPolling;
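The deleted helper above built on `createPollingFn`. For reference, here is a self-contained sketch of that async-generator polling pattern; `fetchState` is a hypothetical stand-in for `getDeploymentByIdOrThrow`, and the interval handling is only approximate.

```ts
const sleep = (ms: number) => new Promise<void>(r => setTimeout(r, ms));

// Hypothetical stand-in for getDeploymentByIdOrThrow.
async function fetchState(_id: string): Promise<{ state: string }> {
  return { state: Math.random() > 0.7 ? 'READY' : 'BUILDING' };
}

// Wraps a one-shot fetcher in an endless async generator that yields on an
// interval, which is roughly the contract `createPollingFn` provided.
function createPollingFn<T, A extends unknown[]>(
  fn: (...args: A) => Promise<T>,
  interval: number
) {
  return async function* (...args: A) {
    while (true) {
      yield await fn(...args);
      await sleep(interval);
    }
  };
}

async function watchDeployment(id: string, initialState: string) {
  const poll = createPollingFn(fetchState, 5000);
  let prevState = initialState;
  for await (const deployment of poll(id)) {
    if (prevState !== deployment.state) {
      console.log('state-change', deployment.state);
      return;
    }
    prevState = deployment.state;
  }
}

watchDeployment('dpl_123', 'BUILDING');
```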
246
packages/now-cli/src/util/deploy/process-deployment.ts
Normal file
@@ -0,0 +1,246 @@
|
||||
import bytes from 'bytes';
|
||||
import Progress from 'progress';
|
||||
import chalk from 'chalk';
|
||||
import pluralize from 'pluralize';
|
||||
import {
|
||||
createDeployment,
|
||||
createLegacyDeployment,
|
||||
DeploymentOptions,
|
||||
} from '../../../../now-client';
|
||||
import wait from '../output/wait';
|
||||
import { Output } from '../output';
|
||||
// @ts-ignore
|
||||
import Now from '../../util';
|
||||
import { NowConfig } from '../dev/types';
|
||||
|
||||
export default async function processDeployment({
|
||||
now,
|
||||
output,
|
||||
hashes,
|
||||
paths,
|
||||
requestBody,
|
||||
uploadStamp,
|
||||
deployStamp,
|
||||
legacy,
|
||||
env,
|
||||
quiet,
|
||||
nowConfig,
|
||||
}: {
|
||||
now: Now;
|
||||
output: Output;
|
||||
hashes: { [key: string]: any };
|
||||
paths: string[];
|
||||
requestBody: DeploymentOptions;
|
||||
uploadStamp: () => number;
|
||||
deployStamp: () => number;
|
||||
legacy: boolean;
|
||||
env: any;
|
||||
quiet: boolean;
|
||||
nowConfig?: NowConfig;
|
||||
}) {
|
||||
const { warn, log, debug, note } = output;
|
||||
let bar: Progress | null = null;
|
||||
|
||||
const path0 = paths[0];
|
||||
const opts: DeploymentOptions = {
|
||||
...requestBody,
|
||||
debug: now._debug,
|
||||
apiUrl: now._apiUrl,
|
||||
};
|
||||
|
||||
if (!legacy) {
|
||||
let queuedSpinner = null;
|
||||
let buildSpinner = null;
|
||||
let deploySpinner = null;
|
||||
|
||||
for await (const event of createDeployment(path0, opts, nowConfig)) {
|
||||
if (event.type === 'hashes-calculated') {
|
||||
hashes = event.payload;
|
||||
}
|
||||
|
||||
if (event.type === 'warning') {
|
||||
warn(event.payload);
|
||||
}
|
||||
|
||||
if (event.type === 'notice') {
|
||||
note(event.payload);
|
||||
}
|
||||
|
||||
if (event.type === 'file_count') {
|
||||
debug(
|
||||
`Total files ${event.payload.total.size}, ${event.payload.missing.length} changed`
|
||||
);
|
||||
|
||||
if (!quiet) {
|
||||
log(
|
||||
`Synced ${pluralize(
|
||||
'file',
|
||||
event.payload.missing.length,
|
||||
true
|
||||
)} ${uploadStamp()}`
|
||||
);
|
||||
}
|
||||
|
||||
const missingSize = event.payload.missing
|
||||
.map((sha: string) => event.payload.total.get(sha).data.length)
|
||||
.reduce((a: number, b: number) => a + b, 0);
|
||||
|
||||
bar = new Progress(`${chalk.gray('>')} Upload [:bar] :percent :etas`, {
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: missingSize,
|
||||
clear: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (event.type === 'file-uploaded') {
|
||||
debug(
|
||||
`Uploaded: ${event.payload.file.names.join(' ')} (${bytes(
|
||||
event.payload.file.data.length
|
||||
)})`
|
||||
);
|
||||
|
||||
if (bar) {
|
||||
bar.tick(event.payload.file.data.length);
|
||||
}
|
||||
}
|
||||
|
||||
if (event.type === 'created') {
|
||||
now._host = event.payload.url;
|
||||
|
||||
if (!quiet) {
|
||||
const version = legacy ? `${chalk.grey('[v1]')} ` : '';
|
||||
log(`https://${event.payload.url} ${version}${deployStamp()}`);
|
||||
} else {
|
||||
process.stdout.write(`https://${event.payload.url}`);
|
||||
}
|
||||
|
||||
if (queuedSpinner === null) {
|
||||
queuedSpinner = wait('Queued...');
|
||||
}
|
||||
}
|
||||
|
||||
if (
|
||||
event.type === 'build-state-changed' &&
|
||||
event.payload.readyState === 'BUILDING'
|
||||
) {
|
||||
if (queuedSpinner) {
|
||||
queuedSpinner();
|
||||
}
|
||||
|
||||
if (buildSpinner === null) {
|
||||
buildSpinner = wait('Building...');
|
||||
}
|
||||
}
|
||||
|
||||
if (event.type === 'all-builds-completed') {
|
||||
if (queuedSpinner) {
|
||||
queuedSpinner();
|
||||
}
|
||||
if (buildSpinner) {
|
||||
buildSpinner();
|
||||
}
|
||||
|
||||
deploySpinner = wait('Finalizing...');
|
||||
}
|
||||
|
||||
// Handle error events
|
||||
if (event.type === 'error') {
|
||||
if (queuedSpinner) {
|
||||
queuedSpinner();
|
||||
}
|
||||
if (buildSpinner) {
|
||||
buildSpinner();
|
||||
}
|
||||
if (deploySpinner) {
|
||||
deploySpinner();
|
||||
}
|
||||
|
||||
throw await now.handleDeploymentError(event.payload, { hashes, env });
|
||||
}
|
||||
|
||||
// Handle ready event
|
||||
if (event.type === 'alias-assigned') {
|
||||
if (queuedSpinner) {
|
||||
queuedSpinner();
|
||||
}
|
||||
if (buildSpinner) {
|
||||
buildSpinner();
|
||||
}
|
||||
if (deploySpinner) {
|
||||
deploySpinner();
|
||||
}
|
||||
|
||||
return event.payload;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for await (const event of createLegacyDeployment(path0, opts, nowConfig)) {
|
||||
if (event.type === 'hashes-calculated') {
|
||||
hashes = event.payload;
|
||||
}
|
||||
|
||||
if (event.type === 'file_count') {
|
||||
debug(
|
||||
`Total files ${event.payload.total.size}, ${event.payload.missing.length} changed`
|
||||
);
|
||||
if (!quiet) {
|
||||
log(
|
||||
`Synced ${pluralize(
|
||||
'file',
|
||||
event.payload.missing.length,
|
||||
true
|
||||
)} ${uploadStamp()}`
|
||||
);
|
||||
}
|
||||
|
||||
const missingSize = event.payload.missing
|
||||
.map((sha: string) => event.payload.total.get(sha).data.length)
|
||||
.reduce((a: number, b: number) => a + b, 0);
|
||||
|
||||
bar = new Progress(`${chalk.gray('>')} Upload [:bar] :percent :etas`, {
|
||||
width: 20,
|
||||
complete: '=',
|
||||
incomplete: '',
|
||||
total: missingSize,
|
||||
clear: true,
|
||||
});
|
||||
}
|
||||
|
||||
if (event.type === 'file-uploaded') {
|
||||
debug(
|
||||
`Uploaded: ${event.payload.file.names.join(' ')} (${bytes(
|
||||
event.payload.file.data.length
|
||||
)})`
|
||||
);
|
||||
|
||||
if (bar) {
|
||||
bar.tick(event.payload.file.data.length);
|
||||
}
|
||||
}
|
||||
|
||||
if (event.type === 'created') {
|
||||
now._host = event.payload.url;
|
||||
|
||||
if (!quiet) {
|
||||
const version = legacy ? `${chalk.grey('[v1]')} ` : '';
|
||||
log(`${event.payload.url} ${version}${deployStamp()}`);
|
||||
} else {
|
||||
process.stdout.write(`https://${event.payload.url}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle error events
|
||||
if (event.type === 'error') {
|
||||
throw await now.handleDeploymentError(event.payload, { hashes, env });
|
||||
}
|
||||
|
||||
// Handle ready event
|
||||
if (event.type === 'ready') {
|
||||
log(`Build completed`);
|
||||
return event.payload;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
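The new `process-deployment.ts` above drives a byte-based upload progress bar from the `file_count` and `file-uploaded` events. A minimal sketch of that use of the `progress` package, with made-up sizes standing in for the real upload payloads:

```ts
import Progress from 'progress';

// Made-up payload: per-file upload sizes still missing from the server.
const missingSizes = [1024, 2048, 512];
const totalBytes = missingSizes.reduce((a, b) => a + b, 0);

// Same bar format and options as the CLI uses.
const bar = new Progress('> Upload [:bar] :percent :etas', {
  width: 20,
  complete: '=',
  incomplete: '',
  total: totalBytes,
  clear: true,
});

for (const size of missingSizes) {
  // In the CLI this happens once per `file-uploaded` event.
  bar.tick(size);
}
```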
18
packages/now-cli/src/util/deploy/should-deploy-dir.ts
Normal file
@@ -0,0 +1,18 @@
import { homedir } from 'os';
import promptBool from '../input/prompt-bool';
import { Output } from '../output';

export default async function shouldDeployDir(argv0: string, output: Output) {
  let yes = true;
  if (argv0 === homedir()) {
    if (
      !(await promptBool(
        'You are deploying your home directory. Do you want to continue?'
      ))
    ) {
      output.log('Aborted');
      yes = false;
    }
  }
  return yes;
}
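A small sketch of how a caller might use the new `shouldDeployDir` guard before kicking off a deployment; the `run` wrapper and its logging are illustrative only.

```ts
import shouldDeployDir from './should-deploy-dir';
import { Output } from '../output';

async function run(path: string, output: Output) {
  if (!(await shouldDeployDir(path, output))) {
    return 1; // the user declined to deploy their home directory
  }
  console.log(`deploying ${path}`); // placeholder for the real deploy flow
  return 0;
}
```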
@@ -2,31 +2,33 @@ import chalk from 'chalk';
|
||||
import execa from 'execa';
|
||||
import semver from 'semver';
|
||||
import pipe from 'promisepipe';
|
||||
import retry from 'async-retry';
|
||||
import npa from 'npm-package-arg';
|
||||
import { extract } from 'tar-fs';
|
||||
import { createHash } from 'crypto';
|
||||
import { createGunzip } from 'zlib';
|
||||
import { join, resolve } from 'path';
|
||||
import { funCacheDir } from '@zeit/fun';
|
||||
import cacheDirectory from 'cache-or-tmp-directory';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
import XDGAppPaths from 'xdg-app-paths';
|
||||
import {
|
||||
createReadStream,
|
||||
mkdirp,
|
||||
readFile,
|
||||
readJSON,
|
||||
writeFile,
|
||||
remove
|
||||
remove,
|
||||
} from 'fs-extra';
|
||||
import pkg from '../../../package.json';
|
||||
|
||||
import { NoBuilderCacheError, BuilderCacheCleanError } from '../errors-ts';
|
||||
import { NoBuilderCacheError } from '../errors-ts';
|
||||
import wait from '../output/wait';
|
||||
import { Output } from '../output';
|
||||
import { getDistTag } from '../get-dist-tag';
|
||||
import { devDependencies } from '../../../package.json';
|
||||
|
||||
import * as staticBuilder from './static-builder';
|
||||
import { BuilderWithPackage, Package } from './types';
|
||||
import { BuilderWithPackage } from './types';
|
||||
import { getBundledBuilders } from './get-bundled-builders';
|
||||
|
||||
const registryTypes = new Set(['version', 'tag', 'range']);
|
||||
|
||||
@@ -34,14 +36,10 @@ const localBuilders: { [key: string]: BuilderWithPackage } = {
|
||||
'@now/static': {
|
||||
runInProcess: true,
|
||||
builder: Object.freeze(staticBuilder),
|
||||
package: Object.freeze({ name: '@now/static', version: '' })
|
||||
}
|
||||
package: Object.freeze({ name: '@now/static', version: '' }),
|
||||
},
|
||||
};
|
||||
|
||||
const bundledBuilders = Object.keys(devDependencies).filter(d =>
|
||||
d.startsWith('@now/')
|
||||
);
|
||||
|
||||
const distTag = getDistTag(pkg.version);
|
||||
|
||||
export const cacheDirPromise = prepareCacheDir();
|
||||
@@ -80,7 +78,7 @@ export async function prepareCacheDir() {
|
||||
const { NOW_BUILDER_CACHE_DIR } = process.env;
|
||||
const designated = NOW_BUILDER_CACHE_DIR
|
||||
? resolve(NOW_BUILDER_CACHE_DIR)
|
||||
: cacheDirectory('co.zeit.now');
|
||||
: XDGAppPaths('co.zeit.now').cache();
|
||||
|
||||
if (!designated) {
|
||||
throw new NoBuilderCacheError();
|
||||
@@ -117,7 +115,7 @@ export async function prepareBuilderDir() {
|
||||
export async function prepareBuilderModulePath() {
|
||||
const [builderDir, builderContents] = await Promise.all([
|
||||
builderDirPromise,
|
||||
readFile(join(__dirname, 'builder-worker.js'))
|
||||
readFile(join(__dirname, 'builder-worker.js')),
|
||||
]);
|
||||
let needsWrite = false;
|
||||
const builderSha = getSha(builderContents);
|
||||
@@ -140,24 +138,6 @@ export async function prepareBuilderModulePath() {
|
||||
return cachedBuilderPath;
|
||||
}
|
||||
|
||||
// Is responsible for cleaning the cache
|
||||
export async function cleanCacheDir(output: Output): Promise<void> {
|
||||
const cacheDir = await cacheDirPromise;
|
||||
try {
|
||||
output.log(chalk`{magenta Deleting} ${cacheDir}`);
|
||||
await remove(cacheDir);
|
||||
} catch (err) {
|
||||
throw new BuilderCacheCleanError(cacheDir, err.message);
|
||||
}
|
||||
|
||||
try {
|
||||
await remove(funCacheDir);
|
||||
output.log(chalk`{magenta Deleting} ${funCacheDir}`);
|
||||
} catch (err) {
|
||||
throw new BuilderCacheCleanError(funCacheDir, err.message);
|
||||
}
|
||||
}
|
||||
|
||||
function getNpmVersion(use = ''): string {
|
||||
const parsed = npa(use);
|
||||
if (registryTypes.has(parsed.type)) {
|
||||
@@ -179,7 +159,7 @@ export function getBuildUtils(packages: string[]): string {
|
||||
export function filterPackage(
|
||||
builderSpec: string,
|
||||
distTag: string,
|
||||
buildersPkg: Package
|
||||
buildersPkg: PackageJson
|
||||
) {
|
||||
if (builderSpec in localBuilders) return false;
|
||||
const parsed = npa(builderSpec);
|
||||
@@ -187,7 +167,7 @@ export function filterPackage(
|
||||
parsed.name &&
|
||||
parsed.type === 'tag' &&
|
||||
parsed.fetchSpec === distTag &&
|
||||
bundledBuilders.includes(parsed.name) &&
|
||||
getBundledBuilders().includes(parsed.name) &&
|
||||
buildersPkg.dependencies
|
||||
) {
|
||||
const parsedInstalled = npa(
|
||||
@@ -251,19 +231,23 @@ export async function installBuilders(
|
||||
`Installing builders: ${packagesToInstall.sort().join(', ')}`
|
||||
);
|
||||
try {
|
||||
await execa(
|
||||
process.execPath,
|
||||
[
|
||||
yarnPath,
|
||||
'add',
|
||||
'--exact',
|
||||
'--no-lockfile',
|
||||
'--non-interactive',
|
||||
...packagesToInstall
|
||||
],
|
||||
{
|
||||
cwd: builderDir
|
||||
}
|
||||
await retry(
|
||||
() =>
|
||||
execa(
|
||||
process.execPath,
|
||||
[
|
||||
yarnPath,
|
||||
'add',
|
||||
'--exact',
|
||||
'--no-lockfile',
|
||||
'--non-interactive',
|
||||
...packagesToInstall,
|
||||
],
|
||||
{
|
||||
cwd: builderDir,
|
||||
}
|
||||
),
|
||||
{ retries: 2 }
|
||||
);
|
||||
} finally {
|
||||
stopSpinner();
|
||||
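The change above wraps the yarn invocation in `async-retry` so a flaky network doesn't fail the builder install outright. The same pattern in isolation (the command and package list here are arbitrary examples, not the builder install itself):

```ts
import execa from 'execa';
import retry from 'async-retry';

async function installWithRetry(cwd: string, packages: string[]) {
  // Retry the whole install up to two more times if it rejects.
  await retry(
    () =>
      execa(
        'yarn',
        ['add', '--exact', '--no-lockfile', '--non-interactive', ...packages],
        { cwd }
      ),
    { retries: 2 }
  );
}
```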
@@ -286,19 +270,23 @@ export async function updateBuilders(
|
||||
|
||||
packages.push(getBuildUtils(packages));
|
||||
|
||||
await execa(
|
||||
process.execPath,
|
||||
[
|
||||
yarnPath,
|
||||
'add',
|
||||
'--exact',
|
||||
'--no-lockfile',
|
||||
'--non-interactive',
|
||||
...packages.filter(p => p !== '@now/static')
|
||||
],
|
||||
{
|
||||
cwd: builderDir
|
||||
}
|
||||
await retry(
|
||||
() =>
|
||||
execa(
|
||||
process.execPath,
|
||||
[
|
||||
yarnPath,
|
||||
'add',
|
||||
'--exact',
|
||||
'--no-lockfile',
|
||||
'--non-interactive',
|
||||
...packages.filter(p => p !== '@now/static'),
|
||||
],
|
||||
{
|
||||
cwd: builderDir,
|
||||
}
|
||||
),
|
||||
{ retries: 2 }
|
||||
);
|
||||
|
||||
const updatedPackages: string[] = [];
|
||||
@@ -336,7 +324,7 @@ export async function getBuilder(
|
||||
const pkg = require(join(dest, 'package.json'));
|
||||
builderWithPkg = {
|
||||
builder: Object.freeze(mod),
|
||||
package: Object.freeze(pkg)
|
||||
package: Object.freeze(pkg),
|
||||
};
|
||||
} catch (err) {
|
||||
if (err.code === 'MODULE_NOT_FOUND') {
|
||||
@@ -357,7 +345,7 @@ export async function getBuilder(
|
||||
|
||||
function getPackageName(
|
||||
parsed: npa.Result,
|
||||
buildersPkg: Package
|
||||
buildersPkg: PackageJson
|
||||
): string | null {
|
||||
if (registryTypes.has(parsed.type)) {
|
||||
return parsed.name;
|
||||
@@ -378,7 +366,7 @@ function getSha(buffer: Buffer): string {
|
||||
}
|
||||
|
||||
function hasBundledBuilders(dependencies: { [name: string]: string }): boolean {
|
||||
for (const name of bundledBuilders) {
|
||||
for (const name of getBundledBuilders()) {
|
||||
if (!(name in dependencies)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -2,16 +2,17 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion */
|
||||
import ms from 'ms';
|
||||
import bytes from 'bytes';
|
||||
import { promisify } from 'util';
|
||||
import { delimiter, dirname, join } from 'path';
|
||||
import { fork, ChildProcess } from 'child_process';
|
||||
import { createFunction } from '@zeit/fun';
|
||||
import { File, Lambda, FileBlob, FileFsRef } from '@now/build-utils';
|
||||
import { Builder, File, Lambda, FileBlob, FileFsRef } from '@now/build-utils';
|
||||
import stripAnsi from 'strip-ansi';
|
||||
import chalk from 'chalk';
|
||||
import which from 'which';
|
||||
import plural from 'pluralize';
|
||||
import ora, { Ora } from 'ora';
|
||||
import minimatch from 'minimatch';
|
||||
import _treeKill from 'tree-kill';
|
||||
|
||||
import { Output } from '../output';
|
||||
import highlight from '../output/highlight';
|
||||
@@ -23,13 +24,13 @@ import { builderModulePathPromise, getBuilder } from './builder-cache';
|
||||
import {
|
||||
EnvConfig,
|
||||
NowConfig,
|
||||
BuildConfig,
|
||||
BuildMatch,
|
||||
BuildResult,
|
||||
BuilderInputs,
|
||||
BuilderOutput,
|
||||
BuilderOutputs
|
||||
BuilderOutputs,
|
||||
} from './types';
|
||||
import { normalizeRoutes } from '@now/routing-utils';
|
||||
|
||||
interface BuildMessage {
|
||||
type: string;
|
||||
@@ -41,7 +42,7 @@ interface BuildMessageResult extends BuildMessage {
|
||||
error?: object;
|
||||
}
|
||||
|
||||
const isLogging = new WeakSet<ChildProcess>();
|
||||
const treeKill = promisify(_treeKill);
|
||||
|
||||
let nodeBinPromise: Promise<string>;
|
||||
|
||||
@@ -49,43 +50,48 @@ async function getNodeBin(): Promise<string> {
|
||||
return which.sync('node', { nothrow: true }) || process.execPath;
|
||||
}
|
||||
|
||||
function pipeChildLogging(child: ChildProcess): void {
|
||||
if (!isLogging.has(child)) {
|
||||
child.stdout!.pipe(process.stdout);
|
||||
child.stderr!.pipe(process.stderr);
|
||||
isLogging.add(child);
|
||||
}
|
||||
}
|
||||
|
||||
async function createBuildProcess(
|
||||
match: BuildMatch,
|
||||
buildEnv: EnvConfig,
|
||||
workPath: string,
|
||||
output: Output,
|
||||
yarnPath?: string
|
||||
yarnPath?: string,
|
||||
debugEnabled: boolean = false
|
||||
): Promise<ChildProcess> {
|
||||
if (!nodeBinPromise) {
|
||||
nodeBinPromise = getNodeBin();
|
||||
}
|
||||
const [execPath, modulePath] = await Promise.all([
|
||||
nodeBinPromise,
|
||||
builderModulePathPromise
|
||||
builderModulePathPromise,
|
||||
]);
|
||||
|
||||
// Ensure that `node` is in the builder's `PATH`
|
||||
let PATH = `${dirname(execPath)}${delimiter}${process.env.PATH}`;
|
||||
|
||||
// Ensure that `yarn` is in the builder's `PATH`
|
||||
if (yarnPath) {
|
||||
PATH = `${yarnPath}${delimiter}${PATH}`;
|
||||
}
|
||||
|
||||
const env: EnvConfig = {
|
||||
...process.env,
|
||||
PATH,
|
||||
...buildEnv,
|
||||
NOW_REGION: 'dev1',
|
||||
};
|
||||
|
||||
// Builders won't show debug logs by default.
|
||||
// The `NOW_BUILDER_DEBUG` env variable enables them.
|
||||
if (debugEnabled) {
|
||||
env.NOW_BUILDER_DEBUG = '1';
|
||||
}
|
||||
|
||||
const buildProcess = fork(modulePath, [], {
|
||||
cwd: workPath,
|
||||
env: {
|
||||
...process.env,
|
||||
PATH,
|
||||
...buildEnv,
|
||||
NOW_REGION: 'dev1'
|
||||
},
|
||||
env,
|
||||
execPath,
|
||||
execArgv: [],
|
||||
stdio: ['ignore', 'pipe', 'pipe', 'ipc']
|
||||
});
|
||||
match.buildProcess = buildProcess;
|
||||
|
||||
@@ -96,9 +102,6 @@ async function createBuildProcess(
|
||||
match.buildProcess = undefined;
|
||||
});
|
||||
|
||||
buildProcess.stdout!.setEncoding('utf8');
|
||||
buildProcess.stderr!.setEncoding('utf8');
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
// The first message that the builder process sends is the `ready` event
|
||||
buildProcess.once('message', ({ type }) => {
|
||||
@@ -122,7 +125,7 @@ export async function executeBuild(
|
||||
filesRemoved?: string[]
|
||||
): Promise<void> {
|
||||
const {
|
||||
builderWithPkg: { runInProcess, builder, package: pkg }
|
||||
builderWithPkg: { runInProcess, builder, package: pkg },
|
||||
} = match;
|
||||
const { src: entrypoint } = match;
|
||||
const { env, debug, buildEnv, yarnPath, cwd: workPath } = devServer;
|
||||
@@ -150,7 +153,8 @@ export async function executeBuild(
|
||||
buildEnv,
|
||||
workPath,
|
||||
devServer.output,
|
||||
yarnPath
|
||||
yarnPath,
|
||||
debug
|
||||
);
|
||||
}
|
||||
|
||||
@@ -165,91 +169,45 @@ export async function executeBuild(
|
||||
filesChanged,
|
||||
filesRemoved,
|
||||
env,
|
||||
buildEnv
|
||||
}
|
||||
buildEnv,
|
||||
},
|
||||
};
|
||||
|
||||
let buildResultOrOutputs: BuilderOutputs | BuildResult;
|
||||
if (buildProcess) {
|
||||
let spinLogger;
|
||||
let spinner: Ora | undefined;
|
||||
const fullLogs: string[] = [];
|
||||
buildProcess.send({
|
||||
type: 'build',
|
||||
builderName: pkg.name,
|
||||
buildParams,
|
||||
});
|
||||
|
||||
if (isInitialBuild && !debug && process.stdout.isTTY) {
|
||||
const logTitle = `${chalk.bold(
|
||||
`Preparing ${chalk.underline(entrypoint)} for build`
|
||||
)}:`;
|
||||
spinner = ora(logTitle).start();
|
||||
|
||||
spinLogger = (data: Buffer) => {
|
||||
const rawLog = stripAnsi(data.toString());
|
||||
fullLogs.push(rawLog);
|
||||
|
||||
const lines = rawLog.replace(/\s+$/, '').split('\n');
|
||||
const spinText = `${logTitle} ${lines[lines.length - 1]}`;
|
||||
const maxCols = process.stdout.columns || 80;
|
||||
const overflow = stripAnsi(spinText).length + 2 - maxCols;
|
||||
spinner!.text =
|
||||
overflow > 0 ? `${spinText.slice(0, -overflow - 3)}...` : spinText;
|
||||
};
|
||||
|
||||
buildProcess!.stdout!.on('data', spinLogger);
|
||||
buildProcess!.stderr!.on('data', spinLogger);
|
||||
} else {
|
||||
pipeChildLogging(buildProcess!);
|
||||
}
|
||||
|
||||
try {
|
||||
buildProcess.send({
|
||||
type: 'build',
|
||||
builderName: pkg.name,
|
||||
buildParams
|
||||
});
|
||||
|
||||
buildResultOrOutputs = await new Promise((resolve, reject) => {
|
||||
function onMessage({ type, result, error }: BuildMessageResult) {
|
||||
cleanup();
|
||||
if (type === 'buildResult') {
|
||||
if (result) {
|
||||
resolve(result);
|
||||
} else if (error) {
|
||||
reject(Object.assign(new Error(), error));
|
||||
}
|
||||
} else {
|
||||
reject(new Error(`Got unexpected message type: ${type}`));
|
||||
buildResultOrOutputs = await new Promise((resolve, reject) => {
|
||||
function onMessage({ type, result, error }: BuildMessageResult) {
|
||||
cleanup();
|
||||
if (type === 'buildResult') {
|
||||
if (result) {
|
||||
resolve(result);
|
||||
} else if (error) {
|
||||
reject(Object.assign(new Error(), error));
|
||||
}
|
||||
} else {
|
||||
reject(new Error(`Got unexpected message type: ${type}`));
|
||||
}
|
||||
function onExit(code: number | null, signal: string | null) {
|
||||
cleanup();
|
||||
const err = new Error(
|
||||
`Builder exited with ${signal || code} before sending build result`
|
||||
);
|
||||
reject(err);
|
||||
}
|
||||
function cleanup() {
|
||||
buildProcess!.removeListener('exit', onExit);
|
||||
buildProcess!.removeListener('message', onMessage);
|
||||
}
|
||||
buildProcess!.on('exit', onExit);
|
||||
buildProcess!.on('message', onMessage);
|
||||
});
|
||||
} catch (err) {
|
||||
if (spinner) {
|
||||
spinner.stop();
|
||||
spinner = undefined;
|
||||
console.log(fullLogs.join(''));
|
||||
}
|
||||
throw err;
|
||||
} finally {
|
||||
if (spinLogger) {
|
||||
buildProcess.stdout!.removeListener('data', spinLogger);
|
||||
buildProcess.stderr!.removeListener('data', spinLogger);
|
||||
function onExit(code: number | null, signal: string | null) {
|
||||
cleanup();
|
||||
const err = new Error(
|
||||
`Builder exited with ${signal || code} before sending build result`
|
||||
);
|
||||
reject(err);
|
||||
}
|
||||
if (spinner) {
|
||||
spinner.stop();
|
||||
function cleanup() {
|
||||
buildProcess!.removeListener('exit', onExit);
|
||||
buildProcess!.removeListener('message', onMessage);
|
||||
}
|
||||
pipeChildLogging(buildProcess!);
|
||||
}
|
||||
buildProcess!.on('exit', onExit);
|
||||
buildProcess!.on('message', onMessage);
|
||||
});
|
||||
} else {
|
||||
buildResultOrOutputs = await builder.build(buildParams);
|
||||
}
|
||||
@@ -260,15 +218,42 @@ export async function executeBuild(
|
||||
result = {
|
||||
output: buildResultOrOutputs as BuilderOutputs,
|
||||
routes: [],
|
||||
watch: []
|
||||
watch: [],
|
||||
distPath:
|
||||
typeof buildResultOrOutputs.distPath === 'string'
|
||||
? buildResultOrOutputs.distPath
|
||||
: undefined,
|
||||
};
|
||||
} else {
|
||||
result = buildResultOrOutputs as BuildResult;
|
||||
}
|
||||
|
||||
// Normalize Builder Routes
|
||||
const normalized = normalizeRoutes(result.routes);
|
||||
if (normalized.error) {
|
||||
throw new Error(normalized.error.message);
|
||||
} else {
|
||||
result.routes = normalized.routes || [];
|
||||
}
|
||||
|
||||
const { output } = result;
|
||||
|
||||
// Mimic fmeta-util and convert cleanUrls
|
||||
if (nowConfig.cleanUrls) {
|
||||
Object.entries(output)
|
||||
.filter(([name, value]) => name.endsWith('.html'))
|
||||
.forEach(([name, value]) => {
|
||||
const cleanName = name.slice(0, -5);
|
||||
delete output[name];
|
||||
output[cleanName] = value;
|
||||
if (value.type === 'FileBlob' || value.type === 'FileFsRef') {
|
||||
value.contentType = value.contentType || 'text/html; charset=utf-8';
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Convert the JSON-ified output map back into their corresponding `File`
|
||||
// subclass type instances.
|
||||
const output = result.output as BuilderOutputs;
|
||||
for (const name of Object.keys(output)) {
|
||||
const obj = output[name] as File;
|
||||
let lambda: Lambda;
|
||||
@@ -346,9 +331,9 @@ export async function executeBuild(
|
||||
...nowConfig.env,
|
||||
...asset.environment,
|
||||
...env,
|
||||
NOW_REGION: 'dev1'
|
||||
}
|
||||
}
|
||||
NOW_REGION: 'dev1',
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
@@ -382,7 +367,7 @@ export async function getBuildMatches(
|
||||
return matches;
|
||||
}
|
||||
|
||||
const noMatches: BuildConfig[] = [];
|
||||
const noMatches: Builder[] = [];
|
||||
const builds = nowConfig.builds || [{ src: '**', use: '@now/static' }];
|
||||
|
||||
for (const buildConfig of builds) {
|
||||
@@ -420,7 +405,7 @@ export async function getBuildMatches(
|
||||
builderWithPkg,
|
||||
buildOutput: {},
|
||||
buildResults: new Map(),
|
||||
buildTimestamp: 0
|
||||
buildTimestamp: 0,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -442,3 +427,35 @@ export async function getBuildMatches(
|
||||
|
||||
return matches;
|
||||
}
|
||||
|
||||
export async function shutdownBuilder(
|
||||
match: BuildMatch,
|
||||
{ debug }: Output
|
||||
): Promise<void> {
|
||||
const ops: Promise<void>[] = [];
|
||||
|
||||
if (match.buildProcess) {
|
||||
const { pid } = match.buildProcess;
|
||||
debug(`Killing builder sub-process with PID ${pid}`);
|
||||
const killPromise = treeKill(pid)
|
||||
.then(() => {
|
||||
debug(`Killed builder with PID ${pid}`);
|
||||
})
|
||||
.catch((err: Error) => {
|
||||
debug(`Failed to kill builder with PID ${pid}: ${err}`);
|
||||
});
|
||||
ops.push(killPromise);
|
||||
delete match.buildProcess;
|
||||
}
|
||||
|
||||
if (match.buildOutput) {
|
||||
for (const asset of Object.values(match.buildOutput)) {
|
||||
if (asset.type === 'Lambda' && asset.fn) {
|
||||
debug(`Shutting down Lambda function`);
|
||||
ops.push(asset.fn.destroy());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(ops);
|
||||
}
|
||||
|
||||
@@ -12,13 +12,13 @@ export const httpStatusDescriptionMap = new Map([
|
||||
[502, 'BAD_GATEWAY'],
|
||||
[503, 'SERVICE_UNAVAILABLE'],
|
||||
[504, 'GATEWAY_TIMEOUT'],
|
||||
[508, 'INFINITE_LOOP']
|
||||
[508, 'INFINITE_LOOP'],
|
||||
]);
|
||||
|
||||
export const errorMessageMap = new Map([
|
||||
[400, 'Bad request'],
|
||||
[402, 'Payment required'],
|
||||
[403, 'You don\'t have the required permissions'],
|
||||
[403, "You don't have the required permissions"],
|
||||
[404, 'The page could not be found'],
|
||||
[405, 'Method not allowed'],
|
||||
[410, 'The deployment has been removed'],
|
||||
@@ -28,7 +28,7 @@ export const errorMessageMap = new Map([
|
||||
[501, 'Not implemented'],
|
||||
[503, 'The deployment is currently unavailable'],
|
||||
[504, 'An error occurred with your deployment'],
|
||||
[508, 'Infinite loop detected']
|
||||
[508, 'Infinite loop detected'],
|
||||
]);
|
||||
|
||||
interface ErrorMessage {
|
||||
@@ -40,20 +40,20 @@ interface ErrorMessage {
|
||||
const appError = {
|
||||
title: 'An error occurred with this application.',
|
||||
subtitle: 'This is an error with the application itself, not the platform.',
|
||||
app_error: true
|
||||
app_error: true,
|
||||
};
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
const infrastructureError = {
|
||||
title: 'An internal error occurred with ZEIT Now.',
|
||||
subtitle: 'This is an error with the platform itself, not the application.',
|
||||
app_error: false
|
||||
app_error: false,
|
||||
};
|
||||
|
||||
const pageNotFoundError = {
|
||||
title: 'The page could not be found.',
|
||||
subtitle: 'The page could not be found in the application.',
|
||||
app_error: true
|
||||
app_error: true,
|
||||
};
|
||||
|
||||
export function generateErrorMessage(
|
||||
@@ -68,7 +68,7 @@ export function generateErrorMessage(
|
||||
}
|
||||
return {
|
||||
title: errorMessageMap.get(statusCode) || 'Error occurred',
|
||||
app_error: false
|
||||
app_error: false,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
11
packages/now-cli/src/util/dev/get-bundled-builders.ts
Normal file
@@ -0,0 +1,11 @@
export function getBundledBuilders() {
  return [
    '@now/go',
    '@now/next',
    '@now/node',
    '@now/ruby',
    '@now/python',
    '@now/static-build',
    '@now/build-utils',
  ];
}
@@ -4,7 +4,13 @@ import PCRE from 'pcre-to-regexp';
|
||||
import isURL from './is-url';
|
||||
import DevServer from './server';
|
||||
|
||||
import { HttpHeadersConfig, RouteConfig, RouteResult } from './types';
|
||||
import {
|
||||
HttpHeadersConfig,
|
||||
RouteConfig,
|
||||
RouteResult,
|
||||
NowConfig,
|
||||
} from './types';
|
||||
import { isHandler } from '@now/routing-utils';
|
||||
|
||||
export function resolveRouteParameters(
|
||||
str: string,
|
||||
@@ -40,9 +46,8 @@ export default async function(
|
||||
let idx = -1;
|
||||
for (const routeConfig of routes) {
|
||||
idx++;
|
||||
let { src, headers, methods, handle } = routeConfig;
|
||||
if (handle) {
|
||||
if (handle === 'filesystem' && devServer) {
|
||||
if (isHandler(routeConfig)) {
|
||||
if (routeConfig.handle === 'filesystem' && devServer) {
|
||||
if (await devServer.hasFilesystem(reqPathname)) {
|
||||
break;
|
||||
}
|
||||
@@ -50,18 +55,12 @@ export default async function(
|
||||
continue;
|
||||
}
|
||||
|
||||
let { src, headers, methods } = routeConfig;
|
||||
|
||||
if (Array.isArray(methods) && reqMethod && !methods.includes(reqMethod)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!src.startsWith('^')) {
|
||||
src = `^${src}`;
|
||||
}
|
||||
|
||||
if (!src.endsWith('$')) {
|
||||
src = `${src}$`;
|
||||
}
|
||||
|
||||
const keys: string[] = [];
|
||||
const matcher = PCRE(`%${src}%i`, keys);
|
||||
const match =
|
||||
@@ -98,7 +97,7 @@ export default async function(
|
||||
headers: combinedHeaders,
|
||||
uri_args: query,
|
||||
matched_route: routeConfig,
|
||||
matched_route_idx: idx
|
||||
matched_route_idx: idx,
|
||||
};
|
||||
break;
|
||||
} else {
|
||||
@@ -114,7 +113,7 @@ export default async function(
|
||||
headers: combinedHeaders,
|
||||
uri_args: query,
|
||||
matched_route: routeConfig,
|
||||
matched_route_idx: idx
|
||||
matched_route_idx: idx,
|
||||
};
|
||||
break;
|
||||
}
|
||||
@@ -127,7 +126,7 @@ export default async function(
|
||||
found: false,
|
||||
dest: reqPathname,
|
||||
uri_args: query,
|
||||
headers: combinedHeaders
|
||||
headers: combinedHeaders,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -13,13 +13,15 @@ import serveHandler from 'serve-handler';
|
||||
import { watch, FSWatcher } from 'chokidar';
|
||||
import { parse as parseDotenv } from 'dotenv';
|
||||
import { basename, dirname, extname, join } from 'path';
|
||||
import { getTransformedRoutes } from '@now/routing-utils';
|
||||
import directoryTemplate from 'serve-handler/src/directory';
|
||||
|
||||
import {
|
||||
Builder,
|
||||
FileFsRef,
|
||||
PackageJson,
|
||||
detectBuilders,
|
||||
detectRoutes
|
||||
detectRoutes,
|
||||
} from '@now/build-utils';
|
||||
|
||||
import { once } from '../once';
|
||||
@@ -33,20 +35,29 @@ import { version as cliVersion } from '../../../package.json';
|
||||
import {
|
||||
createIgnore,
|
||||
staticFiles as getFiles,
|
||||
getAllProjectFiles
|
||||
getAllProjectFiles,
|
||||
} from '../get-files';
|
||||
import { validateNowConfigBuilds, validateNowConfigRoutes } from './validate';
|
||||
import {
|
||||
validateNowConfigBuilds,
|
||||
validateNowConfigRoutes,
|
||||
validateNowConfigCleanUrls,
|
||||
validateNowConfigHeaders,
|
||||
validateNowConfigRedirects,
|
||||
validateNowConfigRewrites,
|
||||
validateNowConfigTrailingSlash,
|
||||
validateNowConfigFunctions,
|
||||
} from './validate';
|
||||
|
||||
import isURL from './is-url';
|
||||
import devRouter from './router';
|
||||
import getMimeType from './mime-type';
|
||||
import { getYarnPath } from './yarn-installer';
|
||||
import { executeBuild, getBuildMatches } from './builder';
|
||||
import { executeBuild, getBuildMatches, shutdownBuilder } from './builder';
|
||||
import { generateErrorMessage, generateHttpStatusDescription } from './errors';
|
||||
import {
|
||||
builderDirPromise,
|
||||
installBuilders,
|
||||
updateBuilders
|
||||
updateBuilders,
|
||||
} from './builder-cache';
|
||||
|
||||
// HTML templates
|
||||
@@ -60,7 +71,6 @@ import {
|
||||
EnvConfig,
|
||||
NowConfig,
|
||||
DevServerOptions,
|
||||
BuildConfig,
|
||||
BuildMatch,
|
||||
BuildResult,
|
||||
BuilderInputs,
|
||||
@@ -70,7 +80,7 @@ import {
|
||||
InvokeResult,
|
||||
ListenSpec,
|
||||
RouteConfig,
|
||||
RouteResult
|
||||
RouteResult,
|
||||
} from './types';
|
||||
|
||||
interface FSEvent {
|
||||
@@ -87,7 +97,7 @@ interface NodeRequire {
|
||||
|
||||
declare const __non_webpack_require__: NodeRequire;
|
||||
|
||||
function sortBuilders(buildA: BuildConfig, buildB: BuildConfig) {
|
||||
function sortBuilders(buildA: Builder, buildB: Builder) {
|
||||
if (buildA && buildA.use && buildA.use.startsWith('@now/static-build')) {
|
||||
return 1;
|
||||
}
|
||||
@@ -182,6 +192,20 @@ export default class DevServer {
|
||||
const filesChanged: Set<string> = new Set();
|
||||
const filesRemoved: Set<string> = new Set();
|
||||
|
||||
const distPaths: string[] = [];
|
||||
|
||||
for (const buildMatch of this.buildMatches.values()) {
|
||||
for (const buildResult of buildMatch.buildResults.values()) {
|
||||
if (buildResult.distPath) {
|
||||
distPaths.push(buildResult.distPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
events = events.filter(event =>
|
||||
distPaths.every(distPath => !event.path.startsWith(distPath))
|
||||
);
|
||||
|
||||
// First, update the `files` mapping of source files
|
||||
for (const event of events) {
|
||||
if (event.type === 'add') {
|
||||
@@ -255,9 +279,7 @@ export default class DevServer {
|
||||
});
|
||||
} else {
|
||||
this.output.debug(
|
||||
`Not rebuilding because \`shouldServe()\` returned \`false\` for "${
|
||||
match.use
|
||||
}" request path "${requestPath}"`
|
||||
`Not rebuilding because \`shouldServe()\` returned \`false\` for "${match.use}" request path "${requestPath}"`
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -335,13 +357,18 @@ export default class DevServer {
|
||||
}
|
||||
|
||||
// Delete build matches that no longer exist
|
||||
const ops: Promise<void>[] = [];
|
||||
for (const src of this.buildMatches.keys()) {
|
||||
if (!sources.includes(src)) {
|
||||
this.output.debug(`Removing build match for "${src}"`);
|
||||
// TODO: shutdown lambda functions
|
||||
const match = this.buildMatches.get(src);
|
||||
if (match) {
|
||||
ops.push(shutdownBuilder(match, this.output));
|
||||
}
|
||||
this.buildMatches.delete(src);
|
||||
}
|
||||
}
|
||||
await Promise.all(ops);
|
||||
|
||||
// Add the new matches to the `buildMatches` map
|
||||
const blockingBuilds: Promise<void>[] = [];
|
||||
@@ -376,7 +403,7 @@ export default class DevServer {
|
||||
// Sort build matches to make sure `@now/static-build` is always last
|
||||
this.buildMatches = new Map(
|
||||
[...this.buildMatches.entries()].sort((matchA, matchB) => {
|
||||
return sortBuilders(matchA[1] as BuildConfig, matchB[1] as BuildConfig);
|
||||
return sortBuilders(matchA[1] as Builder, matchB[1] as Builder);
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -413,10 +440,11 @@ export default class DevServer {
|
||||
for (const buildMatch of this.buildMatches.values()) {
|
||||
const {
|
||||
src,
|
||||
builderWithPkg: { package: pkg }
|
||||
builderWithPkg: { package: pkg },
|
||||
} = buildMatch;
|
||||
if (pkg.name === '@now/static') continue;
|
||||
if (updatedBuilders.includes(pkg.name)) {
|
||||
if (pkg.name && updatedBuilders.includes(pkg.name)) {
|
||||
shutdownBuilder(buildMatch, this.output);
|
||||
this.buildMatches.delete(src);
|
||||
this.output.debug(`Invalidated build match for "${src}"`);
|
||||
}
|
||||
@@ -441,7 +469,7 @@ export default class DevServer {
|
||||
}
|
||||
}
|
||||
try {
|
||||
this.validateEnvConfig(fileName, base || {}, env);
|
||||
return this.validateEnvConfig(fileName, base || {}, env);
|
||||
} catch (err) {
|
||||
if (err instanceof MissingDotenvVarsError) {
|
||||
this.output.error(err.message);
|
||||
@@ -450,7 +478,7 @@ export default class DevServer {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
return { ...base, ...env };
|
||||
return {};
|
||||
}
|
||||
|
||||
async getNowConfig(
|
||||
@@ -473,7 +501,6 @@ export default class DevServer {
|
||||
isInitialLoad: boolean = false
|
||||
): Promise<NowConfig> {
|
||||
if (canUseCache && this.cachedNowConfig) {
|
||||
this.output.debug('Using cached `now.json` config');
|
||||
return this.cachedNowConfig;
|
||||
}
|
||||
|
||||
@@ -506,18 +533,30 @@ export default class DevServer {
|
||||
}
|
||||
}
|
||||
|
||||
const allFiles = await getAllProjectFiles(this.cwd, this.output);
|
||||
const files = allFiles.filter(this.filter);
|
||||
|
||||
this.output.debug(
|
||||
`Found ${allFiles.length} and ` +
|
||||
`filtered out ${allFiles.length - files.length} files`
|
||||
);
|
||||
|
||||
await this.validateNowConfig(config);
|
||||
const { error: routeError, routes: maybeRoutes } = getTransformedRoutes({
|
||||
nowConfig: config,
|
||||
filePaths: files,
|
||||
});
|
||||
if (routeError) {
|
||||
this.output.error(routeError.message);
|
||||
await this.exit();
|
||||
}
|
||||
config.routes = maybeRoutes || [];
|
||||
|
||||
// no builds -> zero config
|
||||
if (!config.builds || config.builds.length === 0) {
|
||||
const allFiles = await getAllProjectFiles(this.cwd, this.output);
|
||||
const files = allFiles.filter(this.filter);
|
||||
|
||||
this.output.debug(
|
||||
`Found ${allFiles.length} and ` +
|
||||
`filtered out ${allFiles.length - files.length} files`
|
||||
);
|
||||
|
||||
const { builders, errors } = await detectBuilders(files, pkg, {
|
||||
tag: getDistTag(cliVersion) === 'canary' ? 'canary' : 'latest'
|
||||
const { builders, warnings, errors } = await detectBuilders(files, pkg, {
|
||||
tag: getDistTag(cliVersion) === 'canary' ? 'canary' : 'latest',
|
||||
functions: config.functions,
|
||||
});
|
||||
|
||||
if (errors) {
|
||||
@@ -525,6 +564,10 @@ export default class DevServer {
|
||||
await this.exit();
|
||||
}
|
||||
|
||||
if (warnings && warnings.length > 0) {
|
||||
warnings.forEach(warning => this.output.warn(warning.message));
|
||||
}
|
||||
|
||||
if (builders) {
|
||||
const { defaultRoutes, error: routesError } = await detectRoutes(
|
||||
files,
|
||||
@@ -583,32 +626,41 @@ export default class DevServer {
|
||||
return pkg;
|
||||
}
|
||||
|
||||
async tryValidateOrExit(
|
||||
config: NowConfig,
|
||||
validate: (c: NowConfig) => string | null
|
||||
): Promise<void> {
|
||||
const message = validate(config);
|
||||
|
||||
if (message) {
|
||||
this.output.error(message);
|
||||
await this.exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
async validateNowConfig(config: NowConfig): Promise<void> {
|
||||
if (config.version === 1) {
|
||||
this.output.error('Only `version: 2` is supported by `now dev`');
|
||||
await this.exit(1);
|
||||
}
|
||||
|
||||
const buildsError = validateNowConfigBuilds(config);
|
||||
|
||||
if (buildsError) {
|
||||
this.output.error(buildsError);
|
||||
await this.exit(1);
|
||||
}
|
||||
|
||||
const routesError = validateNowConfigRoutes(config);
|
||||
|
||||
if (routesError) {
|
||||
this.output.error(routesError);
|
||||
await this.exit(1);
|
||||
}
|
||||
await this.tryValidateOrExit(config, validateNowConfigBuilds);
|
||||
await this.tryValidateOrExit(config, validateNowConfigRoutes);
|
||||
await this.tryValidateOrExit(config, validateNowConfigCleanUrls);
|
||||
await this.tryValidateOrExit(config, validateNowConfigHeaders);
|
||||
await this.tryValidateOrExit(config, validateNowConfigRedirects);
|
||||
await this.tryValidateOrExit(config, validateNowConfigRewrites);
|
||||
await this.tryValidateOrExit(config, validateNowConfigTrailingSlash);
|
||||
await this.tryValidateOrExit(config, validateNowConfigFunctions);
|
||||
}
|
||||
|
||||
validateEnvConfig(
|
||||
type: string,
|
||||
env: EnvConfig = {},
|
||||
localEnv: EnvConfig = {}
|
||||
): void {
|
||||
): EnvConfig {
|
||||
// Validate if there are any missing env vars defined in `now.json`,
|
||||
// but not in the `.env` / `.build.env` file
|
||||
const missing: string[] = Object.entries(env)
|
||||
.filter(
|
||||
([name, value]) =>
|
||||
@@ -617,9 +669,36 @@ export default class DevServer {
|
||||
!hasOwnProperty(localEnv, name)
|
||||
)
|
||||
.map(([name]) => name);
|
||||
if (missing.length >= 1) {
|
||||
|
||||
if (missing.length > 0) {
|
||||
throw new MissingDotenvVarsError(type, missing);
|
||||
}
|
||||
|
||||
const merged: EnvConfig = { ...env, ...localEnv };
|
||||
|
||||
// Validate that the env var name matches what AWS Lambda allows:
|
||||
// - https://docs.aws.amazon.com/lambda/latest/dg/env_variables.html
|
||||
let hasInvalidName = false;
|
||||
for (const key of Object.keys(merged)) {
|
||||
if (!/^[a-zA-Z][a-zA-Z0-9_]*$/.test(key)) {
|
||||
this.output.warn(
|
||||
`Ignoring ${type
|
||||
.split('.')
|
||||
.slice(1)
|
||||
.reverse()
|
||||
.join(' ')} var ${JSON.stringify(key)} because name is invalid`
|
||||
);
|
||||
hasInvalidName = true;
|
||||
delete merged[key];
|
||||
}
|
||||
}
|
||||
if (hasInvalidName) {
|
||||
this.output.log(
|
||||
'Env var names must start with letters, and can only contain alphanumeric characters and underscores'
|
||||
);
|
||||
}
|
||||
|
||||
return merged;
|
||||
}
|
||||
|
||||
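`validateEnvConfig` now also drops env vars whose names AWS Lambda would reject. The same check in isolation, using the regex from the diff:

```ts
// Lambda env var names must start with a letter and contain only
// letters, digits, and underscores.
const VALID_ENV_NAME = /^[a-zA-Z][a-zA-Z0-9_]*$/;

function filterEnv(env: { [name: string]: string }) {
  const merged: { [name: string]: string } = {};
  for (const [key, value] of Object.entries(env)) {
    if (VALID_ENV_NAME.test(key)) {
      merged[key] = value;
    } else {
      console.warn(`Ignoring env var ${JSON.stringify(key)} because name is invalid`);
    }
  }
  return merged;
}

// Example: "1BAD" and "has-dash" are dropped, "GOOD_ONE" is kept.
console.log(filterEnv({ GOOD_ONE: 'x', '1BAD': 'y', 'has-dash': 'z' }));
```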
/**
|
||||
@@ -652,7 +731,7 @@ export default class DevServer {
|
||||
const nowConfigBuild = nowConfig.build || {};
|
||||
const [env, buildEnv] = await Promise.all([
|
||||
this.getLocalEnv('.env', nowConfig.env),
|
||||
this.getLocalEnv('.env.build', nowConfigBuild.env)
|
||||
this.getLocalEnv('.env.build', nowConfigBuild.env),
|
||||
]);
|
||||
Object.assign(process.env, buildEnv);
|
||||
this.env = env;
|
||||
@@ -670,8 +749,8 @@ export default class DevServer {
|
||||
|
||||
const builders: Set<string> = new Set(
|
||||
(nowConfig.builds || [])
|
||||
.filter((b: BuildConfig) => b.use)
|
||||
.map((b: BuildConfig) => b.use as string)
|
||||
.filter((b: Builder) => b.use)
|
||||
.map((b: Builder) => b.use as string)
|
||||
);
|
||||
|
||||
await installBuilders(builders, this.yarnPath, this.output);
|
||||
@@ -684,10 +763,12 @@ export default class DevServer {
|
||||
this.yarnPath,
|
||||
this.output
|
||||
)
|
||||
.then(updatedBuilders =>
|
||||
this.invalidateBuildMatches(nowConfig, updatedBuilders)
|
||||
)
|
||||
.then(updatedBuilders => {
|
||||
this.updateBuildersPromise = null;
|
||||
this.invalidateBuildMatches(nowConfig, updatedBuilders);
|
||||
})
|
||||
.catch(err => {
|
||||
this.updateBuildersPromise = null;
|
||||
this.output.error(`Failed to update builders: ${err.message}`);
|
||||
this.output.debug(err.stack);
|
||||
});
|
||||
@@ -716,7 +797,7 @@ export default class DevServer {
|
||||
ignoreInitial: true,
|
||||
useFsEvents: false,
|
||||
usePolling: false,
|
||||
persistent: true
|
||||
persistent: true,
|
||||
});
|
||||
this.watcher.on('add', (path: string) => {
|
||||
this.enqueueFsEvent('add', path);
|
||||
@@ -786,22 +867,18 @@ export default class DevServer {
|
||||
const ops: Promise<void>[] = [];
|
||||
|
||||
for (const match of this.buildMatches.values()) {
|
||||
if (!match.buildOutput) continue;
|
||||
|
||||
for (const asset of Object.values(match.buildOutput)) {
|
||||
if (asset.type === 'Lambda' && asset.fn) {
|
||||
ops.push(asset.fn.destroy());
|
||||
}
|
||||
}
|
||||
ops.push(shutdownBuilder(match, this.output));
|
||||
}
|
||||
|
||||
ops.push(close(this.server));
|
||||
|
||||
if (this.watcher) {
|
||||
this.output.debug(`Closing file watcher`);
|
||||
this.watcher.close();
|
||||
}
|
||||
|
||||
if (this.updateBuildersPromise) {
|
||||
this.output.debug(`Waiting for builders update to complete`);
|
||||
ops.push(this.updateBuildersPromise);
|
||||
}
|
||||
|
||||
@@ -856,8 +933,8 @@ export default class DevServer {
|
||||
const json = JSON.stringify({
|
||||
error: {
|
||||
code: statusCode,
|
||||
message: errorMessage.title
|
||||
}
|
||||
message: errorMessage.title,
|
||||
},
|
||||
});
|
||||
body = `${json}\n`;
|
||||
} else if (accept.includes('html')) {
|
||||
@@ -870,7 +947,7 @@ export default class DevServer {
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
error_code,
|
||||
now_id: nowRequestId
|
||||
now_id: nowRequestId,
|
||||
});
|
||||
} else if (statusCode === 502) {
|
||||
view = errorTemplate502({
|
||||
@@ -878,19 +955,19 @@ export default class DevServer {
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
error_code,
|
||||
now_id: nowRequestId
|
||||
now_id: nowRequestId,
|
||||
});
|
||||
} else {
|
||||
view = errorTemplate({
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
now_id: nowRequestId
|
||||
now_id: nowRequestId,
|
||||
});
|
||||
}
|
||||
body = errorTemplateBase({
|
||||
http_status_code: statusCode,
|
||||
http_status_description,
|
||||
view
|
||||
view,
|
||||
});
|
||||
} else {
|
||||
res.setHeader('content-type', 'text/plain; charset=utf-8');
|
||||
@@ -917,7 +994,7 @@ export default class DevServer {
|
||||
res.setHeader('content-type', 'application/json');
|
||||
const json = JSON.stringify({
|
||||
redirect: location,
|
||||
status: String(statusCode)
|
||||
status: String(statusCode),
|
||||
});
|
||||
body = `${json}\n`;
|
||||
} else if (accept.includes('html')) {
|
||||
@@ -949,7 +1026,7 @@ export default class DevServer {
|
||||
server: 'now',
|
||||
'x-now-trace': 'dev1',
|
||||
'x-now-id': nowRequestId,
|
||||
'x-now-cache': 'MISS'
|
||||
'x-now-cache': 'MISS',
|
||||
};
|
||||
for (const [name, value] of Object.entries(allHeaders)) {
|
||||
res.setHeader(name, value);
|
||||
@@ -976,7 +1053,7 @@ export default class DevServer {
|
||||
'x-now-deployment-url': host,
|
||||
'x-now-id': nowRequestId,
|
||||
'x-now-log-id': nowRequestId.split('-')[2],
|
||||
'x-zeit-co-forwarded-for': ip
|
||||
'x-zeit-co-forwarded-for': ip,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1057,7 +1134,7 @@ export default class DevServer {
|
||||
}
|
||||
|
||||
const method = req.method || 'GET';
|
||||
this.output.log(`${chalk.bold(method)} ${req.url}`);
|
||||
this.output.debug(`${chalk.bold(method)} ${req.url}`);
|
||||
|
||||
try {
|
||||
const nowConfig = await this.getNowConfig();
|
||||
@@ -1139,7 +1216,7 @@ export default class DevServer {
|
||||
|
||||
if (status) {
|
||||
res.statusCode = status;
|
||||
if ([301, 302, 303].includes(status)) {
|
||||
if (300 <= status && status <= 399) {
|
||||
await this.sendRedirect(
|
||||
req,
|
||||
res,
|
||||
@@ -1183,9 +1260,7 @@ export default class DevServer {
|
||||
Object.assign(origUrl.query, uri_args);
|
||||
const newUrl = url.format(origUrl);
|
||||
this.output.debug(
|
||||
`Checking build result's ${
|
||||
buildResult.routes.length
|
||||
} \`routes\` to match ${newUrl}`
|
||||
`Checking build result's ${buildResult.routes.length} \`routes\` to match ${newUrl}`
|
||||
);
|
||||
const matchedRoute = await devRouter(
|
||||
newUrl,
|
||||
@@ -1210,12 +1285,12 @@ export default class DevServer {
|
||||
}
|
||||
}
|
||||
|
||||
let foundAsset = findAsset(match, requestPath);
|
||||
let foundAsset = findAsset(match, requestPath, nowConfig);
|
||||
if ((!foundAsset || this.shouldRebuild(req)) && callLevel === 0) {
|
||||
await this.triggerBuild(match, buildRequestPath, req);
|
||||
|
||||
// Since the `asset` was re-built, resolve it again to get the new asset
|
||||
foundAsset = findAsset(match, requestPath);
|
||||
foundAsset = findAsset(match, requestPath, nowConfig);
|
||||
}
|
||||
|
||||
if (!foundAsset) {
|
||||
@@ -1224,7 +1299,10 @@ export default class DevServer {
|
||||
}
|
||||
|
||||
const { asset, assetKey } = foundAsset;
|
||||
this.output.debug(`Serving asset: [${asset.type}] ${assetKey}`);
|
||||
this.output.debug(
|
||||
`Serving asset: [${asset.type}] ${assetKey} ${(asset as any)
|
||||
.contentType || ''}`
|
||||
);
|
||||
|
||||
/* eslint-disable no-case-declarations */
|
||||
switch (asset.type) {
|
||||
@@ -1238,17 +1316,17 @@ export default class DevServer {
|
||||
headers: [
|
||||
{
|
||||
key: 'Content-Type',
|
||||
value: getMimeType(assetKey)
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
value: asset.contentType || getMimeType(assetKey),
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
case 'FileBlob':
|
||||
const headers: http.OutgoingHttpHeaders = {
|
||||
'Content-Length': asset.data.length,
|
||||
'Content-Type': getMimeType(assetKey)
|
||||
'Content-Type': asset.contentType || getMimeType(assetKey),
|
||||
};
|
||||
this.setResponseHeaders(res, nowRequestId, headers);
|
||||
res.end(asset.data);
|
||||
@@ -1273,7 +1351,7 @@ export default class DevServer {
|
||||
Object.assign(parsed.query, uri_args);
|
||||
const path = url.format({
|
||||
pathname: parsed.pathname,
|
||||
query: parsed.query
|
||||
query: parsed.query,
|
||||
});
|
||||
|
||||
const body = await rawBody(req);
|
||||
@@ -1283,7 +1361,7 @@ export default class DevServer {
|
||||
path,
|
||||
headers: this.getNowProxyHeaders(req, nowRequestId),
|
||||
encoding: 'base64',
|
||||
body: body.toString('base64')
|
||||
body: body.toString('base64'),
|
||||
};
|
||||
|
||||
this.output.debug(`Invoking lambda: "${assetKey}" with ${path}`);
|
||||
@@ -1292,7 +1370,7 @@ export default class DevServer {
|
||||
try {
|
||||
result = await asset.fn<InvokeResult>({
|
||||
Action: 'Invoke',
|
||||
body: JSON.stringify(payload)
|
||||
body: JSON.stringify(payload),
|
||||
});
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
@@ -1379,7 +1457,7 @@ export default class DevServer {
|
||||
relative: href,
|
||||
ext,
|
||||
title: href,
|
||||
base
|
||||
base,
|
||||
};
|
||||
});
|
||||
|
||||
@@ -1391,13 +1469,13 @@ export default class DevServer {
|
||||
const paths = [
|
||||
{
|
||||
name: directory,
|
||||
url: requestPath
|
||||
}
|
||||
url: requestPath,
|
||||
},
|
||||
];
|
||||
const directoryHtml = directoryTemplate({
|
||||
files,
|
||||
paths,
|
||||
directory
|
||||
directory,
|
||||
});
|
||||
this.setResponseHeaders(res, nowRequestId);
|
||||
res.setHeader('Content-Type', 'text/html; charset=utf-8');
|
||||
@@ -1409,25 +1487,6 @@ export default class DevServer {
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Serve project directory as a static deployment.
|
||||
*/
|
||||
serveProjectAsStatic = async (
|
||||
req: http.IncomingMessage,
|
||||
res: http.ServerResponse,
|
||||
nowRequestId: string
|
||||
) => {
|
||||
const filePath = req.url ? req.url.replace(/^\//, '') : '';
|
||||
|
||||
if (filePath && typeof this.files[filePath] === 'undefined') {
|
||||
await this.send404(req, res, nowRequestId);
|
||||
return;
|
||||
}
|
||||
|
||||
this.setResponseHeaders(res, nowRequestId);
|
||||
return serveStaticFile(req, res, this.cwd, { cleanUrls: true });
|
||||
};
|
||||
|
||||
async hasFilesystem(dest: string): Promise<boolean> {
|
||||
const requestPath = dest.replace(/^\//, '');
|
||||
if (
|
||||
@@ -1459,7 +1518,7 @@ function proxyPass(
|
||||
ws: true,
|
||||
xfwd: true,
|
||||
ignorePath: true,
|
||||
target: dest
|
||||
target: dest,
|
||||
});
|
||||
|
||||
proxy.on('error', (error: NodeJS.ErrnoException) => {
|
||||
@@ -1490,7 +1549,7 @@ function serveStaticFile(
|
||||
public: cwd,
|
||||
cleanUrls: false,
|
||||
etag: true,
|
||||
...opts
|
||||
...opts,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1544,22 +1603,49 @@ async function shouldServe(
isFilesystem?: boolean
): Promise<boolean> {
const {
src: entrypoint,
src,
config,
builderWithPkg: { builder }
builderWithPkg: { builder },
} = match;
if (typeof builder.shouldServe === 'function') {
const nowConfig = await devServer.getNowConfig();
const cleanSrc = src.endsWith('.html') ? src.slice(0, -5) : src;
const trimmedPath = requestPath.endsWith('/')
? requestPath.slice(0, -1)
: requestPath;

if (
nowConfig.cleanUrls &&
nowConfig.trailingSlash &&
cleanSrc === trimmedPath
) {
// Mimic fmeta-util and convert cleanUrls and trailingSlash
return true;
} else if (
nowConfig.cleanUrls &&
!nowConfig.trailingSlash &&
cleanSrc === requestPath
) {
// Mimic fmeta-util and convert cleanUrls
return true;
} else if (
!nowConfig.cleanUrls &&
nowConfig.trailingSlash &&
src === trimmedPath
) {
// Mimic fmeta-util and convert trailingSlash
return true;
} else if (typeof builder.shouldServe === 'function') {
const shouldServe = await builder.shouldServe({
entrypoint,
entrypoint: src,
files,
config,
requestPath,
workPath: devServer.cwd
workPath: devServer.cwd,
});
if (shouldServe) {
return true;
}
} else if (findAsset(match, requestPath)) {
} else if (findAsset(match, requestPath, nowConfig)) {
// If there's no `shouldServe()` function, then look up if there's
// a matching build asset on the `match` that has already been built.
return true;
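
The hunk above folds the new cleanUrls/trailingSlash handling into shouldServe. A minimal sketch of the matching rules it implements, written as a standalone helper for illustration only (the name matchesCleanPath and its call sites are hypothetical and not part of the diff):

// Hypothetical helper mirroring the logic above.
function matchesCleanPath(
  src: string,
  requestPath: string,
  opts: { cleanUrls?: boolean; trailingSlash?: boolean }
): boolean {
  const cleanSrc = src.endsWith('.html') ? src.slice(0, -5) : src;
  const trimmedPath = requestPath.endsWith('/')
    ? requestPath.slice(0, -1)
    : requestPath;
  if (opts.cleanUrls && opts.trailingSlash) return cleanSrc === trimmedPath;
  if (opts.cleanUrls && !opts.trailingSlash) return cleanSrc === requestPath;
  if (!opts.cleanUrls && opts.trailingSlash) return src === trimmedPath;
  return false; // otherwise fall through to builder.shouldServe() or findAsset()
}

// e.g. matchesCleanPath('about.html', 'about/', { cleanUrls: true, trailingSlash: true }) === true
// and  matchesCleanPath('about.html', 'about',  { cleanUrls: true, trailingSlash: false }) === true
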
@@ -1597,7 +1683,8 @@ async function findMatchingRoute(

function findAsset(
match: BuildMatch,
requestPath: string
requestPath: string,
nowConfig: NowConfig
): { asset: BuilderOutput; assetKey: string } | void {
if (!match.buildOutput) {
return;
@@ -1605,6 +1692,10 @@ function findAsset(
let assetKey: string = requestPath.replace(/\/$/, '');
let asset = match.buildOutput[requestPath];

if (nowConfig.trailingSlash && requestPath.endsWith('/')) {
asset = match.buildOutput[requestPath.slice(0, -1)];
}

// In the case of an index path, fall back to iterating over the
// builder outputs and doing an "is index" check until a match is found.
if (!asset) {

@@ -5,7 +5,7 @@ export const version = 2;
|
||||
|
||||
export function build({ files, entrypoint }: BuilderParams): BuildResult {
|
||||
const output = {
|
||||
[entrypoint]: files[entrypoint]
|
||||
[entrypoint]: files[entrypoint],
|
||||
};
|
||||
const watch = [entrypoint];
|
||||
|
||||
@@ -15,7 +15,7 @@ export function build({ files, entrypoint }: BuilderParams): BuildResult {
|
||||
export function shouldServe({
|
||||
entrypoint,
|
||||
files,
|
||||
requestPath
|
||||
requestPath,
|
||||
}: ShouldServeParams) {
|
||||
if (isIndex(entrypoint)) {
|
||||
const indexPath = join(requestPath, basename(entrypoint));
|
||||
|
||||
@@ -1,7 +1,15 @@
|
||||
import http from 'http';
|
||||
import { ChildProcess } from 'child_process';
|
||||
import { Lambda as FunLambda } from '@zeit/fun';
|
||||
import { FileBlob, FileFsRef, Lambda } from '@now/build-utils';
|
||||
import {
|
||||
Builder as BuildConfig,
|
||||
FileBlob,
|
||||
FileFsRef,
|
||||
Lambda,
|
||||
PackageJson,
|
||||
BuilderFunctions,
|
||||
} from '@now/build-utils';
|
||||
import { NowRedirect, NowRewrite, NowHeader, Route } from '@now/routing-utils';
|
||||
import { Output } from '../output';
|
||||
|
||||
export interface DevServerOptions {
|
||||
@@ -13,12 +21,6 @@ export interface EnvConfig {
|
||||
[name: string]: string | undefined;
|
||||
}
|
||||
|
||||
export interface BuildConfig {
|
||||
src: string;
|
||||
use?: string;
|
||||
config?: object;
|
||||
}
|
||||
|
||||
export interface BuildMatch extends BuildConfig {
|
||||
builderWithPkg: BuilderWithPackage;
|
||||
buildOutput: BuilderOutputs;
|
||||
@@ -27,15 +29,7 @@ export interface BuildMatch extends BuildConfig {
|
||||
buildProcess?: ChildProcess;
|
||||
}
|
||||
|
||||
export interface RouteConfig {
|
||||
src: string;
|
||||
dest: string;
|
||||
methods?: string[];
|
||||
headers?: HttpHeadersConfig;
|
||||
status?: number;
|
||||
handle?: string;
|
||||
continue?: boolean;
|
||||
}
|
||||
export type RouteConfig = Route;
|
||||
|
||||
export interface NowConfig {
name?: string;
@@ -47,6 +41,12 @@ export interface NowConfig {
builds?: BuildConfig[];
routes?: RouteConfig[];
files?: string[];
cleanUrls?: boolean;
rewrites?: NowRewrite[];
redirects?: NowRedirect[];
headers?: NowHeader[];
trailingSlash?: boolean;
functions?: BuilderFunctions;
}

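NowConfig now carries the routing-related options directly. An illustrative value for the expanded interface follows; the exact shapes of NowRewrite, NowRedirect and NowHeader come from @now/routing-utils and are assumptions here, not taken from the diff:

// Illustration only; the field names inside redirects/headers are assumptions.
const exampleConfig: NowConfig = {
  cleanUrls: true,
  trailingSlash: false,
  redirects: [{ source: '/old', destination: '/new' }],
  headers: [
    { source: '/(.*)', headers: [{ key: 'x-frame-options', value: 'DENY' }] },
  ],
  functions: { 'api/index.js': { memory: 1024, maxDuration: 10 } },
};
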
export interface HttpHandler {
|
||||
@@ -119,6 +119,7 @@ export interface BuildResult {
|
||||
output: BuilderOutputs;
|
||||
routes: RouteConfig[];
|
||||
watch: string[];
|
||||
distPath?: string;
|
||||
}
|
||||
|
||||
export interface ShouldServeParams {
|
||||
@@ -129,18 +130,10 @@ export interface ShouldServeParams {
|
||||
workPath: string;
|
||||
}
|
||||
|
||||
export interface Package {
|
||||
name: string;
|
||||
version: string;
|
||||
scripts?: { [key: string]: string };
|
||||
dependencies?: { [name: string]: string };
|
||||
devDependencies?: { [name: string]: string };
|
||||
}
|
||||
|
||||
export interface BuilderWithPackage {
|
||||
runInProcess?: boolean;
|
||||
builder: Readonly<Builder>;
|
||||
package: Readonly<Package>;
|
||||
package: Readonly<PackageJson>;
|
||||
}
|
||||
|
||||
export interface HttpHeadersConfig {
|
||||
|
||||
@@ -1,5 +1,12 @@
|
||||
import Ajv from 'ajv';
|
||||
import { schema as routesSchema } from '@now/routing-utils';
|
||||
import {
|
||||
routesSchema,
|
||||
cleanUrlsSchema,
|
||||
headersSchema,
|
||||
redirectsSchema,
|
||||
rewritesSchema,
|
||||
trailingSlashSchema,
|
||||
} from '@now/routing-utils';
|
||||
import { NowConfig } from './types';
|
||||
|
||||
const ajv = new Ajv();
|
||||
@@ -16,52 +23,106 @@ const buildsSchema = {
|
||||
src: {
|
||||
type: 'string',
|
||||
minLength: 1,
|
||||
maxLength: 4096
|
||||
maxLength: 4096,
|
||||
},
|
||||
use: {
|
||||
type: 'string',
|
||||
minLength: 3,
|
||||
maxLength: 256
|
||||
maxLength: 256,
|
||||
},
|
||||
config: { type: 'object' }
|
||||
}
|
||||
}
|
||||
config: { type: 'object' },
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
const functionsSchema = {
type: 'object',
minProperties: 1,
maxProperties: 50,
additionalProperties: false,
patternProperties: {
'^.{1,256}$': {
type: 'object',
additionalProperties: false,
properties: {
runtime: {
type: 'string',
maxLength: 256,
},
memory: {
enum: Object.keys(Array.from({ length: 50 }))
.slice(2, 48)
.map(x => Number(x) * 64),
},
maxDuration: {
type: 'number',
minimum: 1,
maximum: 900,
},
},
},
},
};

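The memory enum above is generated rather than listed. A quick check of what it expands to (plain Node; nothing here is part of the diff):

// Object.keys(Array.from({ length: 50 })) -> ['0', '1', ..., '49']
// .slice(2, 48)                           -> ['2', ..., '47']
// .map(x => Number(x) * 64)               -> 128, 192, ..., 3008
const allowedMemory = Object.keys(Array.from({ length: 50 }))
  .slice(2, 48)
  .map(x => Number(x) * 64);
console.log(allowedMemory.length);                // 46
console.log(allowedMemory[0], allowedMemory[45]); // 128 3008
// i.e. every multiple of 64 MB from 128 to 3008 is accepted for `memory`.
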
const validateBuilds = ajv.compile(buildsSchema);
|
||||
const validateRoutes = ajv.compile(routesSchema);
|
||||
const validateCleanUrls = ajv.compile(cleanUrlsSchema);
|
||||
const validateHeaders = ajv.compile(headersSchema);
|
||||
const validateRedirects = ajv.compile(redirectsSchema);
|
||||
const validateRewrites = ajv.compile(rewritesSchema);
|
||||
const validateTrailingSlash = ajv.compile(trailingSlashSchema);
|
||||
const validateFunctions = ajv.compile(functionsSchema);
|
||||
|
||||
export function validateNowConfigBuilds({ builds }: NowConfig) {
|
||||
if (!builds) {
|
||||
export function validateNowConfigBuilds(config: NowConfig) {
|
||||
return validateKey(config, 'builds', validateBuilds);
|
||||
}
|
||||
|
||||
export function validateNowConfigRoutes(config: NowConfig) {
|
||||
return validateKey(config, 'routes', validateRoutes);
|
||||
}
|
||||
|
||||
export function validateNowConfigCleanUrls(config: NowConfig) {
|
||||
return validateKey(config, 'cleanUrls', validateCleanUrls);
|
||||
}
|
||||
|
||||
export function validateNowConfigHeaders(config: NowConfig) {
|
||||
return validateKey(config, 'headers', validateHeaders);
|
||||
}
|
||||
|
||||
export function validateNowConfigRedirects(config: NowConfig) {
|
||||
return validateKey(config, 'redirects', validateRedirects);
|
||||
}
|
||||
|
||||
export function validateNowConfigRewrites(config: NowConfig) {
|
||||
return validateKey(config, 'rewrites', validateRewrites);
|
||||
}
|
||||
|
||||
export function validateNowConfigTrailingSlash(config: NowConfig) {
|
||||
return validateKey(config, 'trailingSlash', validateTrailingSlash);
|
||||
}
|
||||
|
||||
export function validateNowConfigFunctions(config: NowConfig) {
|
||||
return validateKey(config, 'functions', validateFunctions);
|
||||
}
|
||||
|
||||
function validateKey(
|
||||
config: NowConfig,
|
||||
key: keyof NowConfig,
|
||||
validate: Ajv.ValidateFunction
|
||||
) {
|
||||
const value = config[key];
|
||||
if (!value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!validateBuilds(builds)) {
|
||||
if (!validateBuilds.errors) {
|
||||
if (!validate(value)) {
|
||||
if (!validate.errors) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const error = validateBuilds.errors[0];
|
||||
const error = validate.errors[0];
|
||||
|
||||
return `Invalid \`builds\` property: ${error.dataPath} ${error.message}`;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function validateNowConfigRoutes({ routes }: NowConfig) {
|
||||
if (!routes) {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!validateRoutes(routes)) {
|
||||
if (!validateRoutes.errors) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const error = validateRoutes.errors[0];
|
||||
|
||||
return `Invalid \`routes\` property: ${error.dataPath} ${error.message}`;
|
||||
return `Invalid \`${key}\` property: ${error.dataPath} ${error.message}`;
|
||||
}
|
||||
|
||||
return null;

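With validateKey in place, each exported validator is a thin wrapper around one compiled schema. A usage sketch; the import path and the exact wording of the ajv error message are assumptions, not shown in the diff:

// Illustration only.
import {
  validateNowConfigTrailingSlash,
  validateNowConfigFunctions,
} from './validate-config'; // hypothetical import path

// Invalid: assuming trailingSlashSchema requires a boolean, a string is
// rejected with the shared "Invalid `trailingSlash` property: ..." format.
validateNowConfigTrailingSlash({ trailingSlash: 'yes' as any });

// Valid: 1024 MB is one of the generated memory values, so this returns null.
validateNowConfigFunctions({
  functions: { 'api/index.js': { memory: 1024, maxDuration: 10 } },
});
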
@@ -5,7 +5,7 @@ import {
|
||||
writeFile,
|
||||
statSync,
|
||||
chmodSync,
|
||||
createReadStream
|
||||
createReadStream,
|
||||
} from 'fs-extra';
|
||||
import pipe from 'promisepipe';
|
||||
import { join } from 'path';
|
||||
@@ -63,7 +63,7 @@ async function installYarn(output: Output): Promise<string> {
|
||||
output.debug(`Downloading ${YARN_URL}`);
|
||||
const response = await fetch(YARN_URL, {
|
||||
compress: false,
|
||||
redirect: 'follow'
|
||||
redirect: 'follow',
|
||||
});
|
||||
|
||||
if (response.status !== 200) {
|
||||
@@ -90,7 +90,7 @@ async function installYarn(output: Output): Promise<string> {
|
||||
'@echo off',
|
||||
'@SETLOCAL',
|
||||
'@SET PATHEXT=%PATHEXT:;.JS;=;%',
|
||||
'node "%~dp0\\yarn" %*'
|
||||
'node "%~dp0\\yarn" %*',
|
||||
].join('\r\n')
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,22 +1,24 @@
|
||||
import chalk from 'chalk';
|
||||
import { readFileSync } from 'fs';
|
||||
import { resolve } from 'path';
|
||||
import { Response } from 'fetch-h2'
|
||||
import { Response } from 'node-fetch';
|
||||
import { DomainNotFound, InvalidDomain } from '../errors-ts';
|
||||
import Client from '../client';
|
||||
import wait from '../output/wait';
|
||||
|
||||
type JSONResponse = {
|
||||
recordIds: string[]
|
||||
}
|
||||
recordIds: string[];
|
||||
};
|
||||
|
||||
export default async function importZonefile(
|
||||
client: Client,
|
||||
contextName: string,
|
||||
domain: string,
|
||||
zonefilePath: string,
|
||||
zonefilePath: string
|
||||
) {
|
||||
const cancelWait = wait(`Importing Zone file for domain ${domain} under ${chalk.bold(contextName)}`);
|
||||
const cancelWait = wait(
|
||||
`Importing Zone file for domain ${domain} under ${chalk.bold(contextName)}`
|
||||
);
|
||||
const zonefile = readFileSync(resolve(zonefilePath), 'utf8');
|
||||
|
||||
try {
|
||||
@@ -27,7 +29,7 @@ export default async function importZonefile(
|
||||
json: false,
|
||||
});
|
||||
|
||||
const { recordIds } = await res.json() as JSONResponse;
|
||||
const { recordIds } = (await res.json()) as JSONResponse;
|
||||
cancelWait();
|
||||
return recordIds;
|
||||
} catch (error) {
|
||||
|
||||
@@ -9,6 +9,7 @@ import maybeGetDomainByName from './maybe-get-domain-by-name';
|
||||
import purchaseDomainIfAvailable from './purchase-domain-if-available';
|
||||
import verifyDomain from './verify-domain';
|
||||
import extractDomain from '../alias/extract-domain';
|
||||
import isWildcardAlias from '../alias/is-wildcard-alias';
|
||||
|
||||
export default async function setupDomain(
|
||||
output: Output,
|
||||
@@ -34,7 +35,7 @@ export default async function setupDomain(
|
||||
|
||||
if (info) {
|
||||
output.debug(`Domain ${domain} found for the given context`);
|
||||
if (!info.verified) {
|
||||
if (!info.verified || (!info.nsVerifiedAt && isWildcardAlias(alias))) {
|
||||
output.debug(
|
||||
`Domain ${domain} is not verified, trying to perform a verification`
|
||||
);
|
||||
@@ -47,8 +48,17 @@ export default async function setupDomain(
|
||||
output.debug(`Domain ${domain} verification failed`);
|
||||
return verificationResult;
|
||||
}
|
||||
if (!verificationResult.nsVerifiedAt && isWildcardAlias(alias)) {
|
||||
return new ERRORS.DomainNsNotVerifiedForWildcard({
|
||||
domain,
|
||||
nsVerification: {
|
||||
intendedNameservers: verificationResult.intendedNameservers,
|
||||
nameservers: verificationResult.nameservers
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
output.debug(`Domain ${domain} successfuly verified`);
|
||||
output.debug(`Domain ${domain} successfuly verified`);
|
||||
return maybeGetDomainByName(client, contextName, domain) as Promise<
|
||||
Domain
|
||||
>;
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import bytes from 'bytes';
|
||||
import { Response } from 'fetch-h2';
|
||||
import { Response } from 'node-fetch';
|
||||
import { NowError } from './now-error';
|
||||
import param from './output/param';
|
||||
import cmd from './output/cmd';
|
||||
@@ -53,7 +53,7 @@ export class TeamDeleted extends NowError<'TEAM_DELETED', {}> {
|
||||
message: `Your team was deleted. You can switch to a different one using ${param(
|
||||
'now switch'
|
||||
)}.`,
|
||||
meta: {}
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -67,7 +67,7 @@ export class InvalidToken extends NowError<'NOT_AUTHORIZED', {}> {
|
||||
super({
|
||||
code: `NOT_AUTHORIZED`,
|
||||
message: `The specified token is not valid`,
|
||||
meta: {}
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -81,7 +81,7 @@ export class MissingUser extends NowError<'MISSING_USER', {}> {
|
||||
super({
|
||||
code: 'MISSING_USER',
|
||||
message: `Not able to load user, missing from response`,
|
||||
meta: {}
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -98,7 +98,7 @@ export class DomainAlreadyExists extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_ALREADY_EXISTS',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} already exists under a different context.`
|
||||
message: `The domain ${domain} already exists under a different context.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -115,7 +115,7 @@ export class DomainPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_PERMISSION_DENIED',
|
||||
meta: { domain, context },
|
||||
message: `You don't have access to the domain ${domain} under ${context}.`
|
||||
message: `You don't have access to the domain ${domain} under ${context}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -128,7 +128,7 @@ export class DomainExternal extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_EXTERNAL',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} must point to zeit.world.`
|
||||
message: `The domain ${domain} must point to zeit.world.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -143,7 +143,7 @@ export class SourceNotFound extends NowError<'SOURCE_NOT_FOUND', {}> {
|
||||
meta: {},
|
||||
message: `Not able to purchase. Please add a payment method using ${cmd(
|
||||
'now billing add'
|
||||
)}.`
|
||||
)}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -156,7 +156,7 @@ export class InvalidTransferAuthCode extends NowError<
|
||||
super({
|
||||
code: 'INVALID_TRANSFER_AUTH_CODE',
|
||||
meta: { domain, authCode },
|
||||
message: `The provided auth code does not match with the one expected by the current registar`
|
||||
message: `The provided auth code does not match with the one expected by the current registar`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -169,7 +169,7 @@ export class DomainRegistrationFailed extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_REGISTRATION_FAILED',
|
||||
meta: { domain },
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -185,7 +185,7 @@ export class DomainNotFound extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_FOUND',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} can't be found.`
|
||||
message: `The domain ${domain} can't be found.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -198,7 +198,7 @@ export class DomainNotVerified extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_VERIFIED',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} is not verified.`
|
||||
message: `The domain ${domain} is not verified.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -221,7 +221,7 @@ export class DomainVerificationFailed extends NowError<
|
||||
domain,
|
||||
nsVerification,
|
||||
txtVerification,
|
||||
purchased = false
|
||||
purchased = false,
|
||||
}: {
|
||||
domain: string;
|
||||
nsVerification: NSVerificationError;
|
||||
@@ -231,7 +231,7 @@ export class DomainVerificationFailed extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_VERIFICATION_FAILED',
|
||||
meta: { domain, nsVerification, txtVerification, purchased },
|
||||
message: `We can't verify the domain ${domain}. Both Name Servers and DNS TXT verifications failed.`
|
||||
message: `We can't verify the domain ${domain}. Both Name Servers and DNS TXT verifications failed.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -252,6 +252,31 @@ export type TXTVerificationError = {
|
||||
values: string[];
|
||||
};
|
||||
|
||||
/**
* This error is returned when the domain is not verified by nameservers for wildcard alias.
*/
export class DomainNsNotVerifiedForWildcard extends NowError<
'DOMAIN_NS_NOT_VERIFIED_FOR_WILDCARD',
{
domain: string;
nsVerification: NSVerificationError;
}
> {
constructor({
domain,
nsVerification,
}: {
domain: string;
nsVerification: NSVerificationError;
}) {
super({
code: 'DOMAIN_NS_NOT_VERIFIED_FOR_WILDCARD',
meta: { domain, nsVerification },
message: `The domain ${domain} is not verified by nameservers for wildcard alias.`,
});
}
}

/**
|
||||
* Used when a domain is validated because we tried to add it to an account
|
||||
* via API or for any other reason.
|
||||
@@ -264,7 +289,17 @@ export class InvalidDomain extends NowError<
|
||||
super({
|
||||
code: 'INVALID_DOMAIN',
|
||||
meta: { domain },
|
||||
message: message || `The domain ${domain} is not valid.`
|
||||
message: message || `The domain ${domain} is not valid.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class NotDomainOwner extends NowError<'NOT_DOMAIN_OWNER', {}> {
|
||||
constructor(message: string) {
|
||||
super({
|
||||
code: 'NOT_DOMAIN_OWNER',
|
||||
meta: {},
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -277,7 +312,7 @@ export class InvalidDeploymentId extends NowError<
|
||||
super({
|
||||
code: 'INVALID_DEPLOYMENT_ID',
|
||||
meta: { id },
|
||||
message: `The deployment id "${id}" is not valid.`
|
||||
message: `The deployment id "${id}" is not valid.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -294,7 +329,7 @@ export class UnsupportedTLD extends NowError<
|
||||
super({
|
||||
code: 'UNSUPPORTED_TLD',
|
||||
meta: { domain },
|
||||
message: `The TLD for domain name ${domain} is not supported.`
|
||||
message: `The TLD for domain name ${domain} is not supported.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -311,7 +346,7 @@ export class DomainNotAvailable extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_AVAILABLE',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} is not available to be purchased.`
|
||||
message: `The domain ${domain} is not available to be purchased.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -328,7 +363,7 @@ export class DomainServiceNotAvailable extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_SERVICE_NOT_AVAILABLE',
|
||||
meta: { domain },
|
||||
message: `The domain purchase is unavailable, try again later.`
|
||||
message: `The domain purchase is unavailable, try again later.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -345,7 +380,7 @@ export class DomainNotTransferable extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_NOT_TRANSFERABLE',
|
||||
meta: { domain },
|
||||
message: `The domain ${domain} is not available to be transferred.`
|
||||
message: `The domain ${domain} is not available to be transferred.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -361,7 +396,7 @@ export class UnexpectedDomainPurchaseError extends NowError<
|
||||
super({
|
||||
code: 'UNEXPECTED_DOMAIN_PURCHASE_ERROR',
|
||||
meta: { domain },
|
||||
message: `An unexpected error happened while purchasing.`
|
||||
message: `An unexpected error happened while purchasing.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -374,7 +409,7 @@ export class DomainPaymentError extends NowError<'DOMAIN_PAYMENT_ERROR', {}> {
|
||||
super({
|
||||
code: 'DOMAIN_PAYMENT_ERROR',
|
||||
meta: {},
|
||||
message: `Your card was declined.`
|
||||
message: `Your card was declined.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -391,7 +426,7 @@ export class DomainPurchasePending extends NowError<
|
||||
super({
|
||||
code: 'DOMAIN_PURCHASE_PENDING',
|
||||
meta: { domain },
|
||||
message: `The domain purchase for ${domain} is pending.`
|
||||
message: `The domain purchase for ${domain} is pending.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -405,7 +440,7 @@ export class UserAborted extends NowError<'USER_ABORTED', {}> {
|
||||
super({
|
||||
code: 'USER_ABORTED',
|
||||
meta: {},
|
||||
message: `The user aborted the operation.`
|
||||
message: `The user aborted the operation.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -415,7 +450,7 @@ export class CertNotFound extends NowError<'CERT_NOT_FOUND', { id: string }> {
|
||||
super({
|
||||
code: 'CERT_NOT_FOUND',
|
||||
meta: { id },
|
||||
message: `The cert ${id} can't be found.`
|
||||
message: `The cert ${id} can't be found.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -428,7 +463,7 @@ export class CertsPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'CERTS_PERMISSION_DENIED',
|
||||
meta: { domain },
|
||||
message: `You don't have access to ${domain}'s certs under ${context}.`
|
||||
message: `You don't have access to ${domain}'s certs under ${context}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -441,7 +476,7 @@ export class CertOrderNotFound extends NowError<
|
||||
super({
|
||||
code: 'CERT_ORDER_NOT_FOUND',
|
||||
meta: { cns },
|
||||
message: `No cert order could be found for cns ${cns.join(' ,')}`
|
||||
message: `No cert order could be found for cns ${cns.join(' ,')}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -459,7 +494,7 @@ export class TooManyRequests extends NowError<
|
||||
super({
|
||||
code: 'TOO_MANY_REQUESTS',
|
||||
meta: { api, retryAfter },
|
||||
message: `Rate limited. Too many requests to the same endpoint.`
|
||||
message: `Rate limited. Too many requests to the same endpoint.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -493,7 +528,7 @@ export class CertError extends NowError<
|
||||
cns,
|
||||
code,
|
||||
message,
|
||||
helpUrl
|
||||
helpUrl,
|
||||
}: {
|
||||
cns: string[];
|
||||
code: CertErrorCode;
|
||||
@@ -503,7 +538,7 @@ export class CertError extends NowError<
|
||||
super({
|
||||
code: `CERT_ERROR`,
|
||||
meta: { cns, code, helpUrl },
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -522,7 +557,7 @@ export class CertConfigurationError extends NowError<
|
||||
message,
|
||||
external,
|
||||
type,
|
||||
helpUrl
|
||||
helpUrl,
|
||||
}: {
|
||||
cns: string[];
|
||||
message: string;
|
||||
@@ -533,7 +568,7 @@ export class CertConfigurationError extends NowError<
|
||||
super({
|
||||
code: `CERT_CONFIGURATION_ERROR`,
|
||||
meta: { cns, helpUrl, external, type },
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -550,7 +585,7 @@ export class DeploymentNotFound extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_NOT_FOUND',
|
||||
meta: { id, context },
|
||||
message: `Can't find the deployment ${id} under the context ${context}`
|
||||
message: `Can't find the deployment ${id} under the context ${context}`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -561,13 +596,13 @@ export class DeploymentNotFound extends NowError<
|
||||
*/
|
||||
export class DeploymentNotReady extends NowError<
|
||||
'DEPLOYMENT_NOT_READY',
|
||||
{ url: string; }
|
||||
{ url: string }
|
||||
> {
|
||||
constructor({ url = '' }: { url: string }) {
|
||||
super({
|
||||
code: 'DEPLOYMENT_NOT_READY',
|
||||
meta: { url },
|
||||
message: `The deployment https://${url} is not ready.`
|
||||
message: `The deployment https://${url} is not ready.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -580,7 +615,7 @@ export class DeploymentFailedAliasImpossible extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_FAILED_ALIAS_IMPOSSIBLE',
|
||||
meta: {},
|
||||
message: `The deployment build has failed and cannot be aliased`
|
||||
message: `The deployment build has failed and cannot be aliased`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -597,7 +632,7 @@ export class DeploymentPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_PERMISSION_DENIED',
|
||||
meta: { id, context },
|
||||
message: `You don't have access to the deployment ${id} under ${context}.`
|
||||
message: `You don't have access to the deployment ${id} under ${context}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -610,7 +645,7 @@ export class DeploymentTypeUnsupported extends NowError<
|
||||
super({
|
||||
code: 'DEPLOYMENT_TYPE_UNSUPPORTED',
|
||||
meta: {},
|
||||
message: `This region only accepts Serverless Docker Deployments`
|
||||
message: `This region only accepts Serverless Docker Deployments`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -624,7 +659,7 @@ export class InvalidAlias extends NowError<'INVALID_ALIAS', { alias: string }> {
|
||||
super({
|
||||
code: 'INVALID_ALIAS',
|
||||
meta: { alias },
|
||||
message: `The given alias ${alias} is not valid`
|
||||
message: `The given alias ${alias} is not valid`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -638,7 +673,7 @@ export class AliasInUse extends NowError<'ALIAS_IN_USE', { alias: string }> {
|
||||
super({
|
||||
code: 'ALIAS_IN_USE',
|
||||
meta: { alias },
|
||||
message: `The alias is already in use`
|
||||
message: `The alias is already in use`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -653,7 +688,7 @@ export class CertMissing extends NowError<'ALIAS_IN_USE', { domain: string }> {
|
||||
super({
|
||||
code: 'ALIAS_IN_USE',
|
||||
meta: { domain },
|
||||
message: `The alias is already in use`
|
||||
message: `The alias is already in use`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -666,7 +701,7 @@ export class ForbiddenScaleMinInstances extends NowError<
|
||||
super({
|
||||
code: 'FORBIDDEN_SCALE_MIN_INSTANCES',
|
||||
meta: { url, max },
|
||||
message: `You can't scale to more than ${max} min instances with your current plan.`
|
||||
message: `You can't scale to more than ${max} min instances with your current plan.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -679,7 +714,7 @@ export class ForbiddenScaleMaxInstances extends NowError<
|
||||
super({
|
||||
code: 'FORBIDDEN_SCALE_MAX_INSTANCES',
|
||||
meta: { url, max },
|
||||
message: `You can't scale to more than ${max} max instances with your current plan.`
|
||||
message: `You can't scale to more than ${max} max instances with your current plan.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -692,7 +727,7 @@ export class InvalidScaleMinMaxRelation extends NowError<
|
||||
super({
|
||||
code: 'INVALID_SCALE_MIN_MAX_RELATION',
|
||||
meta: { url },
|
||||
message: `Min number of instances can't be higher than max.`
|
||||
message: `Min number of instances can't be higher than max.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -705,7 +740,7 @@ export class NotSupportedMinScaleSlots extends NowError<
|
||||
super({
|
||||
code: 'NOT_SUPPORTED_MIN_SCALE_SLOTS',
|
||||
meta: { url },
|
||||
message: `Cloud v2 does not yet support setting a non-zero min scale setting.`
|
||||
message: `Cloud v2 does not yet support setting a non-zero min scale setting.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -718,7 +753,7 @@ export class VerifyScaleTimeout extends NowError<
|
||||
super({
|
||||
code: 'VERIFY_SCALE_TIMEOUT',
|
||||
meta: { timeout },
|
||||
message: `Instance verification timed out (${timeout}ms)`
|
||||
message: `Instance verification timed out (${timeout}ms)`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -731,7 +766,7 @@ export class CantParseJSONFile extends NowError<
|
||||
super({
|
||||
code: 'CANT_PARSE_JSON_FILE',
|
||||
meta: { file },
|
||||
message: `Can't parse json file`
|
||||
message: `Can't parse json file`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -744,7 +779,20 @@ export class CantFindConfig extends NowError<
|
||||
super({
|
||||
code: 'CANT_FIND_CONFIG',
|
||||
meta: { paths },
|
||||
message: `Can't find a configuration file in the given locations.`
|
||||
message: `Can't find a configuration file in the given locations.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class WorkingDirectoryDoesNotExist extends NowError<
|
||||
'CWD_DOES_NOT_EXIST',
|
||||
{}
|
||||
> {
|
||||
constructor() {
|
||||
super({
|
||||
code: 'CWD_DOES_NOT_EXIST',
|
||||
meta: {},
|
||||
message: 'The current working directory does not exist.',
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -754,7 +802,7 @@ export class FileNotFound extends NowError<'FILE_NOT_FOUND', { file: string }> {
|
||||
super({
|
||||
code: 'FILE_NOT_FOUND',
|
||||
meta: { file },
|
||||
message: `Can't find a file in provided location '${file}'.`
|
||||
message: `Can't find a file in provided location '${file}'.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -767,7 +815,7 @@ export class RulesFileValidationError extends NowError<
|
||||
super({
|
||||
code: 'PATH_ALIAS_VALIDATION_ERROR',
|
||||
meta: { location, message },
|
||||
message: `The provided rules format in file for path alias are invalid`
|
||||
message: `The provided rules format in file for path alias are invalid`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -777,7 +825,7 @@ export class NoAliasInConfig extends NowError<'NO_ALIAS_IN_CONFIG', {}> {
|
||||
super({
|
||||
code: 'NO_ALIAS_IN_CONFIG',
|
||||
meta: {},
|
||||
message: `There is no alias set up in config file.`
|
||||
message: `There is no alias set up in config file.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -790,7 +838,7 @@ export class InvalidAliasInConfig extends NowError<
|
||||
super({
|
||||
code: 'INVALID_ALIAS_IN_CONFIG',
|
||||
meta: { value },
|
||||
message: `Invalid alias option in configuration.`
|
||||
message: `Invalid alias option in configuration.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -803,7 +851,7 @@ export class RuleValidationFailed extends NowError<
|
||||
super({
|
||||
code: 'RULE_VALIDATION_FAILED',
|
||||
meta: { message },
|
||||
message: `The server validation for rules failed`
|
||||
message: `The server validation for rules failed`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -816,7 +864,7 @@ export class InvalidMinForScale extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MIN_FOR_SCALE',
|
||||
meta: { value },
|
||||
message: `Invalid <min> parameter "${value}". A number or "auto" were expected`
|
||||
message: `Invalid <min> parameter "${value}". A number or "auto" were expected`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -829,7 +877,7 @@ export class InvalidArgsForMinMaxScale extends NowError<
|
||||
super({
|
||||
code: 'INVALID_ARGS_FOR_MIN_MAX_SCALE',
|
||||
meta: { min },
|
||||
message: `Invalid number of arguments: expected <min> ("${min}") and [max]`
|
||||
message: `Invalid number of arguments: expected <min> ("${min}") and [max]`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -842,7 +890,7 @@ export class InvalidMaxForScale extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MAX_FOR_SCALE',
|
||||
meta: { value },
|
||||
message: `Invalid <max> parameter "${value}". A number or "auto" were expected`
|
||||
message: `Invalid <max> parameter "${value}". A number or "auto" were expected`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -852,7 +900,7 @@ export class InvalidCert extends NowError<'INVALID_CERT', {}> {
|
||||
super({
|
||||
code: 'INVALID_CERT',
|
||||
meta: {},
|
||||
message: `The provided custom certificate is invalid and couldn't be added`
|
||||
message: `The provided custom certificate is invalid and couldn't be added`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -865,7 +913,7 @@ export class DNSPermissionDenied extends NowError<
|
||||
super({
|
||||
code: 'DNS_PERMISSION_DENIED',
|
||||
meta: { domain },
|
||||
message: `You don't have access to the DNS records of ${domain}.`
|
||||
message: `You don't have access to the DNS records of ${domain}.`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -875,7 +923,7 @@ export class DNSInvalidPort extends NowError<'DNS_INVALID_PORT', {}> {
|
||||
super({
|
||||
code: 'DNS_INVALID_PORT',
|
||||
meta: {},
|
||||
message: `Invalid <port> parameter. A number was expected`
|
||||
message: `Invalid <port> parameter. A number was expected`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -888,7 +936,7 @@ export class DNSInvalidType extends NowError<
|
||||
super({
|
||||
code: 'DNS_INVALID_TYPE',
|
||||
meta: { type },
|
||||
message: `Invalid <type> parameter "${type}". Expected one of A, AAAA, ALIAS, CAA, CNAME, MX, SRV, TXT`
|
||||
message: `Invalid <type> parameter "${type}". Expected one of A, AAAA, ALIAS, CAA, CNAME, MX, SRV, TXT`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -901,7 +949,7 @@ export class DNSConflictingRecord extends NowError<
|
||||
super({
|
||||
code: 'DNS_CONFLICTING_RECORD',
|
||||
meta: { record },
|
||||
message: ` A conflicting record exists "${record}".`
|
||||
message: ` A conflicting record exists "${record}".`,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -924,7 +972,7 @@ export class DomainRemovalConflict extends NowError<
|
||||
pendingAsyncPurchase,
|
||||
resolvable,
|
||||
suffix,
|
||||
transferring
|
||||
transferring,
|
||||
}: {
|
||||
aliases: string[];
|
||||
certs: string[];
|
||||
@@ -942,9 +990,9 @@ export class DomainRemovalConflict extends NowError<
|
||||
pendingAsyncPurchase,
|
||||
suffix,
|
||||
transferring,
|
||||
resolvable
|
||||
resolvable,
|
||||
},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -957,7 +1005,7 @@ export class DomainMoveConflict extends NowError<
|
||||
message,
|
||||
pendingAsyncPurchase,
|
||||
resolvable,
|
||||
suffix
|
||||
suffix,
|
||||
}: {
|
||||
message: string;
|
||||
pendingAsyncPurchase: boolean;
|
||||
@@ -969,9 +1017,9 @@ export class DomainMoveConflict extends NowError<
|
||||
meta: {
|
||||
pendingAsyncPurchase,
|
||||
resolvable,
|
||||
suffix
|
||||
suffix,
|
||||
},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -981,17 +1029,23 @@ export class InvalidEmail extends NowError<'INVALID_EMAIL', { email: string }> {
|
||||
super({
|
||||
code: 'INVALID_EMAIL',
|
||||
message,
|
||||
meta: { email }
|
||||
meta: { email },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class AccountNotFound extends NowError<'ACCOUNT_NOT_FOUND', { email: string }> {
|
||||
constructor(email: string, message: string = `Please sign up: https://zeit.co/signup`) {
|
||||
export class AccountNotFound extends NowError<
|
||||
'ACCOUNT_NOT_FOUND',
|
||||
{ email: string }
|
||||
> {
|
||||
constructor(
|
||||
email: string,
|
||||
message: string = `Please sign up: https://zeit.co/signup`
|
||||
) {
|
||||
super({
|
||||
code: 'ACCOUNT_NOT_FOUND',
|
||||
message,
|
||||
meta: { email }
|
||||
meta: { email },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1004,7 +1058,7 @@ export class InvalidMoveDestination extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MOVE_DESTINATION',
|
||||
message: `Invalid move destination "${destination}"`,
|
||||
meta: { destination }
|
||||
meta: { destination },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1017,7 +1071,7 @@ export class InvalidMoveToken extends NowError<
|
||||
super({
|
||||
code: 'INVALID_MOVE_TOKEN',
|
||||
message: `Invalid move token "${token}"`,
|
||||
meta: { token }
|
||||
meta: { token },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1027,20 +1081,7 @@ export class NoBuilderCacheError extends NowError<'NO_BUILDER_CACHE', {}> {
|
||||
super({
|
||||
code: 'NO_BUILDER_CACHE',
|
||||
message: 'Could not find cache directory for now-builders.',
|
||||
meta: {}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class BuilderCacheCleanError extends NowError<
|
||||
'BUILDER_CACHE_CLEAN_FAILED',
|
||||
{ path: string }
|
||||
> {
|
||||
constructor(path: string, message: string) {
|
||||
super({
|
||||
code: 'BUILDER_CACHE_CLEAN_FAILED',
|
||||
message: `Error cleaning builder cache: ${message}`,
|
||||
meta: { path }
|
||||
meta: {},
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1057,7 +1098,7 @@ export class LambdaSizeExceededError extends NowError<
|
||||
).toLowerCase()}) exceeds the maximum size limit (${bytes(
|
||||
maxLambdaSize
|
||||
).toLowerCase()}). Learn more: https://zeit.co/docs/v2/deployments/concepts/lambdas/#maximum-bundle-size`,
|
||||
meta: { size, maxLambdaSize }
|
||||
meta: { size, maxLambdaSize },
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1076,7 +1117,7 @@ export class MissingDotenvVarsError extends NowError<
|
||||
} else {
|
||||
message = [
|
||||
`The following env vars are not defined in ${code(type)} file:`,
|
||||
...missing.map(name => ` - ${JSON.stringify(name)}`)
|
||||
...missing.map(name => ` - ${JSON.stringify(name)}`),
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
@@ -1085,17 +1126,20 @@ export class MissingDotenvVarsError extends NowError<
|
||||
super({
|
||||
code: 'MISSING_DOTENV_VARS',
|
||||
message,
|
||||
meta: { type, missing }
|
||||
meta: { type, missing },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class DeploymentsRateLimited extends NowError<'DEPLOYMENTS_RATE_LIMITED', {}> {
|
||||
export class DeploymentsRateLimited extends NowError<
|
||||
'DEPLOYMENTS_RATE_LIMITED',
|
||||
{}
|
||||
> {
|
||||
constructor(message: string) {
|
||||
super({
|
||||
code: 'DEPLOYMENTS_RATE_LIMITED',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1105,7 +1149,7 @@ export class BuildsRateLimited extends NowError<'BUILDS_RATE_LIMITED', {}> {
|
||||
super({
|
||||
code: 'BUILDS_RATE_LIMITED',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -1115,47 +1159,66 @@ export class ProjectNotFound extends NowError<'PROJECT_NOT_FOUND', {}> {
|
||||
super({
|
||||
code: 'PROJECT_NOT_FOUND',
|
||||
meta: {},
|
||||
message: `There is no project for "${nameOrId}"`
|
||||
message: `There is no project for "${nameOrId}"`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class AliasDomainConfigured extends NowError<'DOMAIN_CONFIGURED', {}> {
|
||||
constructor({ message }: { message: string; }) {
|
||||
constructor({ message }: { message: string }) {
|
||||
super({
|
||||
code: 'DOMAIN_CONFIGURED',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class MissingBuildScript extends NowError<'MISSING_BUILD_SCRIPT', {}> {
|
||||
constructor({ message }: { message: string; }) {
|
||||
constructor({ message }: { message: string }) {
|
||||
super({
|
||||
code: 'MISSING_BUILD_SCRIPT',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class ConflictingFilePath extends NowError<'CONFLICTING_FILE_PATH', {}> {
|
||||
constructor({ message }: { message: string; }) {
|
||||
constructor({ message }: { message: string }) {
|
||||
super({
|
||||
code: 'CONFLICTING_FILE_PATH',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class ConflictingPathSegment extends NowError<'CONFLICTING_PATH_SEGMENT', {}> {
|
||||
constructor({ message }: { message: string; }) {
|
||||
export class ConflictingPathSegment extends NowError<
|
||||
'CONFLICTING_PATH_SEGMENT',
|
||||
{}
|
||||
> {
|
||||
constructor({ message }: { message: string }) {
|
||||
super({
|
||||
code: 'CONFLICTING_PATH_SEGMENT',
|
||||
meta: {},
|
||||
message
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export class BuildError extends NowError<'BUILD_ERROR', {}> {
|
||||
constructor({
|
||||
message,
|
||||
meta,
|
||||
}: {
|
||||
message: string;
|
||||
meta: { entrypoint: string };
|
||||
}) {
|
||||
super({
|
||||
code: 'BUILD_ERROR',
|
||||
meta,
|
||||
message,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
import path from 'path';
|
||||
import { CantParseJSONFile, CantFindConfig } from './errors-ts';
|
||||
import {
|
||||
CantParseJSONFile,
|
||||
CantFindConfig,
|
||||
WorkingDirectoryDoesNotExist,
|
||||
} from './errors-ts';
|
||||
import humanizePath from './humanize-path';
|
||||
import readJSONFile from './read-json-file';
|
||||
import readPackage from './read-package';
|
||||
@@ -8,14 +12,25 @@ import { Output } from './output';
|
||||
|
||||
let config: Config;
|
||||
|
||||
export default async function getConfig(output: Output, configFile?: string) {
const localPath = process.cwd();

export default async function getConfig(
output: Output,
configFile?: string
): Promise<Config | Error> {
// If config was already read, just return it
if (config) {
return config;
}

let localPath: string;
try {
localPath = process.cwd();
} catch (err) {
if (err.code === 'ENOENT') {
return new WorkingDirectoryDoesNotExist();
}
throw err;
}

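The try/catch covers the case where the shell's working directory has been deleted out from under the CLI. A small reproduction of that failure mode (paths are made up; the ENOENT code is the behaviour I'd expect on Linux, where getcwd() fails for a removed directory):

// Illustration only, not part of the diff.
import { mkdirSync, rmdirSync } from 'fs';

mkdirSync('/tmp/now-gone');
process.chdir('/tmp/now-gone');
rmdirSync('/tmp/now-gone');
try {
  process.cwd();
} catch (err) {
  console.log((err as NodeJS.ErrnoException).code); // 'ENOENT'
  // getConfig() now maps this to WorkingDirectoryDoesNotExist instead of crashing.
}
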
// First try with the config supplied by the user via --local-config
|
||||
if (configFile) {
|
||||
const localFilePath = path.resolve(localPath, configFile);
|
||||
@@ -27,8 +42,7 @@ export default async function getConfig(output: Output, configFile?: string) {
|
||||
return localConfig;
|
||||
}
|
||||
if (localConfig !== null) {
|
||||
const castedConfig = localConfig;
|
||||
config = castedConfig;
|
||||
config = localConfig;
|
||||
return config;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,7 +18,10 @@ export default function handleError(
|
||||
|
||||
if ((<APIError>error).status === 403) {
|
||||
console.error(
|
||||
errorOutput('Authentication error. Run `now login` to log-in again.')
|
||||
errorOutput(
|
||||
error.message ||
|
||||
'Authentication error. Run `now login` to log-in again.'
|
||||
)
|
||||
);
|
||||
} else if ((<APIError>error).status === 429) {
|
||||
// Rate limited: display the message from the server-side,
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { homedir } from 'os';
|
||||
import { resolve as resolvePath, join, basename } from 'path';
|
||||
import { resolve as resolvePath } from 'path';
|
||||
import EventEmitter from 'events';
|
||||
import qs from 'querystring';
|
||||
import { parse as parseUrl } from 'url';
|
||||
@@ -7,24 +7,22 @@ import bytes from 'bytes';
|
||||
import chalk from 'chalk';
|
||||
import retry from 'async-retry';
|
||||
import { parse as parseIni } from 'ini';
|
||||
import { createReadStream } from 'fs';
|
||||
import fs from 'fs-extra';
|
||||
import ms from 'ms';
|
||||
import fetch from 'node-fetch';
|
||||
import { URLSearchParams } from 'url';
|
||||
import {
|
||||
staticFiles as getFiles,
|
||||
npm as getNpmFiles,
|
||||
docker as getDockerFiles
|
||||
docker as getDockerFiles,
|
||||
} from './get-files';
|
||||
import Agent from './agent.ts';
|
||||
import ua from './ua.ts';
|
||||
import hash from './hash';
|
||||
import processDeployment from './deploy/process-deployment.ts';
|
||||
import highlight from './output/highlight';
|
||||
import createOutput from './output';
|
||||
import { responseError } from './error';
|
||||
|
||||
// How many concurrent HTTP/2 stream uploads
|
||||
const MAX_CONCURRENT = 50;
|
||||
import stamp from './output/stamp';
|
||||
import { BuildError } from './errors-ts';
|
||||
|
||||
// Check if running windows
|
||||
const IS_WIN = process.platform.startsWith('win');
|
||||
@@ -39,14 +37,8 @@ export default class Now extends EventEmitter {
|
||||
this._forceNew = forceNew;
|
||||
this._output = createOutput({ debug });
|
||||
this._apiUrl = apiUrl;
|
||||
this._agent = new Agent(apiUrl, { debug });
|
||||
this._onRetry = this._onRetry.bind(this);
|
||||
this.currentTeam = currentTeam;
|
||||
const closeAgent = () => {
|
||||
this._agent.close();
|
||||
process.removeListener('nowExit', closeAgent);
|
||||
};
|
||||
process.on('nowExit', closeAgent);
|
||||
}
|
||||
|
||||
async create(
|
||||
@@ -61,7 +53,6 @@ export default class Now extends EventEmitter {
|
||||
nowConfig = {},
|
||||
hasNowJson = false,
|
||||
sessionAffinity = 'random',
|
||||
isFile = false,
|
||||
atlas = false,
|
||||
|
||||
// Latest
|
||||
@@ -73,361 +64,153 @@ export default class Now extends EventEmitter {
|
||||
quiet = false,
|
||||
env,
|
||||
build,
|
||||
followSymlinks = true,
|
||||
forceNew = false,
|
||||
target = null
|
||||
target = null,
|
||||
deployStamp,
|
||||
}
|
||||
) {
|
||||
const { log, warn, time } = this._output;
|
||||
const opts = { output: this._output, hasNowJson };
|
||||
const { log, warn, debug } = this._output;
|
||||
const isBuilds = type === null;
|
||||
|
||||
let files = [];
|
||||
let hashes = {};
|
||||
const relatives = {};
|
||||
let engines;
|
||||
let deployment;
|
||||
let requestBody = {};
|
||||
|
||||
await time('Getting files', async () => {
|
||||
const opts = { output: this._output, hasNowJson };
|
||||
if (isBuilds) {
|
||||
requestBody = {
|
||||
token: this._token,
|
||||
teamId: this.currentTeam,
|
||||
env,
|
||||
build,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
name,
|
||||
project,
|
||||
meta,
|
||||
regions,
|
||||
force: forceNew,
|
||||
};
|
||||
|
||||
if (type === 'npm') {
|
||||
files = await getNpmFiles(paths[0], pkg, nowConfig, opts);
|
||||
if (target) {
|
||||
requestBody.target = target;
|
||||
}
|
||||
} else if (type === 'npm') {
|
||||
files = await getNpmFiles(paths[0], pkg, nowConfig, opts);
|
||||
|
||||
// A `start` or `now-start` npm script, or a `server.js` file
|
||||
// in the root directory of the deployment are required
|
||||
if (
|
||||
!isBuilds &&
|
||||
!hasNpmStart(pkg) &&
|
||||
!hasFile(paths[0], files, 'server.js')
|
||||
) {
|
||||
const err = new Error(
|
||||
'Missing `start` (or `now-start`) script in `package.json`. ' +
|
||||
'See: https://docs.npmjs.com/cli/start'
|
||||
);
|
||||
throw err;
|
||||
// A `start` or `now-start` npm script, or a `server.js` file
|
||||
// in the root directory of the deployment are required
|
||||
if (
|
||||
!isBuilds &&
|
||||
!hasNpmStart(pkg) &&
|
||||
!hasFile(paths[0], files, 'server.js')
|
||||
) {
|
||||
const err = new Error(
|
||||
'Missing `start` (or `now-start`) script in `package.json`. ' +
|
||||
'See: https://docs.npmjs.com/cli/start'
|
||||
);
|
||||
throw err;
|
||||
}
|
||||
|
||||
engines = nowConfig.engines || pkg.engines;
|
||||
forwardNpm = forwardNpm || nowConfig.forwardNpm;
|
||||
} else if (type === 'static') {
|
||||
if (paths.length === 1) {
|
||||
files = await getFiles(paths[0], nowConfig, opts);
|
||||
} else {
|
||||
if (!files) {
|
||||
files = [];
|
||||
}
|
||||
|
||||
engines = nowConfig.engines || pkg.engines;
|
||||
forwardNpm = forwardNpm || nowConfig.forwardNpm;
|
||||
} else if (type === 'static') {
|
||||
if (isFile) {
|
||||
files = [resolvePath(paths[0])];
|
||||
} else if (paths.length === 1) {
|
||||
files = await getFiles(paths[0], nowConfig, opts);
|
||||
} else {
|
||||
if (!files) {
|
||||
files = [];
|
||||
}
|
||||
for (const path of paths) {
|
||||
const list = await getFiles(path, {}, opts);
|
||||
files = files.concat(list);
|
||||
|
||||
for (const path of paths) {
|
||||
const list = await getFiles(path, {}, opts);
|
||||
files = files.concat(list);
|
||||
|
||||
for (const file of list) {
|
||||
relatives[file] = path;
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if (type === 'docker') {
|
||||
files = await getDockerFiles(paths[0], nowConfig, opts);
|
||||
} else if (isBuilds) {
|
||||
opts.isBuilds = isBuilds;
|
||||
|
||||
if (isFile) {
|
||||
files = [resolvePath(paths[0])];
|
||||
} else if (paths.length === 1) {
|
||||
files = await getFiles(paths[0], {}, opts);
|
||||
} else {
|
||||
if (!files) {
|
||||
files = [];
|
||||
}
|
||||
|
||||
for (const path of paths) {
|
||||
const list = await getFiles(path, {}, opts);
|
||||
files = files.concat(list);
|
||||
|
||||
for (const file of list) {
|
||||
relatives[file] = path;
|
||||
}
|
||||
for (const file of list) {
|
||||
relatives[file] = path;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Read `registry.npmjs.org` authToken from .npmrc
|
||||
let authToken;
|
||||
|
||||
if (type === 'npm' && forwardNpm) {
|
||||
authToken =
|
||||
(await readAuthToken(paths[0])) || (await readAuthToken(homedir()));
|
||||
} else if (type === 'docker') {
|
||||
files = await getDockerFiles(paths[0], nowConfig, opts);
|
||||
}
|
||||
|
||||
const hashes = await time('Computing hashes', () => {
|
||||
const pkgDetails = Object.assign({ name }, pkg);
|
||||
return hash(files, pkgDetails);
|
||||
});
|
||||
const uploadStamp = stamp();
|
||||
|
||||
this._files = hashes;
|
||||
if (isBuilds) {
|
||||
deployment = await processDeployment({
|
||||
now: this,
|
||||
output: this._output,
|
||||
hashes,
|
||||
paths,
|
||||
requestBody,
|
||||
uploadStamp,
|
||||
deployStamp,
|
||||
quiet,
|
||||
nowConfig,
|
||||
});
|
||||
} else {
|
||||
// Read `registry.npmjs.org` authToken from .npmrc
|
||||
let authToken;
|
||||
|
||||
const deployment = await this.retry(async bail => {
|
||||
// Flatten the array to contain files to sync where each nested input
|
||||
// array has a group of files with the same sha but different path
|
||||
const files = await time(
|
||||
'Get files ready for deployment',
|
||||
Promise.all(
|
||||
Array.prototype.concat.apply(
|
||||
[],
|
||||
await Promise.all(
|
||||
Array.from(this._files).map(async ([sha, { data, names }]) => {
|
||||
const statFn = followSymlinks ? fs.stat : fs.lstat;
|
||||
|
||||
return names.map(async name => {
|
||||
const getMode = async () => {
|
||||
const st = await statFn(name);
|
||||
return st.mode;
|
||||
};
|
||||
|
||||
const mode = await getMode();
|
||||
const multipleStatic = Object.keys(relatives).length !== 0;
|
||||
|
||||
let file;
|
||||
|
||||
if (isFile) {
|
||||
file = basename(paths[0]);
|
||||
} else if (multipleStatic) {
|
||||
file = toRelative(name, join(relatives[name], '..'));
|
||||
} else {
|
||||
file = toRelative(name, paths[0]);
|
||||
}
|
||||
|
||||
return {
|
||||
sha,
|
||||
size: data.length,
|
||||
file,
|
||||
mode
|
||||
};
|
||||
});
|
||||
})
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
// This is a useful warning because it prevents people
|
||||
// from getting confused about a deployment that renders 404.
|
||||
if (
|
||||
files.length === 0 ||
|
||||
files.every(item => item.file.startsWith('.'))
|
||||
) {
|
||||
warn(
|
||||
'There are no files (or only files starting with a dot) inside your deployment.'
|
||||
);
|
||||
if (type === 'npm' && forwardNpm) {
|
||||
authToken =
|
||||
(await readAuthToken(paths[0])) || (await readAuthToken(homedir()));
|
||||
}
|
||||
|
||||
const queryProps = {};
|
||||
const requestBody = isBuilds
|
||||
? {
|
||||
version: 2,
|
||||
env,
|
||||
build,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
name,
|
||||
project,
|
||||
files,
|
||||
meta,
|
||||
regions
|
||||
}
|
||||
: {
|
||||
env,
|
||||
build,
|
||||
meta,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
forceNew,
|
||||
name,
|
||||
project,
|
||||
description,
|
||||
deploymentType: type,
|
||||
registryAuthToken: authToken,
|
||||
files,
|
||||
engines,
|
||||
scale,
|
||||
sessionAffinity,
|
||||
limits: nowConfig.limits,
|
||||
atlas
|
||||
};
|
||||
requestBody = {
|
||||
token: this._token,
|
||||
teamId: this.currentTeam,
|
||||
env,
|
||||
build,
|
||||
meta,
|
||||
public: wantsPublic || nowConfig.public,
|
||||
forceNew,
|
||||
name,
|
||||
project,
|
||||
description,
|
||||
deploymentType: type,
|
||||
registryAuthToken: authToken,
|
||||
engines,
|
||||
scale,
|
||||
sessionAffinity,
|
||||
limits: nowConfig.limits,
|
||||
atlas,
|
||||
config: nowConfig,
|
||||
functions: nowConfig.functions,
|
||||
};
|
||||
|
||||
if (Object.keys(nowConfig).length > 0) {
|
||||
if (isBuilds) {
|
||||
// These properties are only used inside Now CLI and
|
||||
// are not supported on the API.
|
||||
const exclude = ['github', 'scope'];
|
||||
|
||||
// Request properties that are made of a combination of
|
||||
// command flags and config properties were already set
|
||||
// earlier. Here, we are setting request properties that
|
||||
// are purely made of their equally-named config property.
|
||||
for (const key of Object.keys(nowConfig)) {
|
||||
const value = nowConfig[key];
|
||||
|
||||
if (!requestBody[key] && !exclude.includes(key)) {
|
||||
requestBody[key] = value;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
requestBody.config = nowConfig;
|
||||
}
|
||||
}
|
||||
|
||||
if (isBuilds) {
|
||||
if (forceNew) {
|
||||
queryProps.forceNew = 1;
|
||||
}
|
||||
|
||||
if (target) {
|
||||
requestBody.target = target;
|
||||
}
|
||||
|
||||
if (isFile) {
|
||||
requestBody.routes = [
|
||||
{
|
||||
src: '/',
|
||||
dest: `/${files[0].file}`
|
||||
}
|
||||
];
|
||||
}
|
||||
}
|
||||
|
||||
const query = qs.stringify(queryProps);
|
||||
const version = isBuilds ? 'v9' : 'v4';
|
||||
|
||||
const res = await this._fetch(
|
||||
`/${version}/now/deployments${query ? `?${query}` : ''}`,
|
||||
{
|
||||
method: 'POST',
|
||||
body: requestBody
|
||||
}
|
||||
);
|
||||
|
||||
// No retry on 4xx
|
||||
let body;
|
||||
|
||||
try {
|
||||
body = await res.json();
|
||||
} catch (err) {
|
||||
throw new Error(
|
||||
`Unexpected response error: ${err.message} (${
|
||||
res.status
|
||||
} status code)`
|
||||
);
|
||||
}
|
||||
|
||||
if (res.status === 429) {
|
||||
if (body.error && body.error.code === 'builds_rate_limited') {
|
||||
const err = new Error(body.error.message);
|
||||
err.status = res.status;
|
||||
err.retryAfter = 'never';
|
||||
err.code = body.error.code;
|
||||
|
||||
return bail(err);
|
||||
}
|
||||
|
||||
let msg = 'You have been creating deployments at a very fast pace. ';
|
||||
|
||||
if (body.error && body.error.limit && body.error.limit.reset) {
|
||||
const { reset } = body.error.limit;
|
||||
const difference = reset * 1000 - Date.now();
|
||||
|
||||
msg += `Please retry in ${ms(difference, { long: true })}.`;
|
||||
} else {
|
||||
msg += 'Please slow down.';
|
||||
}
|
||||
|
||||
const err = new Error(msg);
|
||||
|
||||
err.status = res.status;
|
||||
err.retryAfter = 'never';
|
||||
|
||||
return bail(err);
|
||||
}
|
||||
|
||||
// If the deployment domain is missing a cert, bail with the error
|
||||
if (
|
||||
res.status === 400 &&
|
||||
body.error &&
|
||||
body.error.code === 'cert_missing'
|
||||
) {
|
||||
bail(await responseError(res, null, body));
|
||||
}
|
||||
|
||||
if (
|
||||
res.status === 400 &&
|
||||
body.error &&
|
||||
body.error.code === 'missing_files'
|
||||
) {
|
||||
return body;
|
||||
}
|
||||
|
||||
if (res.status === 404 && body.error && body.error.code === 'not_found') {
|
||||
return body;
|
||||
}
|
||||
|
||||
if (res.status >= 400 && res.status < 500) {
|
||||
const err = new Error();
|
||||
|
||||
if (body.error) {
|
||||
const { code, unreferencedBuildSpecs } = body.error;
|
||||
|
||||
if (code === 'env_value_invalid_type') {
|
||||
const { key } = body.error;
|
||||
err.message =
|
||||
`The env key ${key} has an invalid type: ${typeof env[key]}. ` +
|
||||
'Please supply a String or a Number (https://err.sh/now/env-value-invalid-type)';
|
||||
} else if (code === 'unreferenced_build_specifications') {
|
||||
const count = unreferencedBuildSpecs.length;
|
||||
const prefix = count === 1 ? 'build' : 'builds';
|
||||
|
||||
err.message =
|
||||
`You defined ${count} ${prefix} that did not match any source files (please ensure they are NOT defined in ${highlight(
|
||||
'.nowignore'
|
||||
)}):` +
|
||||
`\n- ${unreferencedBuildSpecs
|
||||
.map(item => JSON.stringify(item))
|
||||
.join('\n- ')}`;
|
||||
} else {
|
||||
Object.assign(err, body.error);
|
||||
}
|
||||
} else {
|
||||
err.message = 'Not able to create deployment';
|
||||
}
|
||||
|
||||
return bail(err);
|
||||
}
|
||||
|
||||
if (res.status !== 200) {
|
||||
throw new Error(body.error.message);
|
||||
}
|
||||
|
||||
for (const [name, value] of res.headers.entries()) {
|
||||
if (name.startsWith('x-now-warning-')) {
|
||||
this._output.warn(value);
|
||||
}
|
||||
}
|
||||
|
||||
return body;
|
||||
});
|
||||
deployment = await processDeployment({
|
||||
legacy: true,
|
||||
now: this,
|
||||
output: this._output,
|
||||
hashes,
|
||||
paths,
|
||||
requestBody,
|
||||
uploadStamp,
|
||||
deployStamp,
|
||||
quiet,
|
||||
env,
|
||||
nowConfig,
|
||||
});
|
||||
}
|
||||
|
||||
// We report about files whose sizes are too big
|
||||
let missingVersion = false;
|
||||
|
||||
if (deployment.warnings) {
|
||||
if (deployment && deployment.warnings) {
|
||||
let sizeExceeded = 0;
|
||||
|
||||
deployment.warnings.forEach(warning => {
|
||||
if (warning.reason === 'size_limit_exceeded') {
|
||||
const { sha, limit } = warning;
|
||||
const n = hashes.get(sha).names.pop();
|
||||
const n = hashes[sha].names.pop();
|
||||
|
||||
warn(`Skipping file ${n} (size exceeded ${bytes(limit)}`);
|
||||
|
||||
hashes.get(sha).names.unshift(n); // Move name (hack, if duplicate matches we report them in order)
|
||||
hashes[sha].names.unshift(n); // Move name (hack, if duplicate matches we report them in order)
|
||||
sizeExceeded++;
|
||||
} else if (warning.reason === 'node_version_not_found') {
|
||||
warn(`Requested node version ${warning.wanted} is not available`);
|
||||
@@ -445,19 +228,10 @@ export default class Now extends EventEmitter {
|
||||
}
|
||||
}
|
||||
|
||||
if (deployment.error && deployment.error.code === 'missing_files') {
|
||||
this._missing = deployment.error.missing || [];
|
||||
this._fileCount = files.length;
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
if (!isBuilds && !quiet && type === 'npm' && deployment.nodeVersion) {
|
||||
if (engines && engines.node && !missingVersion) {
|
||||
log(
|
||||
chalk`Using Node.js {bold ${
|
||||
deployment.nodeVersion
|
||||
}} (requested: {dim \`${engines.node}\`})`
|
||||
chalk`Using Node.js {bold ${deployment.nodeVersion}} (requested: {dim \`${engines.node}\`})`
|
||||
);
|
||||
} else {
|
||||
log(chalk`Using Node.js {bold ${deployment.nodeVersion}} (default)`);
|
||||
@@ -472,81 +246,90 @@ export default class Now extends EventEmitter {
|
||||
return deployment;
|
||||
}
|
||||
|
||||
upload({ atlas = false, scale = {} } = {}) {
|
||||
const { debug, time } = this._output;
|
||||
debug(`Will upload ${this._missing.length} files`);
|
||||
async handleDeploymentError(error, { hashes, env }) {
|
||||
if (error.status === 429) {
|
||||
if (error.code === 'builds_rate_limited') {
|
||||
const err = new Error(error.message);
|
||||
err.status = error.status;
|
||||
err.retryAfter = 'never';
|
||||
err.code = error.code;
|
||||
|
||||
this._agent.setConcurrency({
|
||||
maxStreams: MAX_CONCURRENT,
|
||||
capacity: this._missing.length
|
||||
});
|
||||
return err;
|
||||
}
|
||||
|
||||
time(
|
||||
'Uploading files',
|
||||
Promise.all(
|
||||
this._missing.map(sha =>
|
||||
retry(
|
||||
async bail => {
|
||||
const file = this._files.get(sha);
|
||||
const fPath = file.names[0];
|
||||
const stream = createReadStream(fPath);
|
||||
const { data } = file;
|
||||
let msg = 'You have been creating deployments at a very fast pace. ';
|
||||
|
||||
const fstreamPush = stream.push;
|
||||
if (error.limit && error.limit.reset) {
|
||||
const { reset } = error.limit;
|
||||
const difference = reset * 1000 - Date.now();
|
||||
|
||||
let uploadedSoFar = 0;
|
||||
stream.push = chunk => {
|
||||
// If we're about to push the last chunk, then don't do it here
|
||||
// But instead, we'll "hang" the progress bar and do it on 200
|
||||
if (chunk && uploadedSoFar + chunk.length < data.length) {
|
||||
this.emit('uploadProgress', chunk.length);
|
||||
uploadedSoFar += chunk.length;
|
||||
}
|
||||
return fstreamPush.call(stream, chunk);
|
||||
};
|
||||
msg += `Please retry in ${ms(difference, { long: true })}.`;
|
||||
} else {
|
||||
msg += 'Please slow down.';
|
||||
}
|
||||
|
||||
const url = atlas ? '/v1/now/images' : '/v2/now/files';
|
||||
const additionalHeaders = atlas
|
||||
? {
|
||||
'x-now-dcs': Object.keys(scale).join(',')
|
||||
}
|
||||
: {};
|
||||
const res = await this._fetch(url, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/octet-stream',
|
||||
'x-now-digest': sha,
|
||||
'x-now-size': data.length,
|
||||
...additionalHeaders
|
||||
},
|
||||
body: stream
|
||||
});
|
||||
const err = new Error(msg);
|
||||
|
||||
if (res.status === 200) {
|
||||
// What we want
|
||||
this.emit('uploadProgress', file.data.length - uploadedSoFar);
|
||||
this.emit('upload', file);
|
||||
} else if (res.status > 200 && res.status < 500) {
|
||||
// If something is wrong with our request, we don't retry
|
||||
return bail(await responseError(res, `Failed to upload file with status: ${res.status}`));
|
||||
} else {
|
||||
// If something is wrong with the server, we retry
|
||||
throw await responseError(res, 'Failed to upload file');
|
||||
}
|
||||
},
|
||||
{
|
||||
retries: 3,
|
||||
randomize: true,
|
||||
onRetry: this._onRetry
|
||||
}
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
.then(() => {
|
||||
this.emit('complete');
|
||||
})
|
||||
.catch(err => this.emit('error', err));
|
||||
err.status = error.status;
|
||||
err.retryAfter = 'never';
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
// If the deployment domain is missing a cert, bail with the error
|
||||
if (error.status === 400 && error.code === 'cert_missing') {
|
||||
return responseError(error, null, error);
|
||||
}
|
||||
|
||||
if (error.status === 400 && error.code === 'missing_files') {
|
||||
this._missing = error.missing || [];
|
||||
this._fileCount = hashes.length;
|
||||
|
||||
return error;
|
||||
}
|
||||
|
||||
if (error.status === 404 && error.code === 'not_found') {
|
||||
return error;
|
||||
}
|
||||
|
||||
if (error.status >= 400 && error.status < 500) {
|
||||
const err = new Error();
|
||||
|
||||
const { code, unreferencedBuildSpecs } = error;
|
||||
|
||||
if (code === 'env_value_invalid_type') {
|
||||
const { key } = error;
|
||||
err.message =
|
||||
`The env key ${key} has an invalid type: ${typeof env[key]}. ` +
|
||||
'Please supply a String or a Number (https://err.sh/now-cli/env-value-invalid-type)';
|
||||
} else if (code === 'unreferenced_build_specifications') {
|
||||
const count = unreferencedBuildSpecs.length;
|
||||
const prefix = count === 1 ? 'build' : 'builds';
|
||||
|
||||
err.message =
|
||||
`You defined ${count} ${prefix} that did not match any source files (please ensure they are NOT defined in ${highlight(
|
||||
'.nowignore'
|
||||
)}):` +
|
||||
`\n- ${unreferencedBuildSpecs
|
||||
.map(item => JSON.stringify(item))
|
||||
.join('\n- ')}`;
|
||||
} else {
|
||||
Object.assign(err, error);
|
||||
}
|
||||
|
||||
return err;
|
||||
}
|
||||
|
||||
// Handle build errors
|
||||
if (error.id && error.id.startsWith('bld_')) {
|
||||
return new BuildError({
|
||||
meta: {
|
||||
entrypoint: error.entrypoint,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
return new Error(error.message);
|
||||
}
|
||||
|
||||
async listSecrets() {
|
||||
@@ -589,7 +372,7 @@ export default class Now extends EventEmitter {
|
||||
{
|
||||
retries: 3,
|
||||
minTimeout: 2500,
|
||||
onRetry: this._onRetry
|
||||
onRetry: this._onRetry,
|
||||
}
|
||||
);
|
||||
};
|
||||
@@ -597,7 +380,7 @@ export default class Now extends EventEmitter {
|
||||
if (!app && !Object.keys(meta).length) {
|
||||
// Get the 35 latest projects and their latest deployment
|
||||
const query = new URLSearchParams({ limit: 35 });
|
||||
const projects = await fetchRetry(`/projects/list?${query}`);
|
||||
const projects = await fetchRetry(`/v2/projects/?${query}`);
|
||||
|
||||
const deployments = await Promise.all(
|
||||
projects.map(async ({ id: projectId }) => {
|
||||
@@ -647,7 +430,7 @@ export default class Now extends EventEmitter {
|
||||
{
|
||||
retries: 3,
|
||||
minTimeout: 2500,
|
||||
onRetry: this._onRetry
|
||||
onRetry: this._onRetry,
|
||||
}
|
||||
);
|
||||
|
||||
@@ -697,7 +480,7 @@ export default class Now extends EventEmitter {
|
||||
}
|
||||
|
||||
const url = `/${
|
||||
isBuilds ? 'v9' : 'v5'
|
||||
isBuilds ? 'v11' : 'v5'
|
||||
}/now/deployments/${encodeURIComponent(id)}`;
|
||||
|
||||
return this.retry(
|
||||
@@ -727,7 +510,7 @@ export default class Now extends EventEmitter {
|
||||
|
||||
await this.retry(async bail => {
|
||||
const res = await this._fetch(url, {
|
||||
method: 'DELETE'
|
||||
method: 'DELETE',
|
||||
});
|
||||
|
||||
if (res.status === 200) {
|
||||
@@ -748,7 +531,7 @@ export default class Now extends EventEmitter {
|
||||
return retry(fn, {
|
||||
retries,
|
||||
maxTimeout,
|
||||
onRetry: this._onRetry
|
||||
onRetry: this._onRetry,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -756,9 +539,7 @@ export default class Now extends EventEmitter {
|
||||
this._output.debug(`Retrying: ${err}\n${err.stack}`);
|
||||
}
|
||||
|
||||
close() {
|
||||
this._agent.close();
|
||||
}
|
||||
close() {}
|
||||
|
||||
get id() {
|
||||
return this._id;
|
||||
@@ -802,14 +583,21 @@ export default class Now extends EventEmitter {
|
||||
|
||||
opts.headers = opts.headers || {};
|
||||
opts.headers.accept = 'application/json';
|
||||
opts.headers.authorization = `Bearer ${this._token}`;
|
||||
opts.headers.Authorization = `Bearer ${this._token}`;
|
||||
opts.headers['user-agent'] = ua;
|
||||
|
||||
if (
|
||||
opts.body &&
|
||||
typeof opts.body === 'object' &&
|
||||
opts.body.constructor === Object
|
||||
) {
|
||||
opts.body = JSON.stringify(opts.body);
|
||||
opts.headers['Content-Type'] = 'application/json';
|
||||
}
|
||||
|
||||
return this._output.time(
|
||||
`${opts.method || 'GET'} ${this._apiUrl}${_url} ${JSON.stringify(
|
||||
opts.body
|
||||
) || ''}`,
|
||||
this._agent.fetch(_url, opts)
|
||||
`${opts.method || 'GET'} ${this._apiUrl}${_url} ${opts.body || ''}`,
|
||||
fetch(`${this._apiUrl}${_url}`, opts)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -827,8 +615,8 @@ export default class Now extends EventEmitter {
|
||||
opts = Object.assign({}, opts, {
|
||||
body: JSON.stringify(opts.body),
|
||||
headers: Object.assign({}, opts.headers, {
|
||||
'Content-Type': 'application/json'
|
||||
})
|
||||
'Content-Type': 'application/json',
|
||||
}),
|
||||
});
|
||||
}
|
||||
const res = await this._fetch(url, opts);
|
||||
@@ -875,6 +663,7 @@ function hasNpmStart(pkg) {
|
||||
|
||||
function hasFile(base, files, name) {
|
||||
const relative = files.map(file => toRelative(file, base));
|
||||
console.log(731, relative);
|
||||
return relative.indexOf(name) !== -1;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import { join } from 'path';
|
||||
import { exists } from 'fs-extra';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
|
||||
import Client from './client';
|
||||
import { Config } from '../types';
|
||||
import { Package } from './dev/types';
|
||||
import { CantParseJSONFile, ProjectNotFound } from './errors-ts';
|
||||
import getProjectByIdOrName from './projects/get-project-by-id-or-name';
|
||||
|
||||
@@ -26,14 +27,14 @@ export default async function preferV2Deployment({
|
||||
hasServerfile,
|
||||
pkg,
|
||||
localConfig,
|
||||
projectName
|
||||
projectName,
|
||||
}: {
|
||||
client?: Client,
|
||||
hasDockerfile: boolean,
|
||||
hasServerfile: boolean,
|
||||
pkg: Package | CantParseJSONFile | null,
|
||||
localConfig: Config | undefined,
|
||||
projectName?: string
|
||||
client?: Client;
|
||||
hasDockerfile: boolean;
|
||||
hasServerfile: boolean;
|
||||
pkg: PackageJson | CantParseJSONFile | null;
|
||||
localConfig: Config | undefined;
|
||||
projectName?: string;
|
||||
}): Promise<null | string> {
|
||||
if (localConfig && localConfig.version) {
|
||||
// We will prefer anything that is set here
|
||||
@@ -52,10 +53,14 @@ export default async function preferV2Deployment({
|
||||
const { scripts = {} } = pkg;
|
||||
|
||||
if (!scripts.start && !scripts['now-start']) {
|
||||
return `Deploying to Now 2.0, because ${highlight('package.json')} is missing a ${cmd('start')} script. ${INFO}`;
|
||||
return `Deploying to Now 2.0, because ${highlight(
|
||||
'package.json'
|
||||
)} is missing a ${cmd('start')} script. ${INFO}`;
|
||||
}
|
||||
} else if (!pkg && !hasDockerfile) {
|
||||
return `Deploying to Now 2.0, because no ${highlight('Dockerfile')} was found. ${INFO}`;
|
||||
return `Deploying to Now 2.0, because no ${highlight(
|
||||
'Dockerfile'
|
||||
)} was found. ${INFO}`;
|
||||
}
|
||||
|
||||
if (client && projectName) {
|
||||
|
||||
@@ -2,10 +2,10 @@ import path from 'path';
|
||||
import { CantParseJSONFile } from './errors-ts';
|
||||
import readJSONFile from './read-json-file';
|
||||
import { Config } from '../types';
|
||||
import { Package } from './dev/types';
|
||||
import { PackageJson } from '@now/build-utils';
|
||||
|
||||
interface CustomPackage extends Package {
|
||||
now?: Config
|
||||
interface CustomPackage extends PackageJson {
|
||||
now?: Config;
|
||||
}
|
||||
|
||||
export default async function readPackage(file?: string) {
|
||||
@@ -16,8 +16,8 @@ export default async function readPackage(file?: string) {
|
||||
return result;
|
||||
}
|
||||
|
||||
if (result){
|
||||
return result as CustomPackage
|
||||
if (result) {
|
||||
return result as CustomPackage;
|
||||
}
|
||||
|
||||
return null;
|
||||
|
||||
@@ -22,7 +22,7 @@ export default async (sentry, error, apiUrl, configFiles) => {
|
||||
if (user) {
|
||||
const spec = {
|
||||
email: user.email,
|
||||
id: user.uid
|
||||
id: user.uid,
|
||||
};
|
||||
|
||||
if (user.username) {
|
||||
@@ -44,7 +44,7 @@ export default async (sentry, error, apiUrl, configFiles) => {
|
||||
scope.setExtra('scopeError', {
|
||||
name: scopeError.name,
|
||||
message: scopeError.message,
|
||||
stack: scopeError.stack
|
||||
stack: scopeError.stack,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -81,7 +81,8 @@ export default async (sentry, error, apiUrl, configFiles) => {
|
||||
// Report information about the version of `node` being used
|
||||
scope.setExtra('node', {
|
||||
execPath: process.execPath,
|
||||
version: process.version
|
||||
version: process.version,
|
||||
platform: process.platform,
|
||||
});
|
||||
|
||||
sentry.captureException(error);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Response } from 'fetch-h2';
|
||||
import { Response } from 'node-fetch';
|
||||
import { APIError } from './errors-ts';
|
||||
|
||||
export default async function responseError(
|
||||
|
||||
1
packages/now-cli/test/dev-builder.unit.js
vendored
1
packages/now-cli/test/dev-builder.unit.js
vendored
@@ -1,5 +1,4 @@
|
||||
import test from 'ava';
|
||||
|
||||
import { filterPackage } from '../src/util/dev/builder-cache';
|
||||
|
||||
test('[dev-builder] filter install "latest", cached canary', async t => {
|
||||
|
||||
68
packages/now-cli/test/dev-router.unit.js
vendored
68
packages/now-cli/test/dev-router.unit.js
vendored
@@ -1,10 +1,9 @@
|
||||
import test from 'ava';
|
||||
|
||||
import devRouter from '../src/util/dev/router';
|
||||
|
||||
test('[dev-router] 301 redirection', async t => {
|
||||
const routesConfig = [
|
||||
{ src: '/redirect', status: 301, headers: { Location: 'https://zeit.co' } }
|
||||
{ src: '/redirect', status: 301, headers: { Location: 'https://zeit.co' } },
|
||||
];
|
||||
const result = await devRouter('/redirect', 'GET', routesConfig);
|
||||
|
||||
@@ -16,7 +15,7 @@ test('[dev-router] 301 redirection', async t => {
|
||||
uri_args: {},
|
||||
matched_route: routesConfig[0],
|
||||
matched_route_idx: 0,
|
||||
userDest: false
|
||||
userDest: false,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -32,7 +31,7 @@ test('[dev-router] captured groups', async t => {
|
||||
uri_args: {},
|
||||
matched_route: routesConfig[0],
|
||||
matched_route_idx: 0,
|
||||
userDest: true
|
||||
userDest: true,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -48,7 +47,7 @@ test('[dev-router] named groups', async t => {
|
||||
uri_args: { id: '123' },
|
||||
matched_route: routesConfig[0],
|
||||
matched_route_idx: 0,
|
||||
userDest: true
|
||||
userDest: true,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -56,8 +55,8 @@ test('[dev-router] optional named groups', async t => {
|
||||
const routesConfig = [
|
||||
{
|
||||
src: '/api/hello(/(?<name>[^/]+))?',
|
||||
dest: '/api/functions/hello/index.js?name=$name'
|
||||
}
|
||||
dest: '/api/functions/hello/index.js?name=$name',
|
||||
},
|
||||
];
|
||||
const result = await devRouter('/api/hello', 'GET', routesConfig);
|
||||
|
||||
@@ -69,7 +68,7 @@ test('[dev-router] optional named groups', async t => {
|
||||
uri_args: { name: '' },
|
||||
matched_route: routesConfig[0],
|
||||
matched_route_idx: 0,
|
||||
userDest: true
|
||||
userDest: true,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -86,14 +85,14 @@ test('[dev-router] proxy_pass', async t => {
|
||||
uri_args: {},
|
||||
matched_route: routesConfig[0],
|
||||
matched_route_idx: 0,
|
||||
userDest: false
|
||||
userDest: false,
|
||||
});
|
||||
});
|
||||
|
||||
test('[dev-router] methods', async t => {
|
||||
const routesConfig = [
|
||||
{ src: '/.*', methods: ['POST'], dest: '/post' },
|
||||
{ src: '/.*', methods: ['GET'], dest: '/get' }
|
||||
{ src: '/.*', methods: ['GET'], dest: '/get' },
|
||||
];
|
||||
|
||||
let result = await devRouter('/', 'GET', routesConfig);
|
||||
@@ -105,7 +104,7 @@ test('[dev-router] methods', async t => {
|
||||
uri_args: {},
|
||||
matched_route: routesConfig[1],
|
||||
matched_route_idx: 1,
|
||||
userDest: true
|
||||
userDest: true,
|
||||
});
|
||||
|
||||
result = await devRouter('/', 'POST', routesConfig);
|
||||
@@ -117,7 +116,7 @@ test('[dev-router] methods', async t => {
|
||||
uri_args: {},
|
||||
matched_route: routesConfig[0],
|
||||
matched_route_idx: 0,
|
||||
userDest: true
|
||||
userDest: true,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -133,7 +132,7 @@ test('[dev-router] match without prefix slash', async t => {
|
||||
uri_args: {},
|
||||
matched_route: routesConfig[0],
|
||||
matched_route_idx: 0,
|
||||
userDest: true
|
||||
userDest: true,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -141,8 +140,8 @@ test('[dev-router] match with needed prefixed slash', async t => {
|
||||
const routesConfig = [
|
||||
{
|
||||
src: '^\\/([^\\/]+?)\\/comments(?:\\/)?$',
|
||||
dest: '/some/dest'
|
||||
}
|
||||
dest: '/some/dest',
|
||||
},
|
||||
];
|
||||
const result = await devRouter('/post-1/comments', 'GET', routesConfig);
|
||||
|
||||
@@ -155,9 +154,9 @@ test('[dev-router] match with needed prefixed slash', async t => {
|
||||
uri_args: {},
|
||||
matched_route: {
|
||||
src: '^\\/([^\\/]+?)\\/comments(?:\\/)?$',
|
||||
dest: '/some/dest'
|
||||
dest: '/some/dest',
|
||||
},
|
||||
matched_route_idx: 0
|
||||
matched_route_idx: 0,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -167,9 +166,9 @@ test('[dev-router] `continue: true` with fallthrough', async t => {
|
||||
src: '/_next/static/(?:[^/]+/pages|chunks|runtime)/.+',
|
||||
continue: true,
|
||||
headers: {
|
||||
'cache-control': 'immutable,max-age=31536000'
|
||||
}
|
||||
}
|
||||
'cache-control': 'immutable,max-age=31536000',
|
||||
},
|
||||
},
|
||||
];
|
||||
const result = await devRouter(
|
||||
'/_next/static/chunks/0.js',
|
||||
@@ -182,8 +181,8 @@ test('[dev-router] `continue: true` with fallthrough', async t => {
|
||||
dest: '/_next/static/chunks/0.js',
|
||||
uri_args: {},
|
||||
headers: {
|
||||
'cache-control': 'immutable,max-age=31536000'
|
||||
}
|
||||
'cache-control': 'immutable,max-age=31536000',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
@@ -193,13 +192,13 @@ test('[dev-router] `continue: true` with match', async t => {
|
||||
src: '/_next/static/(?:[^/]+/pages|chunks|runtime)/.+',
|
||||
continue: true,
|
||||
headers: {
|
||||
'cache-control': 'immutable,max-age=31536000'
|
||||
}
|
||||
'cache-control': 'immutable,max-age=31536000',
|
||||
},
|
||||
},
|
||||
{
|
||||
src: '/(.*)',
|
||||
dest: '/hi'
|
||||
}
|
||||
dest: '/hi',
|
||||
},
|
||||
];
|
||||
const result = await devRouter(
|
||||
'/_next/static/chunks/0.js',
|
||||
@@ -214,13 +213,13 @@ test('[dev-router] `continue: true` with match', async t => {
|
||||
userDest: true,
|
||||
uri_args: {},
|
||||
headers: {
|
||||
'cache-control': 'immutable,max-age=31536000'
|
||||
'cache-control': 'immutable,max-age=31536000',
|
||||
},
|
||||
matched_route: {
|
||||
src: '/(.*)',
|
||||
dest: '/hi'
|
||||
dest: '/hi',
|
||||
},
|
||||
matched_route_idx: 1
|
||||
matched_route_idx: 1,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -236,7 +235,7 @@ test('[dev-router] match with catch-all with prefix slash', async t => {
|
||||
headers: {},
|
||||
uri_args: {},
|
||||
matched_route: { src: '/(.*)', dest: '/www/$1' },
|
||||
matched_route_idx: 0
|
||||
matched_route_idx: 0,
|
||||
});
|
||||
});
|
||||
|
||||
@@ -252,14 +251,17 @@ test('[dev-router] match with catch-all with no prefix slash', async t => {
|
||||
headers: {},
|
||||
uri_args: {},
|
||||
matched_route: { src: '(.*)', dest: '/www$1' },
|
||||
matched_route_idx: 0
|
||||
matched_route_idx: 0,
|
||||
});
|
||||
});
|
||||
|
||||
test('[dev-router] `continue: true` with `dest`', async t => {
|
||||
const routesConfig = [
|
||||
{ src: '/(.*)', dest: '/www/$1', continue: true },
|
||||
{ src: '^/www/(a\\/([^\\/]+?)(?:\\/)?)$', dest: 'http://localhost:5000/$1' }
|
||||
{
|
||||
src: '^/www/(a\\/([^\\/]+?)(?:\\/)?)$',
|
||||
dest: 'http://localhost:5000/$1',
|
||||
},
|
||||
];
|
||||
const result = await devRouter('/a/foo', 'GET', routesConfig);
|
||||
|
||||
@@ -271,6 +273,6 @@ test('[dev-router] `continue: true` with `dest`', async t => {
|
||||
uri_args: {},
|
||||
matched_route: routesConfig[1],
|
||||
matched_route_idx: 1,
|
||||
userDest: false
|
||||
userDest: false,
|
||||
});
|
||||
});
|
||||
|
||||
28
packages/now-cli/test/dev-server.unit.js
vendored
28
packages/now-cli/test/dev-server.unit.js
vendored
@@ -1,6 +1,8 @@
|
||||
import url from 'url';
|
||||
import test from 'ava';
|
||||
import path from 'path';
|
||||
import execa from 'execa';
|
||||
import fs from 'fs-extra';
|
||||
import fetch from 'node-fetch';
|
||||
import listen from 'async-listen';
|
||||
import { request, createServer } from 'http';
|
||||
@@ -9,16 +11,27 @@ import DevServer from '../src/util/dev/server';
|
||||
import { installBuilders, getBuildUtils } from '../src/util/dev/builder-cache';
|
||||
import parseListen from '../src/util/dev/parse-listen';
|
||||
|
||||
async function runNpmInstall(fixturePath) {
|
||||
if (await fs.exists(path.join(fixturePath, 'package.json'))) {
|
||||
return execa('yarn', ['install'], { cwd: fixturePath });
|
||||
}
|
||||
}
|
||||
|
||||
function testFixture(name, fn) {
|
||||
return async t => {
|
||||
let server;
|
||||
|
||||
const fixturePath = path.join(__dirname, 'fixtures', 'unit', name);
|
||||
|
||||
await runNpmInstall(fixturePath);
|
||||
|
||||
try {
|
||||
let readyResolve;
|
||||
let readyPromise = new Promise(resolve => {
|
||||
readyResolve = resolve;
|
||||
});
|
||||
|
||||
const debug = false;
|
||||
const debug = true;
|
||||
const output = createOutput({ debug });
|
||||
const origReady = output.ready;
|
||||
|
||||
@@ -29,7 +42,6 @@ function testFixture(name, fn) {
|
||||
origReady(msg);
|
||||
};
|
||||
|
||||
const fixturePath = path.join(__dirname, `fixtures/unit/${name}`);
|
||||
server = new DevServer(fixturePath, { output, debug });
|
||||
|
||||
await server.start(0);
|
||||
@@ -317,8 +329,8 @@ test(
|
||||
// HTML response
|
||||
const res = await fetch(`${server.address}/does-not-exist`, {
|
||||
headers: {
|
||||
Accept: 'text/html'
|
||||
}
|
||||
Accept: 'text/html',
|
||||
},
|
||||
});
|
||||
t.is(res.status, 404);
|
||||
t.is(res.headers.get('content-type'), 'text/html; charset=utf-8');
|
||||
@@ -330,8 +342,8 @@ test(
|
||||
// JSON response
|
||||
const res = await fetch(`${server.address}/does-not-exist`, {
|
||||
headers: {
|
||||
Accept: 'application/json'
|
||||
}
|
||||
Accept: 'application/json',
|
||||
},
|
||||
});
|
||||
t.is(res.status, 404);
|
||||
t.is(res.headers.get('content-type'), 'application/json');
|
||||
@@ -389,10 +401,10 @@ test('[DevServer] parseListen()', t => {
|
||||
t.deepEqual(parseListen('127.0.0.1:3005'), [3005, '127.0.0.1']);
|
||||
t.deepEqual(parseListen('tcp://127.0.0.1:5000'), [5000, '127.0.0.1']);
|
||||
t.deepEqual(parseListen('unix:/home/user/server.sock'), [
|
||||
'/home/user/server.sock'
|
||||
'/home/user/server.sock',
|
||||
]);
|
||||
t.deepEqual(parseListen('pipe:\\\\.\\pipe\\PipeName'), [
|
||||
'\\\\.\\pipe\\PipeName'
|
||||
'\\\\.\\pipe\\PipeName',
|
||||
]);
|
||||
|
||||
let err;
|
||||
|
||||
8
packages/now-cli/test/dev/fixtures/01-node/yarn.lock
Normal file
8
packages/now-cli/test/dev/fixtures/01-node/yarn.lock
Normal file
@@ -0,0 +1,8 @@
|
||||
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
|
||||
# yarn lockfile v1
|
||||
|
||||
|
||||
moment@^2.24.0:
|
||||
version "2.24.0"
|
||||
resolved "https://registry.yarnpkg.com/moment/-/moment-2.24.0.tgz#0d055d53f5052aa653c9f6eb68bb5d12bf5c2b5b"
|
||||
integrity sha512-bV7f+6l2QigeBBZSM/6yTNq4P2fNpSWj/0e7jQcy87A8e7o2nAfP/34/2ky5Vw4B9S446EtIhodAzkFCcR4dQg==
|
||||
@@ -14,37 +14,37 @@
|
||||
},
|
||||
"private": true,
|
||||
"dependencies": {
|
||||
"@angular/animations": "~8.1.0",
|
||||
"@angular/common": "~8.1.0",
|
||||
"@angular/compiler": "~8.1.0",
|
||||
"@angular/core": "~8.1.0",
|
||||
"@angular/forms": "~8.1.0",
|
||||
"@angular/platform-browser": "~8.1.0",
|
||||
"@angular/platform-browser-dynamic": "~8.1.0",
|
||||
"@angular/router": "~8.1.0",
|
||||
"rxjs": "~6.4.0",
|
||||
"tslib": "^1.9.0",
|
||||
"zone.js": "~0.9.1"
|
||||
"@angular/animations": "8.1.0",
|
||||
"@angular/common": "8.1.0",
|
||||
"@angular/compiler": "8.1.0",
|
||||
"@angular/core": "8.1.0",
|
||||
"@angular/forms": "8.1.0",
|
||||
"@angular/platform-browser": "8.1.0",
|
||||
"@angular/platform-browser-dynamic": "8.1.0",
|
||||
"@angular/router": "8.1.0",
|
||||
"rxjs": "6.4.0",
|
||||
"tslib": "1.9.0",
|
||||
"zone.js": "0.9.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@angular-devkit/build-angular": "~0.801.0",
|
||||
"@angular/cli": "~8.1.0",
|
||||
"@angular/compiler-cli": "~8.1.0",
|
||||
"@angular/language-service": "~8.1.0",
|
||||
"@types/node": "~8.9.4",
|
||||
"@types/jasmine": "~3.3.8",
|
||||
"@types/jasminewd2": "~2.0.3",
|
||||
"codelyzer": "^5.0.0",
|
||||
"jasmine-core": "~3.4.0",
|
||||
"jasmine-spec-reporter": "~4.2.1",
|
||||
"karma": "~4.1.0",
|
||||
"karma-chrome-launcher": "~2.2.0",
|
||||
"karma-coverage-istanbul-reporter": "~2.0.1",
|
||||
"karma-jasmine": "~2.0.1",
|
||||
"karma-jasmine-html-reporter": "^1.4.0",
|
||||
"protractor": "~5.4.0",
|
||||
"ts-node": "~7.0.0",
|
||||
"tslint": "~5.15.0",
|
||||
"typescript": "~3.4.3"
|
||||
"@angular-devkit/build-angular": "0.801.0",
|
||||
"@angular/cli": "8.1.0",
|
||||
"@angular/compiler-cli": "8.1.0",
|
||||
"@angular/language-service": "8.1.0",
|
||||
"@types/node": "8.9.4",
|
||||
"@types/jasmine": "3.3.8",
|
||||
"@types/jasminewd2": "2.0.3",
|
||||
"codelyzer": "5.0.0",
|
||||
"jasmine-core": "3.4.0",
|
||||
"jasmine-spec-reporter": "4.2.1",
|
||||
"karma": "4.1.0",
|
||||
"karma-chrome-launcher": "2.2.0",
|
||||
"karma-coverage-istanbul-reporter": "2.0.1",
|
||||
"karma-jasmine": "2.0.1",
|
||||
"karma-jasmine-html-reporter": "1.4.0",
|
||||
"protractor": "5.4.0",
|
||||
"ts-node": "7.0.0",
|
||||
"tslint": "5.15.0",
|
||||
"typescript": "3.4.3"
|
||||
}
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user