Compare commits

...

41 Commits

Author SHA1 Message Date
Steven
71c297de32 Publish
- @now/go@0.5.10
 - @now/next@0.6.0
 - @now/node@0.12.6
 - @now/python@0.2.15
 - @now/static-build@0.9.7
2019-08-13 09:18:59 -07:00
Steven
9569c1babc [now-static-build] Fix zero config vue headers (#918)
* [now-static-build] Add test probes for vue headers

* Add mustContain probe

* Fix vue caching headers
2019-08-13 09:17:42 -07:00
Steven
16a573ec6e [now-node][now-next] Bump node-file-trace to 0.2.9 (#920) 2019-08-13 09:17:37 -07:00
Steven
79a6c8e1d2 [docs] Add versioning documentation (#919)
* [docs] Add versioning documentation

* Separate readme into multiple files
2019-08-13 09:17:31 -07:00
Contextualist
e7f62d194b [now-python] WSGI encoding dance for QUERY_STRING (#915) 2019-08-13 09:17:24 -07:00
Steven
3c4306662e [now-node][now-next] Bump node-file-trace to 0.2.8 (#917) 2019-08-13 09:17:17 -07:00
Sophearak Tha
2ed4e7e9a2 Update link to docs (#912) 2019-08-13 09:17:11 -07:00
Joe Haddad
d7a65cc835 Revert "Disable compression for Zips" (#907)
This reverts commit 57d26761fdc3ef322a77e31c0c7c6f090694323c.
2019-08-13 09:16:50 -07:00
Joe Haddad
0eb5dac904 Next.js experimental for canary only (#910) 2019-08-13 09:16:45 -07:00
Joe Haddad
9ad63f9ed2 Disable compression for Zips (#901) 2019-08-13 09:16:36 -07:00
Joe Haddad
4ad4765034 [now-next] Add Serverless Trace to Next.js Builder (#884)
* Add Serverless Trace to Next.js Builder

* Correctly specify the experimental target

* Extract version

* Capture real Next.js version

* Disable asset folder on versions with trace mode

* Adjust console output

* Start on node-file-trace integration

* Remove ESNext syntax

* Add firebase fixture

* Add message

* Add 2x logs

* Use resolve from

* Use correct path

* Check against realNextVersion

* Populate traced files

* Put logging behind some debug flags

* Add another TODO

* Update message

* Set default config if missing

* Inject the correct page require path into build

* Add a Next.js 8 sanity test

* Do not trace individual lambdas

* Limit zipping to 5 to not saturate CPU/Memory

* Revert "Limit zipping to 5 to not saturate CPU/Memory"

This reverts commit b826be6c927f083555cceb61c3d20938427408c1.

* Try to compress traced files separate

* Ensure it works with 2x lambdas

* Revert "Try to compress traced files separate"

This reverts commit 46fd20938cd8c7a1eecc878ae8ecbbefa27a72da.
2019-08-13 09:15:22 -07:00
Andy Bitz
162f06e4a6 Publish
- @now/build-utils@0.9.12
2019-08-09 21:48:13 +02:00
Andy
4b5b573a1e [now-build-utils] Fix 404 trailing slash for index files and add tests (#904)
* [now-build-utils] Fix 404 trailing slash for index files and add tests

* Don't fail on 404

* Consider status probe

* Fix slash only route

* Adjust tests

* Add another test
2019-08-09 21:48:01 +02:00
Steven
86a2640fff Publish
- @now/go@0.5.9
 - @now/next@0.5.10
 - @now/node@0.12.5
 - @now/routing-utils@1.2.2
2019-08-08 17:48:44 -07:00
Steven
351a85f875 [now-go] Fix go concurrent/parallel builds (#900)
* [now-go] Fix concurrent go install

* Add test

* Fix typo

* Add config.parallel
2019-08-08 17:47:53 -07:00
Connor Davis
00fd570ce5 [now-routing-utils] Allow the combined use of continue and dest (#897)
* allow continue dest

* remove test
2019-08-08 17:47:47 -07:00
Steven
4ff1f05cf3 [now-node] Fix windows typescript entrypoint bug (#899) 2019-08-08 17:47:39 -07:00
Steven
fb9035bc00 [now-node][now-next] Bump node-file-trace to 0.2.7 (#892) 2019-08-08 17:47:28 -07:00
Steven
9258a74986 [tests] Add automerge (#893) 2019-08-08 17:46:02 -07:00
Leo Lamprecht
f7c21bbde6 Publish
- @now/static-build@0.9.6
2019-08-07 23:10:28 +00:00
Leo Lamprecht
4c6c17af8a [now-static-build] Capture file system routes for CRA apps (#891)
* Capture file system routes for CRA apps

* Added integration test

* Fixed defaults
2019-08-07 23:09:55 +00:00
Steven
99410a0c06 Publish
- @now/build-utils@0.9.11
 - @now/go@0.5.8
 - @now/next@0.5.9
 - @now/node@0.12.4
 - @now/python@0.2.14
 - @now/ruby@0.1.4
 - @now/static-build@0.9.5
2019-08-07 08:28:33 -07:00
Timothy
9df2e3a62d [docs] Add Builders Developer Reference (#888)
* Add Builders developer reference

* Updates to the README
2019-08-07 08:27:59 -07:00
Steven
3cc786412d [now-bash] Move source code to external repository (#887)
* [now-bash] Remove source code

* Change tests to run from root instead of dist
2019-08-07 08:27:51 -07:00
Steven
973229e02e [now-node][now-next][now-static-build] Enable node 10 for zero c… (#879) 2019-08-07 08:27:10 -07:00
Steven
1493b0657b [now-build-utils] Default to Node 10 for zero config (#773)
* [now-build-utils] Default to Node 10

* Revert back to 8

* Enable node 10 for zero config

* Add silent flag
2019-08-07 08:26:56 -07:00
Steven
66df087faa [now-python] Use ncc before publishing to npm (#875) 2019-08-07 08:26:43 -07:00
Steven
4401799eb3 [now-ruby] Use ncc before publishing to npm (#877) 2019-08-07 08:26:37 -07:00
Steven
313cad8e20 [now-go] Use ncc before publishing to npm (#876)
* [now-go] Use ncc before publishing to npm

* Add missing main
2019-08-07 08:26:15 -07:00
Steven
3a51773693 [now-php][now-rust] Remove deprecated community builders (#871)
* Remove now-rust

* Remove now-php

* Remove now-php-bridge

* Remove unused eslintignore

* Change a comment
2019-08-07 08:25:56 -07:00
Andy Bitz
ddab628034 Publish
- @now/build-utils@0.9.10
2019-08-06 19:33:36 +02:00
Steven
2c10cdcbca [now-build-utils] Fix TS globbing for zero config (#886)
* [now-build-utils] Fix TS globbing for zero config

* Fix tests

* [now-build-utils] Fix TS globbing for zero config
2019-08-06 19:30:13 +02:00
Steven
c30eac53f1 [now-build-utils] Fix TS globbing for zero config (#885)
* [now-build-utils] Fix TS globbing for zero config

* Fix tests
2019-08-06 19:30:07 +02:00
Andy Bitz
2d3e32e95b Publish
- @now/build-utils@0.9.9
2019-08-06 03:01:43 +02:00
Andy
bd8d41cadc [now-build-utils] Fix 404 for index routes (#882) 2019-08-06 03:01:06 +02:00
Andy Bitz
0a6e7d8e23 Publish
- @now/static-build@0.9.4
2019-08-05 16:12:58 +02:00
Andy
e0a8cb5011 [now-static-build] Choose public dir when there is no dist dir (#874)
* [now-static-build] Choose `public` dir when there is no `dist` dir

* Fix `path.join`
2019-08-05 16:12:30 +02:00
Andy Bitz
bcdc27139f Publish
- @now/build-utils@0.9.8
2019-08-03 00:31:02 +02:00
Andy
8cfaef7e6c [now-build-utils] Fix 404 route (#872) 2019-08-03 00:30:33 +02:00
Andy Bitz
79c096b80e Publish
- @now/build-utils@0.9.7
2019-08-02 22:32:16 +02:00
Andy
2cacb95c7d [now-build-utils] Disable directory listing for api routes (#870) 2019-08-02 22:26:37 +02:00
154 changed files with 1308 additions and 4993 deletions


@@ -7,12 +7,11 @@
/packages/now-build-utils/src/fs/*.js
/packages/now-node/dist/*
/packages/now-next/dist/*
/packages/now-next/test/fixtures/**
/packages/now-node-bridge/*
/packages/now-python/dist/*
/packages/now-optipng/dist/*
/packages/now-go/*
/packages/now-rust/dist/*
/packages/now-ruby/dist/*
/packages/now-static-build/dist/*
/packages/now-static-build/test/fixtures/**
/packages/now-routing-utils/dist/*
/packages/now-routing-utils/dist/*

1
.github/CODEOWNERS vendored

@@ -8,7 +8,6 @@
/packages/now-next @timer
/packages/now-go @styfle @sophearak
/packages/now-python @styfle @sophearak
/packages/now-rust @styfle @mike-engel @anmonteiro
/packages/now-ruby @styfle @coetry @nathancahill
/packages/now-static-build @styfle @AndyBitz
/packages/now-routing-utils @dav-is

16
.kodiak.toml Normal file

@@ -0,0 +1,16 @@
version = 1
[merge]
automerge_label = "automerge"
blacklist_title_regex = "^WIP.*"
blacklist_labels = ["work in progress"]
method = "squash"
delete_branch_on_merge = true
block_on_reviews_requested = false
notify_on_conflict = true
optimistic_updates = true
[merge.message]
title = "pull_request_title"
include_pr_number = true
body_type = "markdown"

447
DEVELOPING_A_BUILDER.md Normal file

@@ -0,0 +1,447 @@
# Builders Developer Reference
This page is a reference for creating a Builder with the Builder API.
A Builder is an npm module that exposes a `build` function and, optionally, `analyze` and `prepareCache` functions.
Official Builders are published to [npmjs.com](https://npmjs.com) as packages and referenced in the `use` property of the `now.json` configuration file.
However, the `use` property works with any [npm install argument](https://docs.npmjs.com/cli/install), such as a git repo URL, which is useful for testing your Builder.
See the [Builders Documentation](https://zeit.co/docs/v2/advanced/builders) to view example usage.
## Builder Exports
### `version`
A **required** exported constant that decides which version of the Builder API to use.
The latest and suggested version is `2`.
### `analyze`
An **optional** exported function that returns a unique fingerprint used for the purpose of [build de-duplication](https://zeit.co/docs/v2/advanced/concepts/immutability#deduplication-algorithm). If the `analyze` function is not supplied, a random fingerprint is assigned to each build.
```js
export analyze({
  files: Files,
  entrypoint: String,
  workPath: String,
  config: Object
}) : String fingerprint
```
If you are using TypeScript, you should use the following types:
```ts
import { AnalyzeOptions } from '@now/build-utils'
export function analyze(options: AnalyzeOptions) {
  return 'fingerprint goes here'
}
```
### `build`
A **required** exported function that returns a [Files](#files) data structure that contains the Build outputs, which can be a [Static File](#file) or a [Serverless Function](#serverless-function).
What's a Serverless Function? Read about [Serverless Function concepts](https://zeit.co/docs/v2/deployments/concepts/lambdas) to learn more.
```js
build({
  files: Files,
  entrypoint: String,
  workPath: String,
  config: Object,
  meta?: {
    isDev?: Boolean,
    requestPath?: String,
    filesChanged?: Array<String>,
    filesRemoved?: Array<String>
  }
}) : {
  watch: Array<String>,
  output: Files output,
  routes: Object
}
```
If you are using TypeScript, you should use the following types:
```ts
import { BuildOptions } from '@now/build-utils'
export function build(options: BuildOptions) {
  // Build the code here
  return {
    output: {
      'path-to-file': File,
      'path-to-lambda': Lambda
    },
    watch: [],
    routes: {}
  }
}
```
### `prepareCache`
An **optional** exported function that is equivalent to [`build`](#build), but it executes the instructions necessary to prepare a cache for the next run.
```js
prepareCache({
  files: Files,
  entrypoint: String,
  workPath: String,
  cachePath: String,
  config: Object
}) : Files cacheOutput
```
If you are using TypeScript, you can import the types for each of these functions by using the following:
```ts
import { PrepareCacheOptions } from '@now/build-utils'
export function prepareCache(options: PrepareCacheOptions) {
  return { 'path-to-file': File }
}
```
### `shouldServe`
An **optional** exported function that is only used by `now dev` in [Now CLI](https://zeit.co/download) and indicates whether a [Builder](https://zeit.co/docs/v2/advanced/builders) wants to be responsible for building a certain request path.
```js
shouldServe({
  entrypoint: String,
  files: Files,
  config: Object,
  requestPath: String,
  workPath: String
}) : Boolean
```
If you are using TypeScript, you can import the types for each of these functions by using the following:
```ts
import { ShouldServeOptions } from '@now/build-utils'
export function shouldServe(options: ShouldServeOptions) {
  return true // or false
}
```
If this method is not defined, Now CLI will default to [this function](https://github.com/zeit/now-builders/blob/52994bfe26c5f4f179bdb49783ee57ce19334631/packages/now-build-utils/src/should-serve.ts).
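For illustration only, a simplified sketch of the kind of check such a default performs (not the exact implementation linked above) could look like this:
```js
const { parse } = require('path')

// Simplified illustration: serve when the request path matches the
// entrypoint, or when the entrypoint is an `index` file for that directory.
exports.shouldServe = ({ entrypoint, files, requestPath }) => {
  const cleanPath = requestPath.replace(/\/$/, '') // drop a trailing slash
  if (cleanPath === entrypoint && Boolean(files[entrypoint])) return true
  const { dir, name } = parse(entrypoint)
  return name === 'index' && dir === cleanPath && Boolean(files[entrypoint])
}
```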
### Builder Options
The exported functions [`analyze`](#analyze), [`build`](#build), and [`prepareCache`](#preparecache) receive one argument with the following properties.
**Properties:**
- `files`: All source files of the project as a [Files](#files) data structure.
- `entrypoint`: Name of entrypoint file for this particular build job. Value `files[entrypoint]` is guaranteed to exist and be a valid [File](#files) reference. `entrypoint` is always a discrete file and never a glob, since globs are expanded into separate builds at deployment time.
- `workPath`: A writable temporary directory where you are encouraged to perform your build process. This directory will be populated with the restored cache from the previous run (if any) for [`analyze`](#analyze) and [`build`](#build).
- `cachePath`: A writable temporary directory where you can build a cache for the next run. This is only passed to `prepareCache`.
- `config`: An arbitrary object passed by the user in the [Build definition](#defining-the-build-step) in `now.json`.
## Example: html-minifier
Let's walk through what it takes to create a simple Builder that takes in an HTML source file and yields a minified HTML static file as its build output.
While this is a very simple builder, the approach demonstrated here can be used to return anything: one or more static files and/or one or more lambdas.
## Setting up the module
### Defining the analyze step
The `analyze` hook is optional. Its goal is to give the developer a tool to avoid wasting time _re-computing a build_ that has already occurred.
The return value of `analyze` is a _fingerprint_: a simple string that uniquely identifies the build process.
If `analyze` is not supplied, the fingerprint defaults to the combined checksums of **all the files at the same directory level as the entrypoint**. This default errs on the side of re-executing builds when files _other than the entrypoint_ (dependencies, manifest files, etc.) have changed.
For our `html-minify` example, we know that HTML files don't have dependencies. Therefore, our analyze step can just return the `digest` of the entrypoint.
Our `index.js` file looks as follows:
```js
exports.analyze = function({ files, entrypoint }) {
  return files[entrypoint].digest
}
```
This means we re-minify and re-create the build output _only if the file contents (and therefore the digest) change_.
### Defining the build step
Your module will need some utilities to manipulate the data structures we pass you, create new ones, and alter the filesystem.
To that end, we expose our API as the `@now/build-utils` package. This package is always loaded on your behalf, so make sure it is only included under `peerDependencies` in your `package.json`.
Builders can include dependencies of their liking:
```js
const { minify } = require('html-minifier')
const { FileBlob } = require('@now/build-utils') // peer dependency, loaded on your behalf

exports.version = 2

exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest

exports.build = async ({ files, entrypoint, config }) => {
  const stream = files[entrypoint].toStream()
  const options = Object.assign({}, config || {})
  const { data } = await FileBlob.fromStream({ stream })
  const content = data.toString()
  const minified = minify(content, options)
  const result = new FileBlob({ data: minified })
  return {
    output: {
      [entrypoint]: result
    },
    watch: [],
    routes: {}
  }
}
```
### Defining a `prepareCache` step
If our Builder had performed work that could be reused in the next build invocation, we could define a `prepareCache` step (see the sketch below).
In this case, there are no intermediate artifacts that we can cache, and our `analyze` step already takes care of caching the full output based on the fingerprint of the input.
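For a Builder that does produce reusable artifacts (an installed `node_modules` directory, for instance), a minimal sketch of a `prepareCache` step, assuming the `glob` helper described below, could look like this:
```js
const { glob } = require('@now/build-utils')

// Illustrative only: return the files worth restoring on the next build.
exports.prepareCache = async ({ workPath }) => {
  return glob('node_modules/**', workPath)
}
```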
## Technical Details
### Execution Context
A [Serverless Function](https://zeit.co/docs/v2/advanced/concepts/lambdas) is created where the builder logic is executed. The lambda is run using the Node.js 8 runtime. A brand new sandbox is created for each deployment, for security reasons. The sandbox is cleaned up between executions to ensure no lingering temporary files are shared from build to build.
All the APIs you export ([`analyze`](#analyze), [`build`](#build) and [`prepareCache`](#preparecache)) are not guaranteed to run in the same process, but the filesystem we expose (e.g. `workPath` and the results of calling [`getWriteableDirectory`](#getwriteabledirectory)) is retained.
If you need to share state between those steps, use the filesystem.
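As a hypothetical example of sharing state through the filesystem, a Builder could write a marker file during `analyze` and read it back during `build` (the file name `.analyze-meta` is illustrative):
```js
const fs = require('fs')
const path = require('path')

exports.analyze = ({ files, entrypoint, workPath }) => {
  // Persist something for later steps, which may run in another process.
  fs.writeFileSync(path.join(workPath, '.analyze-meta'), entrypoint)
  return files[entrypoint].digest
}

exports.build = async ({ workPath }) => {
  const recorded = fs.readFileSync(path.join(workPath, '.analyze-meta'), 'utf8')
  console.log(`analyze step recorded entrypoint: ${recorded}`)
  // ...produce the usual output here
  return { output: {}, watch: [], routes: {} }
}
```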
### Directory and Cache Lifecycle
When a new build is created, we pre-populate the `workPath` supplied to `analyze` with the results of the `prepareCache` step of the previous build.
The `analyze` step can modify that directory, and it will not be re-created when it's supplied to `build` and `prepareCache`.
To learn how the cache key is computed and invalidated, refer to the [overview](https://zeit.co/docs/v2/advanced/builders#technical-details).
### Accessing Environment and Secrets
The env and secrets specified by the user as `build.env` are passed to the builder process. This means you can access user env via `process.env` in Node.js.
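For example, if the user configured a `MY_TOKEN` value under `build.env`, the Builder can read it as follows (the variable name is illustrative):
```js
exports.build = async ({ files, entrypoint }) => {
  // `build.env` values are exposed to the builder process as env vars
  const token = process.env.MY_TOKEN
  if (!token) console.log('MY_TOKEN was not provided')
  // ...
  return { output: {}, watch: [], routes: {} }
}
```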
### Utilities as peerDependencies
When you publish your Builder to npm, make sure not to specify `@now/build-utils` (as seen below in the API definitions) as a regular dependency, but rather as part of `peerDependencies`.
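A hypothetical `package.json` for the html-minifier example above might therefore look like this (the package name and version ranges are illustrative):
```json
{
  "name": "now-html-minifier",
  "version": "1.0.0",
  "main": "index.js",
  "dependencies": {
    "html-minifier": "^4.0.0"
  },
  "peerDependencies": {
    "@now/build-utils": "^0.9.0"
  }
}
```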
## Types
### `Files`
```ts
import { File } from '@now/build-utils'
type Files = { [filePath: string]: File }
```
This is an abstract type that is implemented as a plain [JavaScript Object](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object). It's helpful to think of it as a virtual filesystem representation.
When used as an input, the `Files` object will only contain `FileRefs`. When `Files` is an output, it may consist of `Lambda` (Serverless Functions) types as well as `FileRefs`.
An example of a valid output `Files` object is:
```json
{
"index.html": FileRef,
"api/index.js": Lambda
}
```
### `File`
This is an abstract type that can be imported if you are using TypeScript.
```ts
import { File } from '@now/build-utils'
```
Valid `File` types include:
- [`FileRef`](#fileref)
- [`FileFsRef`](#filefsref)
- [`FileBlob`](#fileblob)
### `FileRef`
```ts
import { FileRef } from '@now/build-utils'
```
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes) that represents an abstract file instance stored in our platform, based on the file identifier string (its checksum). When a `Files` object is passed as an input to `analyze` or `build`, all its values will be instances of `FileRef`.
**Properties:**
- `mode : Number` file mode
- `digest : String` a checksum that represents the file
**Methods:**
- `toStream() : Stream` creates a [Stream](https://nodejs.org/api/stream.html) of the file body
### `FileFsRef`
```ts
import { FileFsRef } from '@now/build-utils'
```
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes) that represents an abstract instance of a file present in the filesystem that the build process is executing in.
**Properties:**
- `mode : Number` file mode
- `fsPath : String` the absolute path of the file in the file system
**Methods:**
- `static async fromStream({ mode : Number, stream : Stream, fsPath : String }) : FileFsRef` creates an instance of a [`FileFsRef`](#filefsref) from a [`Stream`](https://nodejs.org/api/stream.html), placing the file at `fsPath` with the given `mode`
- `toStream() : Stream` creates a [Stream](https://nodejs.org/api/stream.html) of the file body
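As an illustrative sketch based on the signatures above, converting an input [`FileRef`](#fileref) into a `FileFsRef` on disk could look like this (the helper name and target path are assumptions):
```js
const { FileFsRef } = require('@now/build-utils')

// Illustrative: materialize one input file at a path of our choosing.
async function materialize(files, entrypoint, fsPath) {
  return FileFsRef.fromStream({
    mode: files[entrypoint].mode,
    stream: files[entrypoint].toStream(),
    fsPath,
  })
}
```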
### `FileBlob`
```ts
import { FileBlob } from '@now/build-utils'
```
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes) that represents an abstract instance of a file present in memory.
**Properties:**
- `mode : Number` file mode
- `data : String | Buffer` the body of the file
**Methods:**
- `static async fromStream({ mode : Number, stream : Stream }) : FileBlob` creates an instance of a [`FileBlob`](#fileblob) from a [`Stream`](https://nodejs.org/api/stream.html) with the given `mode`
- `toStream() : Stream` creates a [Stream](https://nodejs.org/api/stream.html) of the file body
### `Lambda`
```ts
import { Lambda } from '@now/build-utils'
```
This is a [JavaScript class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Classes) that represents a Serverless Function. It is created by supplying `files`, `handler`, `runtime`, and `environment` as an object to the [`createLambda`](#createlambda) helper; instances of this class should not be constructed directly.
**Properties:**
- `files : Files` the internal filesystem of the lambda
- `handler : String` path to handler file and (optionally) a function name it exports
- `runtime : LambdaRuntime` the name of the lambda runtime
- `environment : Object` key-value map of handler-related environment variables (aside from those passed by the user)
### `LambdaRuntime`
This is an abstract enumeration type that is implemented by one of the following possible `String` values:
- `nodejs10.x`
- `nodejs8.10`
- `go1.x`
- `java-1.8.0-openjdk`
- `python3.6`
- `python2.7`
- `dotnetcore2.1`
- `dotnetcore2.0`
- `dotnetcore1.0`
## JavaScript API
The following is exposed by `@now/build-utils` to simplify the process of writing Builders, manipulating the file system, using the above types, etc.
### `createLambda`
Signature: `createLambda(Object spec) : Lambda`
```ts
import { createLambda } from '@now/build-utils'
```
Constructor for the [`Lambda`](#lambda) type.
```js
const { createLambda, FileBlob } = require('@now/build-utils')
await createLambda({
  runtime: 'nodejs8.10',
  handler: 'index.main',
  files: {
    'index.js': new FileBlob({ data: 'exports.main = () => {}' })
  }
})
```
### `download`
Signature: `download(Files files, String workPath, Object meta?) : Files`
```ts
import { download } from '@now/build-utils'
```
This utility allows you to download the contents of a [`Files`](#files) data structure, therefore creating the filesystem represented in it.
Since `Files` is an abstract way of representing files, you can think of `download` as a way of making that virtual filesystem _real_.
If the **optional** `meta` property is passed (the argument for [build](#build)), only the files that have changed are downloaded. This is decided using `filesRemoved` and `filesChanged` inside that object.
```js
await download(files, workPath, meta)
```
### `glob`
Signature: `glob(String pattern, String workPath) : Files`
```ts
import { glob } from '@now/build-utils'
```
This utility allows you to _scan_ the filesystem and return a [`Files`](#files) representation of the matched glob search string. It can be thought of as the reverse of [`download`](#download).
The following trivial example downloads everything to the filesystem, only to return it back (therefore just re-creating the passed-in [`Files`](#files)):
```js
const { glob, download } = require('@now/build-utils')
exports.build = async ({ files, workPath }) => {
  await download(files, workPath)
  return glob('**', workPath)
}
```
### `getWriteableDirectory`
Signature: `getWriteableDirectory() : String`
```ts
import { getWriteableDirectory } from '@now/build-utils'
```
On occasion, you might want to write to a temporary directory; this helper returns the path to one.
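A minimal sketch, assuming the helper is awaited like the other asynchronous utilities here (the file name is illustrative):
```js
const fs = require('fs')
const path = require('path')
const { getWriteableDirectory } = require('@now/build-utils')

async function writeScratchFile(contents) {
  const tmpDir = await getWriteableDirectory()
  const target = path.join(tmpDir, 'scratch.txt')
  fs.writeFileSync(target, contents)
  return target
}
```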
### `rename`
Signature: `rename(Files files, Function delegate) : Files`
```ts
import { rename } from '@now/build-utils'
```
Renames the keys of the [`Files`](#files) object, which represent the paths. For example, to remove the `.go` suffix you can use:
```js
const { rename } = require('@now/build-utils')
const originalFiles = { 'one.go': fileFsRef1, 'two.go': fileFsRef2 }
const renamedFiles = rename(originalFiles, path => path.replace(/\.go$/, ''))
```

40
PUBLISHING.md Normal file

@@ -0,0 +1,40 @@
# Publishing to npm
Always publish to the Canary Channel as soon as a PR is merged into the `canary` branch.
```
yarn publish-canary
```
Publish the Stable Channel weekly.
- Cherry pick each commit from `canary` to `master` branch
- Verify that you are _in-sync_ with canary (with the exception of the `version` line in `package.json`)
- Deploy the modified Builders
```
# View differences excluding "Publish" commits
git checkout canary && git pull
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/canary.txt
git checkout master && git pull
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/master.txt
diff ~/Desktop/canary.txt ~/Desktop/master.txt
# Cherry pick all PRs from canary into master ...
git cherry-pick <PR501_COMMIT_SHA>
git cherry-pick <PR502_COMMIT_SHA>
git cherry-pick <PR503_COMMIT_SHA>
git cherry-pick <PR504_COMMIT_SHA>
# Verify the only difference is "version" in package.json
git diff origin/canary
# Ship it
yarn publish-stable
```
After running this publish step, GitHub Actions will take care of publishing the modified Builder packages to npm.
If for some reason GitHub Actions fails to publish the npm package, you may do so
manually by running `npm publish` from the package directory. Make sure to
use `npm publish --tag canary` if you are publishing a canary release!


@@ -1,61 +1,19 @@
# now-builders
This is a monorepo containing the [Official Builders](https://zeit.co/docs/v2/deployments/builders/overview) provided by the ZEIT team.
This is a monorepo containing the [Official Builders](https://zeit.co/docs/v2/advanced/builders) provided by the ZEIT team.
## Channels
## Versioning and advanced usage
There are two Channels:
See [VERSIONING.md](VERSIONING.md).
| Channel | Git Branch | npm dist-tag | use example |
| ------- | ------------------------------------------------------------- | ------------ | ------------------ |
| Canary | [canary](https://github.com/zeit/now-builders/commits/canary) | `@canary` | `@now/node@canary` |
| Stable | [master](https://github.com/zeit/now-builders/commits/master) | `@latest` | `@now/node@latest` |
## Publishing to npm
All PRs should be submitted to the `canary` branch.
See [PUBLISHING.md](PUBLISHING.md).
Once a PR is merged into the `canary` branch, it should be published to npm immediately using the Canary Channel.
## Contributing
### Publishing to npm
See [CONTRIBUTING.md](CONTRIBUTING.md).
For the Canary Channel, publish the modified Builders to npm with the following:
### Creating Your Own Builder
```
yarn publish-canary
```
For the Stable Channel, you must do the following:
- Cherry pick each commit from canary to master
- Verify that you are _in-sync_ with canary (with the exception of the `version` line in `package.json`)
- Deploy the modified Builders
```
# View differences excluding "Publish" commits
git checkout canary && git pull
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/canary.txt
git checkout master && git pull
git log --pretty=format:"$ad- %s [%an]" | grep -v Publish > ~/Desktop/master.txt
diff ~/Desktop/canary.txt ~/Desktop/master.txt
# Cherry pick all PRs from canary into master ...
git cherry-pick <PR501_COMMIT_SHA>
git cherry-pick <PR502_COMMIT_SHA>
git cherry-pick <PR503_COMMIT_SHA>
git cherry-pick <PR504_COMMIT_SHA>
# Verify the only difference is "version" in package.json
git diff origin/canary
# Ship it
yarn publish-stable
```
After running this publish step, GitHub Actions will take care of publishing the modified Builder packages to npm.
If for some reason GitHub Actions fails to publish the npm package, you may do so
manually by running `npm publish` from the package directory. Make sure to
use `npm publish --tag canary` if you are publishing a canary release!
### Contributing
See the [Contribution guidelines for this project](CONTRIBUTING.md); it also contains guidance on interpreting test failures.
See [DEVELOPING_A_BUILDER.md](DEVELOPING_A_BUILDER.md).

25
VERSIONING.md Normal file

@@ -0,0 +1,25 @@
# Versioning
Builders are released to two different channels.
## Channels
| Channel | Git Branch | npm dist-tag | use example |
| ------- | ------------------------------------------------------------- | ------------ | ------------------ |
| Canary | [canary](https://github.com/zeit/now-builders/commits/canary) | `@canary` | `@now/node@canary` |
| Stable | [master](https://github.com/zeit/now-builders/commits/master) | `@latest` | `@now/node@latest` |
All PRs are submitted to the `canary` branch. Once a PR is merged into the `canary` branch, it should be published to npm immediately using the Canary Channel.
## Version Selection
Since Builders are published to [npmjs.com](https://npmjs.com), versioning works the same for Builders as it does for any npm package. The `use` statement in [now.json](https://zeit.co/docs/v2/advanced/configuration#builds) has a similar syntax to `npm install`.
The following are valid examples of [@now/node](https://www.npmjs.com/package/@now/node?activeTab=versions):
- `@now/node`
- `@now/node@0.7.3`
- `@now/node@canary`
- `@now/node@0.7.2-canary.2`
We recommend always using the latest version by leaving off the version and dist-tag suffix, for example `@now/node`.
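For example, pinning specific Builder versions in `now.json` uses the usual `builds` syntax (the `src` paths are illustrative):
```json
{
  "version": 2,
  "builds": [
    { "src": "api/date.js", "use": "@now/node@canary" },
    { "src": "api/user.js", "use": "@now/node@0.7.3" }
  ]
}
```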


@@ -1,32 +0,0 @@
root = true
[*]
indent_style = tab
indent_size = 4
tab_width = 4
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[{*.json,*.json.example,*.gyp,*.yml}]
indent_style = space
indent_size = 2
[*.py]
indent_style = space
indent_size = 4
[*.md]
trim_trailing_whitespace = false
# Ideal settings - some plugins might support these.
[*.js]
quote_type = single
[{*.c,*.cc,*.h,*.hh,*.cpp,*.hpp,*.m,*.mm,*.mpp,*.js,*.java,*.go,*.rs,*.php,*.ng,*.jsx,*.ts,*.d,*.cs,*.swift}]
curly_bracket_next_line = false
spaces_around_operators = true
spaces_around_brackets = outside
# close enough to 1TB
indent_brace_style = K&R


@@ -1,2 +0,0 @@
node_modules
handler


@@ -1,16 +0,0 @@
#!/bin/bash
set -euo pipefail
cd "$LAMBDA_TASK_ROOT"
# Configure `import`
export IMPORT_CACHE="$LAMBDA_TASK_ROOT/.import-cache"
export PATH="$IMPORT_CACHE/bin:$PATH"
# Load `import` and runtime
# shellcheck disable=SC1090
. "$(which import)"
# shellcheck disable=SC1090
. "$IMPORT_CACHE/runtime.sh"
# Load user code and process events in a loop forever
_lambda_runtime_init


@@ -1,40 +0,0 @@
#!/bin/bash
set -euo pipefail
# `import` debug logs are always enabled during build
export IMPORT_DEBUG=1
# Install `import`
IMPORT_BIN="$IMPORT_CACHE/bin/import"
mkdir -p "$(dirname "$IMPORT_BIN")"
curl -sfLS https://import.pw > "$IMPORT_BIN"
chmod +x "$IMPORT_BIN"
# For now only the entrypoint file is copied into the lambda
mkdir -p "$(dirname "$DIST/$ENTRYPOINT")"
cp "$ENTRYPOINT" "$DIST/$ENTRYPOINT"
# Copy in the runtime
cp "$BUILDER/runtime.sh" "$IMPORT_CACHE"
cp "$BUILDER/bootstrap" "$DIST"
# Load `import`
. "$(which import)"
# Cache runtime and user dependencies
echo "Caching imports in \"$ENTRYPOINT\"…"
. "$IMPORT_CACHE/runtime.sh"
. "$DIST/$ENTRYPOINT"
echo "Done caching imports"
# Run user build script
if declare -f build > /dev/null; then
echo "Running \`build\` function in \"$ENTRYPOINT\"…"
build "$@"
fi
# Ensure the entrypoint defined a `handler` function
if ! declare -f handler > /dev/null; then
echo "ERROR: A \`handler\` function must be defined in \"$ENTRYPOINT\"!" >&2
exit 1
fi


@@ -1,79 +0,0 @@
const execa = require('execa');
const { join } = require('path');
const snakeCase = require('snake-case');
const {
glob,
download,
createLambda,
shouldServe,
} = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
// From this list: https://import.pw/importpw/import/docs/config.md
const allowedConfigImports = new Set([
'CACHE',
'CURL_OPTS',
'DEBUG',
'RELOAD',
'SERVER',
]);
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
exports.build = async ({
workPath, files, entrypoint, meta, config,
}) => {
console.log('downloading files...');
await download(files, workPath, meta);
const distPath = join(workPath, 'dist');
const configEnv = Object.keys(config).reduce((o, v) => {
const name = snakeCase(v).toUpperCase();
if (allowedConfigImports.has(name)) {
o[`IMPORT_${name}`] = config[v]; // eslint-disable-line no-param-reassign
}
return o;
}, {});
if (config && config.import) {
Object.keys(config.import).forEach((key) => {
const name = snakeCase(key).toUpperCase();
// eslint-disable-next-line no-param-reassign
configEnv[`IMPORT_${name}`] = config.import[key];
});
}
const IMPORT_CACHE = `${distPath}/.import-cache`;
const env = Object.assign({}, process.env, configEnv, {
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
IMPORT_CACHE,
DIST: distPath,
BUILDER: __dirname,
ENTRYPOINT: entrypoint,
});
const builderPath = join(__dirname, 'builder.sh');
await execa(builderPath, [entrypoint], {
env,
cwd: workPath,
stdio: 'inherit',
});
const lambda = await createLambda({
files: await glob('**', distPath),
handler: entrypoint, // not actually used in `bootstrap`
runtime: 'provided',
environment: Object.assign({}, configEnv, {
SCRIPT_FILENAME: entrypoint,
}),
});
return {
[entrypoint]: lambda,
};
};
exports.shouldServe = shouldServe;


@@ -1,28 +0,0 @@
{
"name": "@now/bash",
"version": "1.0.3",
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
"main": "index.js",
"author": "Nathan Rajlich <nate@zeit.co>",
"license": "MIT",
"homepage": "https://zeit.co/docs/v2/deployments/official-builders/bash-now-bash",
"repository": {
"type": "git",
"url": "https://github.com/zeit/now-builders.git",
"directory": "packages/now-bash"
},
"files": [
"builder.sh",
"runtime.sh",
"bootstrap",
"index.js",
"package.json"
],
"dependencies": {
"execa": "^1.0.0",
"snake-case": "^2.1.0"
},
"scripts": {
"test": "jest"
}
}


@@ -1,119 +0,0 @@
#!/bin/bash
import "static-binaries@1.0.0"
static_binaries jq
# These get reset upon each request
_STATUS_CODE="$(mktemp)"
_HEADERS="$(mktemp)"
_lambda_runtime_api() {
local endpoint="$1"
shift
curl -sfLS "http://$AWS_LAMBDA_RUNTIME_API/2018-06-01/runtime/$endpoint" "$@"
}
_lambda_runtime_init() {
# Initialize user code
# shellcheck disable=SC1090
. "$SCRIPT_FILENAME" || {
local exit_code="$?"
local error_message="Initialization failed for '$SCRIPT_FILENAME' (exit code $exit_code)"
echo "$error_message" >&2
local error='{"errorMessage":"'"$error_message"'"}'
_lambda_runtime_api "init/error" -X POST -d "$error"
exit "$exit_code"
}
# Process events
while true; do _lambda_runtime_next; done
}
_lambda_runtime_next() {
echo 200 > "$_STATUS_CODE"
echo '{"content-type":"text/plain; charset=utf8"}' > "$_HEADERS"
local headers
headers="$(mktemp)"
# Get an event
local event
event="$(mktemp)"
_lambda_runtime_api invocation/next -D "$headers" | jq --raw-output --monochrome-output '.body' > "$event"
local request_id
request_id="$(grep -Fi Lambda-Runtime-Aws-Request-Id "$headers" | tr -d '[:space:]' | cut -d: -f2)"
rm -f "$headers"
# Execute the handler function from the script
local body
body="$(mktemp)"
# Stdin of the `handler` function is the HTTP request body.
# Need to use a fifo here instead of bash <() because Lambda
# errors with "/dev/fd/63 not found" for some reason :/
local stdin
stdin="$(mktemp -u)"
mkfifo "$stdin"
_lambda_runtime_body < "$event" > "$stdin" &
local exit_code=0
handler "$event" < "$stdin" > "$body" || exit_code="$?"
rm -f "$event" "$stdin"
if [ "$exit_code" -eq 0 ]; then
# Send the response
jq --raw-input --raw-output --compact-output --slurp --monochrome-output \
--arg statusCode "$(cat "$_STATUS_CODE")" \
--argjson headers "$(cat "$_HEADERS")" \
'{statusCode:$statusCode|tonumber, headers:$headers, encoding:"base64", body:.|@base64}' < "$body" \
| _lambda_runtime_api "invocation/$request_id/response" -X POST -d @- > /dev/null
rm -f "$body" "$_HEADERS"
else
local error_message="Invocation failed for 'handler' function in '$SCRIPT_FILENAME' (exit code $exit_code)"
echo "$error_message" >&2
_lambda_runtime_api "invocation/$request_id/error" -X POST -d '{"errorMessage":"'"$error_message"'"}' > /dev/null
fi
}
_lambda_runtime_body() {
local event
event="$(cat)"
if [ "$(jq --raw-output '.body | type' <<< "$event")" = "string" ]; then
if [ "$(jq --raw-output '.encoding' <<< "$event")" = "base64" ]; then
jq --raw-output '.body' <<< "$event" | base64 --decode
else
# assume plain-text body
jq --raw-output '.body' <<< "$event"
fi
fi
}
# Set the response status code.
http_response_code() {
echo "$1" > "$_STATUS_CODE"
}
# Sets a response header.
# Overrides existing header if it has already been set.
http_response_header() {
local name="$1"
local value="$2"
local tmp
tmp="$(mktemp)"
jq \
--arg name "$name" \
--arg value "$value" \
'.[$name] = $value' < "$_HEADERS" > "$tmp"
mv -f "$tmp" "$_HEADERS"
}
http_response_redirect() {
http_response_code "${2:-302}"
http_response_header "location" "$1"
}
http_response_json() {
http_response_header "content-type" "application/json; charset=utf8"
}


@@ -1,3 +0,0 @@
handler() {
echo "cow:RANDOMNESS_PLACEHOLDER"
}


@@ -1,11 +0,0 @@
{
"version": 2,
"builds": [
{ "src": "index.sh", "use": "@now/bash" },
{ "src": "subdirectory/index.sh", "use": "@now/bash" }
],
"probes": [
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
{ "path": "/subdirectory/", "mustContain": "yoda:RANDOMNESS_PLACEHOLDER" }
]
}


@@ -1,3 +0,0 @@
handler() {
echo "yoda:RANDOMNESS_PLACEHOLDER"
}


@@ -1,33 +0,0 @@
/* global beforeAll, expect, it, jest */
const fs = require('fs');
const path = require('path');
const {
packAndDeploy,
testDeployment,
} = require('../../../test/lib/deployment/test-deployment.js');
jest.setTimeout(4 * 60 * 1000);
const buildUtilsUrl = '@canary';
let builderUrl;
beforeAll(async () => {
const builderPath = path.resolve(__dirname, '..');
builderUrl = await packAndDeploy(builderPath);
console.log('builderUrl', builderUrl);
});
const fixturesPath = path.resolve(__dirname, 'fixtures');
// eslint-disable-next-line no-restricted-syntax
for (const fixture of fs.readdirSync(fixturesPath)) {
// eslint-disable-next-line no-loop-func
it(`should build ${fixture}`, async () => {
await expect(
testDeployment(
{ builderUrl, buildUtilsUrl },
path.join(fixturesPath, fixture),
),
).resolves.toBeDefined();
});
}


@@ -1,6 +1,6 @@
{
"name": "@now/build-utils",
"version": "0.9.6",
"version": "0.9.12",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",


@@ -66,6 +66,10 @@ export function ignoreApiFilter(file: string) {
return false;
}
if (file.endsWith('.d.ts')) {
return false;
}
// If the file does not match any builder we also
// don't want to create a route e.g. `package.json`
if (API_BUILDERS.every(({ src }) => !minimatch(file, src))) {


@@ -2,6 +2,16 @@ import { Route, Builder } from './types';
import { parse as parsePath } from 'path';
import { ignoreApiFilter, sortFiles } from './detect-builders';
function escapeName(name: string) {
const special = '[]^$.|?*+()'.split('');
for (const char of special) {
name = name.replace(new RegExp(`\\${char}`, 'g'), `\\${char}`);
}
return name;
}
function joinPath(...segments: string[]) {
const joinedPath = segments.join('/');
return joinedPath.replace(/\/{2,}/g, '/');
@@ -46,15 +56,32 @@ function createRouteFromPath(filePath: string): Route {
query.push(`${name}=$${counter++}`);
return `([^\\/]+)`;
} else if (isLast) {
const { name: fileName } = parsePath(segment);
return fileName;
const { name: fileName, ext } = parsePath(segment);
const isIndex = fileName === 'index';
const prefix = isIndex ? '\\/' : '';
const names = [
prefix,
prefix + escapeName(fileName),
prefix + escapeName(fileName) + escapeName(ext),
].filter(Boolean);
// Either filename with extension, filename without extension
// or nothing when the filename is `index`
return `(${names.join('|')})${isIndex ? '?' : ''}`;
}
return segment;
}
);
const src = `^/${srcParts.join('/')}$`;
const { name: fileName } = parsePath(filePath);
const isIndex = fileName === 'index';
const src = isIndex
? `^/${srcParts.slice(0, -1).join('/')}${srcParts.slice(-1)[0]}$`
: `^/${srcParts.join('/')}$`;
const dest = `/${filePath}${query.length ? '?' : ''}${query.join('&')}`;
return { src, dest };
@@ -232,6 +259,14 @@ async function detectApiRoutes(files: string[]): Promise<RoutesResult> {
defaultRoutes.push(createRouteFromPath(file));
}
// 404 Route to disable directory listing
if (defaultRoutes.length) {
defaultRoutes.push({
status: 404,
src: '/api(\\/.*)?$',
});
}
return { defaultRoutes, error: null };
}


@@ -4,7 +4,7 @@ import path from 'path';
import spawn from 'cross-spawn';
import { SpawnOptions } from 'child_process';
import { deprecate } from 'util';
import { Meta, PackageJson, NodeVersion } from '../types';
import { Meta, PackageJson, NodeVersion, Config } from '../types';
import { getSupportedNodeVersion } from './node-version';
function spawnAsync(
@@ -75,13 +75,23 @@ export function getSpawnOptions(
export async function getNodeVersion(
destPath: string,
minNodeVersion?: string
minNodeVersion?: string,
config?: Config
): Promise<NodeVersion> {
const { packageJson } = await scanParentDirs(destPath, true);
const range =
(packageJson && packageJson.engines && packageJson.engines.node) ||
minNodeVersion;
return getSupportedNodeVersion(range, typeof minNodeVersion !== 'undefined');
let range: string | undefined;
let silent = false;
if (packageJson && packageJson.engines && packageJson.engines.node) {
range = packageJson.engines.node;
} else if (minNodeVersion) {
range = minNodeVersion;
silent = true;
} else if (config && config.zeroConfig) {
// Use latest node version zero config detected
range = '10.x';
silent = true;
}
return getSupportedNodeVersion(range, silent);
}
async function scanParentDirs(destPath: string, readPackageJson = false) {


@@ -0,0 +1 @@
now.json


@@ -0,0 +1,3 @@
module.exports = (req, res) => {
res.end(req.query.endpoint);
};


@@ -0,0 +1,3 @@
module.exports = (req, res) => {
res.end(`${req.query.endpoint}/${req.query.id}`);
};


@@ -0,0 +1,5 @@
{
"scripts": {
"build": "mkdir -p public && echo 'hello from index.txt' > public/index.txt"
}
}


@@ -0,0 +1 @@
now.json


@@ -0,0 +1,3 @@
module.exports = (req, res) => {
res.end('hello from api/date.js');
};


@@ -0,0 +1,3 @@
module.exports = (req, res) => {
res.end('hello from api/date/index.js');
};


@@ -0,0 +1,3 @@
module.exports = (req, res) => {
res.end('hello from api/index.js');
};


@@ -0,0 +1,5 @@
{
"scripts": {
"build": "mkdir -p public && echo 'hello from index.txt' > public/index.txt"
}
}


@@ -4,21 +4,21 @@ const fs = require('fs-extra');
// eslint-disable-next-line import/no-extraneous-dependencies
const execa = require('execa');
const assert = require('assert');
const { glob, download } = require('../');
const { createZip } = require('../dist/lambda');
const {
glob, download, detectBuilders, detectRoutes,
} = require('../');
const {
getSupportedNodeVersion,
defaultSelection,
} = require('../dist/fs/node-version');
const {
packAndDeploy,
testDeployment,
} = require('../../../test/lib/deployment/test-deployment.js');
const { detectBuilders, detectRoutes } = require('../dist');
} = require('../../../test/lib/deployment/test-deployment');
jest.setTimeout(4 * 60 * 1000);
const builderUrl = '@canary';
let buildUtilsUrl;
@@ -152,6 +152,11 @@ const fixturesPath = path.resolve(__dirname, 'fixtures');
// eslint-disable-next-line no-restricted-syntax
for (const fixture of fs.readdirSync(fixturesPath)) {
if (fixture.includes('zero-config')) {
// Those have separate tests
continue; // eslint-disable-line no-continue
}
// eslint-disable-next-line no-loop-func
it(`should build ${fixture}`, async () => {
await expect(
@@ -454,9 +459,11 @@ it('Test `detectRoutes`', async () => {
const { builders } = await detectBuilders(files);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(defaultRoutes.length).toBe(2);
expect(defaultRoutes.length).toBe(3);
expect(defaultRoutes[0].dest).toBe('/api/team.js');
expect(defaultRoutes[1].dest).toBe('/api/user.go');
expect(defaultRoutes[2].dest).not.toBeDefined();
expect(defaultRoutes[2].status).toBe(404);
}
{
@@ -483,12 +490,21 @@ it('Test `detectRoutes`', async () => {
expect(error.code).toBe('conflicting_path_segment');
}
{
const files = ['api/date/index.js', 'api/date/index.go'];
const { builders } = await detectBuilders(files);
const { defaultRoutes, error } = await detectRoutes(files, builders);
expect(defaultRoutes).toBe(null);
expect(error.code).toBe('conflicting_file_path');
}
{
const files = ['api/[endpoint].js', 'api/[endpoint]/[id].js'];
const { builders } = await detectBuilders(files);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(defaultRoutes.length).toBe(2);
expect(defaultRoutes.length).toBe(3);
}
{
@@ -500,9 +516,11 @@ it('Test `detectRoutes`', async () => {
const { builders } = await detectBuilders(files);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(defaultRoutes[2].src).toBe('/(.*)');
expect(defaultRoutes[2].dest).toBe('/public/$1');
expect(defaultRoutes.length).toBe(3);
expect(defaultRoutes[2].status).toBe(404);
expect(defaultRoutes[2].src).toBe('/api(\\/.*)?$');
expect(defaultRoutes[3].src).toBe('/(.*)');
expect(defaultRoutes[3].dest).toBe('/public/$1');
expect(defaultRoutes.length).toBe(4);
}
{
@@ -514,6 +532,210 @@ it('Test `detectRoutes`', async () => {
const { builders } = await detectBuilders(files, pkg);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(defaultRoutes[1].status).toBe(404);
expect(defaultRoutes[1].src).toBe('/api(\\/.*)?$');
expect(defaultRoutes.length).toBe(2);
}
{
const files = ['public/index.html'];
const { builders } = await detectBuilders(files);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(defaultRoutes.length).toBe(1);
}
{
const files = ['api/date/index.js', 'api/date.js'];
const { builders } = await detectBuilders(files);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(defaultRoutes.length).toBe(3);
expect(defaultRoutes[0].src).toBe(
'^/api/date(\\/|\\/index|\\/index\\.js)?$',
);
expect(defaultRoutes[0].dest).toBe('/api/date/index.js');
expect(defaultRoutes[1].src).toBe('^/api/(date|date\\.js)$');
expect(defaultRoutes[1].dest).toBe('/api/date.js');
}
{
const files = ['api/date.js', 'api/[date]/index.js'];
const { builders } = await detectBuilders(files);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(defaultRoutes.length).toBe(3);
expect(defaultRoutes[0].src).toBe(
'^/api/([^\\/]+)(\\/|\\/index|\\/index\\.js)?$',
);
expect(defaultRoutes[0].dest).toBe('/api/[date]/index.js?date=$1');
expect(defaultRoutes[1].src).toBe('^/api/(date|date\\.js)$');
expect(defaultRoutes[1].dest).toBe('/api/date.js');
}
{
const files = [
'api/index.ts',
'api/index.d.ts',
'api/users/index.ts',
'api/users/index.d.ts',
'api/food.ts',
'api/ts/gold.ts',
];
const { builders } = await detectBuilders(files);
const { defaultRoutes } = await detectRoutes(files, builders);
expect(builders.length).toBe(4);
expect(builders[0].use).toBe('@now/node');
expect(builders[1].use).toBe('@now/node');
expect(builders[2].use).toBe('@now/node');
expect(builders[3].use).toBe('@now/node');
expect(defaultRoutes.length).toBe(5);
}
});
it('Test `detectBuilders` and `detectRoutes`', async () => {
const fixture = path.join(__dirname, 'fixtures', '01-zero-config-api');
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
const fileList = await glob('**', fixture);
const files = Object.keys(fileList);
const probes = [
{
path: '/api/my-endpoint',
mustContain: 'my-endpoint',
status: 200,
},
{
path: '/api/other-endpoint',
mustContain: 'other-endpoint',
status: 200,
},
{
path: '/api/team/zeit',
mustContain: 'team/zeit',
status: 200,
},
{
path: '/api/user/myself',
mustContain: 'user/myself',
status: 200,
},
{
path: '/api/not-okay/',
status: 404,
},
{
path: '/api',
status: 404,
},
{
path: '/api/',
status: 404,
},
{
path: '/',
mustContain: 'hello from index.txt',
},
];
const { builders } = await detectBuilders(files, pkg);
const { defaultRoutes } = await detectRoutes(files, builders);
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
await fs.writeFile(
path.join(fixture, 'now.json'),
JSON.stringify(nowConfig, null, 2),
);
const deployment = await testDeployment(
{ builderUrl, buildUtilsUrl },
fixture,
);
expect(deployment).toBeDefined();
});
it('Test `detectBuilders` and `detectRoutes` with `index` files', async () => {
const fixture = path.join(__dirname, 'fixtures', '02-zero-config-api');
const pkg = await fs.readJSON(path.join(fixture, 'package.json'));
const fileList = await glob('**', fixture);
const files = Object.keys(fileList);
const probes = [
{
path: '/api/not-okay',
status: 404,
},
{
path: '/api',
mustContain: 'hello from api/index.js',
status: 200,
},
{
path: '/api/',
mustContain: 'hello from api/index.js',
status: 200,
},
{
path: '/api/index',
mustContain: 'hello from api/index.js',
status: 200,
},
{
path: '/api/index.js',
mustContain: 'hello from api/index.js',
status: 200,
},
{
path: '/api/date.js',
mustContain: 'hello from api/date.js',
status: 200,
},
{
// Someone might expect this to be `date.js`,
// but I doubt that there is any case where both
// `date/index.js` and `date.js` exist,
// so it is not special cased
path: '/api/date',
mustContain: 'hello from api/date/index.js',
status: 200,
},
{
path: '/api/date/',
mustContain: 'hello from api/date/index.js',
status: 200,
},
{
path: '/api/date/index',
mustContain: 'hello from api/date/index.js',
status: 200,
},
{
path: '/api/date/index.js',
mustContain: 'hello from api/date/index.js',
status: 200,
},
{
path: '/',
mustContain: 'hello from index.txt',
},
];
const { builders } = await detectBuilders(files, pkg);
const { defaultRoutes } = await detectRoutes(files, builders);
const nowConfig = { builds: builders, routes: defaultRoutes, probes };
await fs.writeFile(
path.join(fixture, 'now.json'),
JSON.stringify(nowConfig, null, 2),
);
const deployment = await testDeployment(
{ builderUrl, buildUtilsUrl },
fixture,
);
expect(deployment).toBeDefined();
});


@@ -1,4 +1,5 @@
node_modules
dist
*.log
/go
/analyze


@@ -1,5 +0,0 @@
*.ts
test
tsconfig.json
package-lock.json
yarn.lock

4
packages/now-go/build.sh Executable file

@@ -0,0 +1,4 @@
ncc build index.ts -o dist
ncc build install.ts -o dist/install
mv dist/install/index.js dist/install.js
rm -rf dist/install


@@ -106,7 +106,7 @@ Learn more: https://github.com/golang/go/wiki/Modules
if (!analyzed) {
const err = new Error(
`Could not find an exported function in "${entrypoint}"
Learn more: https://zeit.co/docs/v2/deployments/official-builders/go-now-go/#entrypoint
Learn more: https://zeit.co/docs/v2/advanced/builders/#go
`
);
console.log(err.message);


@@ -1,7 +1,8 @@
{
"name": "@now/go",
"version": "0.5.7",
"version": "0.5.10",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://zeit.co/docs/v2/deployments/official-builders/go-now-go",
"repository": {
"type": "git",
@@ -9,30 +10,25 @@
"directory": "packages/now-go"
},
"scripts": {
"build": "tsc",
"test": "tsc && jest",
"prepublish": "tsc",
"now-postinstall": "node install.js"
"build": "./build.sh",
"test": "./build.sh && jest",
"prepublish": "./build.sh",
"now-postinstall": "node dist/install.js"
},
"files": [
"*.js",
"main.go",
"main__mod__.go",
"util"
"dist"
],
"dependencies": {
"debug": "^4.1.1",
"execa": "^1.0.0",
"fs-extra": "^7.0.0",
"node-fetch": "^2.2.1",
"tar": "4.4.6"
},
"devDependencies": {
"@types/debug": "^4.1.3",
"@types/execa": "^0.9.0",
"@types/fs-extra": "^5.0.5",
"@types/node-fetch": "^2.3.0",
"@types/tar": "^4.0.0",
"debug": "^4.1.1",
"execa": "^1.0.0",
"fs-extra": "^7.0.0",
"node-fetch": "^2.2.1",
"tar": "4.4.6",
"typescript": "3.5.2"
}
}


@@ -0,0 +1,9 @@
package handler
import (
"fmt"
"net/http"
)
// Handler is cool
func Handler(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "ONE:RANDOMNESS_PLACEHOLDER")
}


@@ -0,0 +1,9 @@
package handler
import (
"fmt"
"net/http"
)
// Handler is cool
func Handler(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "TWO:RANDOMNESS_PLACEHOLDER")
}


@@ -0,0 +1,3 @@
module go-example
go 1.12


@@ -0,0 +1,25 @@
{
"version": 2,
"builds": [
{
"src": "/api/go-one/one.go",
"use": "@now/go",
"config": { "parallel": true }
},
{
"src": "/api/go-two/two.go",
"use": "@now/go",
"config": { "parallel": true }
}
],
"probes": [
{
"path": "/api/go-one/one.go",
"mustContain": "ONE:RANDOMNESS_PLACEHOLDER"
},
{
"path": "/api/go-two/two.go",
"mustContain": "TWO:RANDOMNESS_PLACEHOLDER"
}
]
}


@@ -1,6 +1,6 @@
{
"name": "@now/next",
"version": "0.5.8",
"version": "0.6.0",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://zeit.co/docs/v2/deployments/official-builders/next-js-now-next",
@@ -21,6 +21,7 @@
"@types/next-server": "8.0.0",
"@types/resolve-from": "5.0.1",
"@types/semver": "6.0.0",
"@zeit/node-file-trace": "0.2.9",
"fs-extra": "7.0.0",
"get-port": "5.0.0",
"resolve-from": "5.0.0",


@@ -1,13 +1,16 @@
import path from 'path';
import fs from 'fs-extra';
import path from 'path';
import semver from 'semver';
function getCustomData(importName: string) {
import { ExperimentalTraceVersion } from './utils';
function getCustomData(importName: string, target: string) {
return `
module.exports = function(...args) {
let original = require('./${importName}');
const finalConfig = {};
const target = { target: 'serverless' };
const target = { target: '${target}' };
if (typeof original === 'function' && original.constructor.name === 'AsyncFunction') {
// AsyncFunctions will become promises
@@ -37,14 +40,29 @@ function getDefaultData() {
return `module.exports = { target: 'serverless' };`;
}
export default async function createServerlessConfig(workPath: string) {
export default async function createServerlessConfig(
workPath: string,
nextVersion: string | undefined
) {
let target = 'serverless';
if (nextVersion) {
try {
if (
nextVersion.includes('canary') &&
semver.satisfies(nextVersion, `>=${ExperimentalTraceVersion}`)
) {
target = 'experimental-serverless-trace';
}
} catch (_ignored) {}
}
const configPath = path.join(workPath, 'next.config.js');
const backupConfigName = `next.config.original.${Date.now()}.js`;
const backupConfigPath = path.join(workPath, backupConfigName);
if (fs.existsSync(configPath)) {
await fs.rename(configPath, backupConfigPath);
await fs.writeFile(configPath, getCustomData(backupConfigName));
await fs.writeFile(configPath, getCustomData(backupConfigName, target));
} else {
await fs.writeFile(configPath, getDefaultData());
}


@@ -7,10 +7,12 @@ import {
} from 'fs-extra';
import os from 'os';
import path from 'path';
import resolveFrom from 'resolve-from';
import semver from 'semver';
import {
BuildOptions,
Config,
createLambda,
download,
FileBlob,
@@ -21,16 +23,18 @@ import {
glob,
Lambda,
PrepareCacheOptions,
Route,
runNpmInstall,
runPackageJsonScript,
Route,
} from '@now/build-utils';
import nodeFileTrace from '@zeit/node-file-trace';
import createServerlessConfig from './create-serverless-config';
import nextLegacyVersions from './legacy-versions';
import {
EnvConfig,
excludeFiles,
ExperimentalTraceVersion,
filesFromDirectory,
getDynamicRoutes,
getNextConfig,
@@ -156,6 +160,7 @@ export const build = async ({
files,
workPath,
entrypoint,
config = {} as Config,
meta = {} as BuildParamsMeta,
}: BuildParamsType): Promise<{
routes: Route[];
@@ -175,11 +180,7 @@ export const build = async ({
const pkg = await readPackageJson(entryPath);
const nextVersion = getNextVersion(pkg);
if (!meta.isDev) {
await createServerlessConfig(workPath);
}
const nodeVersion = await getNodeVersion(entryPath);
const nodeVersion = await getNodeVersion(entryPath, undefined, config);
const spawnOpts = getSpawnOptions(meta, nodeVersion);
if (!nextVersion) {
@@ -283,6 +284,20 @@ export const build = async ({
console.log('installing dependencies...');
await runNpmInstall(entryPath, ['--prefer-offline'], spawnOpts);
let realNextVersion: string | undefined;
try {
realNextVersion = require(resolveFrom(entryPath, 'next/package.json'))
.version;
console.log(`detected Next.js version: ${realNextVersion}`);
} catch (_ignored) {
console.warn(`could not identify real Next.js version, that's OK!`);
}
if (!isLegacy) {
await createServerlessConfig(workPath, realNextVersion);
}
console.log('running user script...');
const memoryToConsume = Math.floor(os.totalmem() / 1024 ** 2) - 128;
const env = { ...spawnOpts.env } as any;
@@ -393,14 +408,6 @@ export const build = async ({
);
} else {
console.log('preparing lambda files...');
const launcherFiles = {
'now__bridge.js': new FileFsRef({
fsPath: path.join(__dirname, 'now__bridge.js'),
}),
'now__launcher.js': new FileFsRef({
fsPath: path.join(__dirname, 'launcher.js'),
}),
};
const pagesDir = path.join(entryPath, '.next', 'serverless', 'pages');
const pages = await glob('**/*.js', pagesDir);
@@ -439,18 +446,77 @@ export const build = async ({
);
}
// An optional assets folder that is placed alongside every page entrypoint
const assets = await glob(
'assets/**',
path.join(entryPath, '.next', 'serverless')
);
const assetKeys = Object.keys(assets);
if (assetKeys.length > 0) {
console.log('detected assets to be bundled with lambda:');
assetKeys.forEach(assetFile => console.log(`\t${assetFile}`));
// Assume tracing to be safe, bail if we know we don't need it.
let requiresTracing = true;
try {
if (
realNextVersion &&
semver.satisfies(realNextVersion, `<${ExperimentalTraceVersion}`)
) {
if (config.debug) {
console.log(
'Next.js version is too old for us to trace the required dependencies.\n' +
'Assuming Next.js has handled it!'
);
}
requiresTracing = false;
}
} catch (err) {
if (config.debug) {
console.log(
'Failed to check Next.js version for tracing compatibility: ' + err
);
}
}
let assets:
| {
[filePath: string]: FileFsRef;
}
| undefined;
const tracedFiles: {
[filePath: string]: FileFsRef;
} = {};
if (requiresTracing) {
const tracingLabel = 'Tracing Next.js lambdas for external files ...';
console.time(tracingLabel);
const { fileList } = await nodeFileTrace(
Object.keys(pages).map(page => pages[page].fsPath),
{ base: workPath }
);
if (config.debug) {
console.log(`node-file-trace result for pages: ${fileList}`);
}
fileList.forEach(file => {
tracedFiles[file] = new FileFsRef({
fsPath: path.join(workPath, file),
});
});
console.timeEnd(tracingLabel);
} else {
// An optional assets folder that is placed alongside every page
// entrypoint.
// This is a legacy feature that was needed before we began tracing
// lambdas.
assets = await glob(
'assets/**',
path.join(entryPath, '.next', 'serverless')
);
const assetKeys = Object.keys(assets!);
if (assetKeys.length > 0) {
console.log('detected (legacy) assets to be bundled with lambda:');
assetKeys.forEach(assetFile => console.log(`\t${assetFile}`));
console.log(
'\nPlease upgrade to Next.js 9.1 to leverage modern asset handling.'
);
}
}
const launcherPath = path.join(__dirname, 'templated-launcher.js');
const launcherData = await readFile(launcherPath, 'utf8');
await Promise.all(
pageKeys.map(async page => {
// These default pages don't have to be handled as they'd always 404
@@ -464,17 +530,34 @@ export const build = async ({
dynamicPages.push(normalizePage(pathname));
}
console.log(`Creating lambda for page: "${page}"...`);
const label = `Creating lambda for page: "${page}"...`;
console.time(label);
const pageFileName = path.relative(workPath, pages[page].fsPath);
const launcher = launcherData.replace(
/__LAUNCHER_PAGE_PATH__/g,
JSON.stringify(
requiresTracing ? path.join('./', pageFileName) : './page'
)
);
const launcherFiles = {
'now__bridge.js': new FileFsRef({
fsPath: path.join(__dirname, 'now__bridge.js'),
}),
'now__launcher.js': new FileBlob({ data: launcher }),
};
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
files: {
...launcherFiles,
...assets,
'page.js': pages[page],
...tracedFiles,
[requiresTracing ? pageFileName : 'page.js']: pages[page],
},
handler: 'now__launcher.launcher',
runtime: nodeVersion.runtime,
});
console.log(`Created lambda for page: "${page}"`);
console.timeEnd(label);
})
);
}
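
For context on the tracing branch above, here is a minimal, self-contained sketch (not the builder's exact code) of how @zeit/node-file-trace is driven: trace the page entry files against a base directory, then turn each returned relative path into a FileFsRef so it can be attached to the lambda alongside the launcher.

import path from 'path';
import { FileFsRef } from '@now/build-utils';
import nodeFileTrace from '@zeit/node-file-trace';

// Trace the given entry files and key each dependency by its path relative
// to `base`, mirroring the tracedFiles map built in the diff above.
async function traceForLambda(entryPaths: string[], base: string) {
  const { fileList } = await nodeFileTrace(entryPaths, { base });
  const tracedFiles: { [filePath: string]: FileFsRef } = {};
  for (const file of fileList) {
    tracedFiles[file] = new FileFsRef({ fsPath: path.join(base, file) });
  }
  return tracedFiles;
}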

View File

@@ -5,7 +5,8 @@ if (!process.env.NODE_ENV) {
import { Server } from 'http';
import { Bridge } from './now__bridge';
const page = require('./page');
// @ts-ignore
const page = require(__LAUNCHER_PAGE_PATH__);
// page.render is for React rendering
// page.default is for /api rendering
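
A minimal sketch of the substitution the builder performs on this template (see the launcher assembly earlier in this diff): __LAUNCHER_PAGE_PATH__ is replaced at build time with a JSON-encoded require path, so the emitted launcher contains an ordinary string literal. The template filename comes from the diff; the helper name is illustrative.

import { readFile } from 'fs-extra';
import path from 'path';

async function renderLauncher(pageRequirePath: string): Promise<string> {
  const template = await readFile(
    path.join(__dirname, 'templated-launcher.js'),
    'utf8'
  );
  // e.g. '__LAUNCHER_PAGE_PATH__' becomes '"./page"' when tracing is not
  // required, or the page's path relative to workPath when it is.
  return template.replace(
    /__LAUNCHER_PAGE_PATH__/g,
    JSON.stringify(pageRequirePath)
  );
}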

View File

@@ -356,6 +356,8 @@ function syncEnvVars(base: EnvConfig, removeEnv: EnvConfig, addEnv: EnvConfig) {
Object.assign(base, addEnv);
}
export const ExperimentalTraceVersion = `9.0.4-canary.1`;
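
This constant is the cut-off consulted in the diffs above: createServerlessConfig uses it to pick between the 'serverless' and 'experimental-serverless-trace' targets, and the builder uses it to decide whether tracing is required. A minimal sketch of that gate, assuming only the rule shown in those diffs (the constant is repeated here so the snippet is self-contained):

import semver from 'semver';

const ExperimentalTraceVersion = '9.0.4-canary.1';

// Only canary releases at or above the cut-off opt into the traced target.
function pickTarget(
  nextVersion?: string
): 'serverless' | 'experimental-serverless-trace' {
  if (
    nextVersion &&
    nextVersion.includes('canary') &&
    semver.satisfies(nextVersion, `>=${ExperimentalTraceVersion}`)
  ) {
    return 'experimental-serverless-trace';
  }
  return 'serverless';
}

// pickTarget('9.0.4-canary.2') === 'experimental-serverless-trace'
// pickTarget('9.0.3') === 'serverless'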
export {
excludeFiles,
validateEntrypoint,

View File

@@ -0,0 +1,5 @@
module.exports = {
generateBuildId() {
return 'testing-build-id';
},
};

View File

@@ -0,0 +1,8 @@
{
"version": 2,
"builds": [{ "src": "package.json", "use": "@now/next" }],
"probes": [
{ "path": "/nested/fb", "mustContain": "Hello Firebase: <!-- -->0" },
{ "path": "/nested/moar/fb", "mustContain": "Hello Firebase: <!-- -->0" }
]
}
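
The "probes" entries in this fixture describe end-to-end checks run against a deployment of the fixture. As a rough sketch (not the actual test harness), each probe boils down to fetching the path on the deployment and asserting the body contains the mustContain string; deploymentUrl and the use of node-fetch here are assumptions for illustration.

import assert from 'assert';
import fetch from 'node-fetch';

interface Probe {
  path: string;
  mustContain: string;
}

// Hypothetical probe runner: request each path and check the response body.
async function runProbe(deploymentUrl: string, probe: Probe): Promise<void> {
  const res = await fetch(`https://${deploymentUrl}${probe.path}`);
  const body = await res.text();
  assert(
    body.includes(probe.mustContain),
    `probe for ${probe.path} did not contain "${probe.mustContain}"`
  );
}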

View File

@@ -0,0 +1,8 @@
{
"dependencies": {
"next": "canary",
"react": "^16.8.6",
"react-dom": "^16.8.6",
"firebase": "6.3.4"
}
}

View File

@@ -0,0 +1,19 @@
import firebase from 'firebase/app';
import 'firebase/firestore';
if (!firebase.apps.length) {
firebase.initializeApp({ projectId: 'noop' });
}
const store = firebase.firestore();
const Comp = ({ results }) => {
return <div>Hello Firebase: {results}</div>;
};
Comp.getInitialProps = async () => {
const query = await store.collection('users').get();
return { results: query.size };
};
export default Comp;

View File

@@ -0,0 +1,19 @@
import firebase from 'firebase/app';
import 'firebase/firestore';
if (!firebase.apps.length) {
firebase.initializeApp({ projectId: 'noop' });
}
const store = firebase.firestore();
const Comp = ({ results }) => {
return <div>Hello Firebase: {results}</div>;
};
Comp.getInitialProps = async () => {
const query = await store.collection('users').get();
return { results: query.size };
};
export default Comp;

View File

@@ -0,0 +1,5 @@
module.exports = {
generateBuildId() {
return 'testing-build-id';
},
};

View File

@@ -0,0 +1,8 @@
{
"version": 2,
"builds": [{ "src": "package.json", "use": "@now/next" }],
"probes": [
{ "path": "/hello1", "mustContain": "Hello World 1" },
{ "path": "/nested/hello2", "mustContain": "Hello World 2" }
]
}

View File

@@ -0,0 +1,7 @@
{
"dependencies": {
"next": "8.x.x",
"react": "^16.8.6",
"react-dom": "^16.8.6"
}
}

View File

@@ -0,0 +1,7 @@
function A({ data }) {
return <div>{data}</div>;
}
A.getInitialProps = () => ({ data: 'Hello World 1' });
export default A;

View File

@@ -0,0 +1,7 @@
function B({ data }) {
return <div>{data}</div>;
}
B.getInitialProps = () => ({ data: 'Hello World 2' });
export default B;

View File

@@ -1,6 +1,6 @@
{
"name": "@now/node",
"version": "0.12.3",
"version": "0.12.6",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://zeit.co/docs/v2/deployments/official-builders/node-js-now-node",
@@ -28,7 +28,7 @@
"@types/etag": "1.8.0",
"@types/test-listen": "1.1.0",
"@zeit/ncc": "0.20.4",
"@zeit/node-file-trace": "0.2.6",
"@zeit/node-file-trace": "0.2.9",
"content-type": "1.0.4",
"cookie": "0.4.0",
"etag": "1.8.1",

View File

@@ -16,6 +16,7 @@ import {
PrepareCacheOptions,
BuildOptions,
shouldServe,
Config,
} from '@now/build-utils';
export { NowRequest, NowResponse } from './types';
import { makeLauncher } from './launcher';
@@ -32,6 +33,7 @@ interface DownloadOptions {
files: Files;
entrypoint: string;
workPath: string;
config: Config;
meta: Meta;
}
@@ -53,6 +55,7 @@ async function downloadInstallAndBundle({
files,
entrypoint,
workPath,
config,
meta,
}: DownloadOptions) {
console.log('downloading user files...');
@@ -63,7 +66,11 @@ async function downloadInstallAndBundle({
console.log("installing dependencies for user's code...");
const installTime = Date.now();
const entrypointFsDirname = join(workPath, dirname(entrypoint));
const nodeVersion = await getNodeVersion(entrypointFsDirname);
const nodeVersion = await getNodeVersion(
entrypointFsDirname,
undefined,
config
);
const spawnOpts = getSpawnOptions(meta, nodeVersion);
await runNpmInstall(entrypointFsDirname, ['--prefer-offline'], spawnOpts);
console.log(`install complete [${Date.now() - installTime}ms]`);
@@ -232,7 +239,12 @@ async function compile(
}
}
// Rename .ts -> .js (except for entry)
if (path !== entrypoint && tsCompiled.has(path)) {
// There is a bug on Windows where entrypoint uses forward slashes
// and workPath uses backslashes so we use resolve before comparing.
if (
resolve(workPath, path) !== resolve(workPath, entrypoint) &&
tsCompiled.has(path)
) {
preparedFiles[
path.slice(0, -3 - Number(path.endsWith('x'))) + '.js'
] = entry;
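
A minimal illustration (with hypothetical paths) of why the comparison above goes through resolve(): on Windows, path.resolve normalizes separators, so an entrypoint received with forward slashes still compares equal to the backslash form encountered while walking workPath.

import { resolve } from 'path';

const workPath = 'C:\\builds\\project'; // assumed example workPath
const entrypoint = 'api/index.ts';      // forward slashes, as received
const walkedPath = 'api\\index.ts';     // backslashes, as seen on disk

// The raw strings differ, but the resolved absolute paths match on win32:
console.log(entrypoint === walkedPath);                                       // false
console.log(resolve(workPath, walkedPath) === resolve(workPath, entrypoint)); // true on win32, false elsewhere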
@@ -297,6 +309,7 @@ export async function build({
files,
entrypoint,
workPath,
config,
meta,
});

View File

@@ -8,7 +8,7 @@ import _ts from 'typescript';
*/
/**
* Debugging `ts-node`.
* Debugging.
*/
const shouldDebug = false;
const debug = shouldDebug

View File

@@ -1 +0,0 @@
/build

View File

@@ -1,20 +0,0 @@
FROM library/centos:6.8
RUN yum -y install wget git
RUN rpm -Uvh https://mirror.webtatic.com/yum/el6/latest.rpm
RUN yum -y install php71w-cli php71w-fpm php71w-mbstring php71w-mysql php71w-opcache
RUN yum -y install epel-release
RUN yum -y install patchelf
RUN mkdir -p /root/app/public
WORKDIR /root/app
COPY ./php.ini /root/app/php.ini
COPY ./php-fpm.ini /root/app/php-fpm.ini
COPY ./test.php /root/app/test.php
COPY ./test.sh /root/app/test.sh
RUN patchelf --set-rpath '$ORIGIN' /usr/bin/php
RUN patchelf --set-rpath '$ORIGIN' /usr/sbin/php-fpm
RUN patchelf --set-rpath '$ORIGIN' /usr/lib64/php/modules/mysqli.so
CMD ["/bin/bash", "test.sh"]

View File

@@ -1,15 +0,0 @@
rm -rf ../native
mkdir -p ../native/modules
docker rmi now-php-docker-image --force
docker build . -t now-php-docker-image
docker run now-php-docker-image
docker run now-php-docker-image /bin/cat /usr/sbin/php-fpm > ../native/php-fpm
docker run now-php-docker-image /bin/cat /root/app/php.ini > ../native/php.ini
docker run now-php-docker-image /bin/cat /root/app/php-fpm.ini > ../native/php-fpm.ini
docker run now-php-docker-image /bin/cat /usr/lib64/php/modules/curl.so > ../native/modules/curl.so
docker run now-php-docker-image /bin/cat /usr/lib64/php/modules/json.so > ../native/modules/json.so
docker run now-php-docker-image /bin/cat /usr/lib64/php/modules/mbstring.so > ../native/modules/mbstring.so
docker run now-php-docker-image /bin/cat /usr/lib64/php/modules/mysqli.so > ../native/modules/mysqli.so
docker run now-php-docker-image /bin/cat /usr/lib64/mysql/libmysqlclient.so.16 > ../native/modules/libmysqlclient.so.16
docker run now-php-docker-image /bin/cat /usr/lib64/php/modules/opcache.so > ../native/modules/opcache.so
chmod +x ../native/php-fpm

View File

@@ -1,9 +0,0 @@
[global]
error_log=/tmp/fpm-error.log
[www]
listen=0.0.0.0:9000
pm=static
pm.max_children=1
catch_workers_output=yes
clear_env=no

View File

@@ -1,8 +0,0 @@
extension=/root/app/modules/curl.so
extension=/root/app/modules/json.so
extension=/root/app/modules/mbstring.so
extension=/root/app/modules/mysqli.so
zend_extension=/root/app/modules/opcache.so
opcache.enable_cli=1
mysqli.max_links=10
mysqli.max_persistent=10

View File

@@ -1,4 +0,0 @@
<?php
mysqli_connect();
print('php_sapi_name=' . php_sapi_name() . PHP_EOL);
print('opcache_enabled=' . opcache_get_status()['opcache_enabled'] . PHP_EOL);

View File

@@ -1,18 +0,0 @@
mkdir -p /root/app/modules
cp /usr/bin/php /root/app/php
cp /usr/sbin/php-fpm /root/app/php-fpm
cp /usr/lib64/php/modules/curl.so /root/app/modules/curl.so
cp /usr/lib64/php/modules/json.so /root/app/modules/json.so
cp /usr/lib64/php/modules/mbstring.so /root/app/modules/mbstring.so
cp /usr/lib64/php/modules/mysqli.so /root/app/modules/mysqli.so
cp /usr/lib64/mysql/libmysqlclient.so.16 /root/app/modules/libmysqlclient.so.16
cp /usr/lib64/php/modules/opcache.so /root/app/modules/opcache.so
rm -rf $(which php)
rm -rf $(which php-fpm)
rm -rf /usr/lib64/php
rm -rf /usr/lib64/mysql
rm -rf /etc/php.d
./php-fpm --help
./php -c php.ini test.php
echo "if you see 'can't connect to local mysql' - it is good - mysql library is found and used"
echo "if you see 'call to undefined function mysqli_connect' - it is bad - something went wrong"

View File

@@ -1,149 +0,0 @@
/* eslint-disable no-bitwise,no-use-before-define */
const assert = require('assert');
const { freeParser } = require('_http_common');
const { spawn } = require('child_process');
const createConnection = require('./connection.js');
const { MSG_TYPE, PROTOCOL_STATUS } = require('./consts.js');
const { whenPortOpens } = require('./port.js');
const { HTTPParser } = process.binding('http_parser');
const BEGIN_REQUEST_DATA_KEEP_CONN = Buffer.from('\0\x01\x01\0\0\0\0\0'); // FCGI_ROLE_RESPONDER && FCGI_KEEP_CONN
const MESSAGE_FCGI_STDOUT = `message-${MSG_TYPE.FCGI_STDOUT}`;
const MESSAGE_FCGI_STDERR = `message-${MSG_TYPE.FCGI_STDERR}`;
const MESSAGE_FCGI_END_REQUEST = `message-${MSG_TYPE.FCGI_END_REQUEST}`;
let curReqId = 0;
let connection;
async function startPhp() {
assert(!connection);
const child = spawn(
'./php-fpm',
['-c', 'php.ini', '--fpm-config', 'php-fpm.ini', '--nodaemonize'],
{
stdio: 'inherit',
cwd: '/var/task/native',
},
);
child.on('exit', () => {
console.error('php exited');
process.exit(1);
});
child.on('error', (error) => {
console.error(error);
process.exit(1);
});
await whenPortOpens(9000, 400);
const newConnection = createConnection({
_host: '127.0.0.1',
_port: 9000,
});
await new Promise((resolve, reject) => {
function onError() {
cleanup();
reject();
}
function onConnect() {
connection = newConnection;
cleanup();
resolve();
}
newConnection.on('error', onError);
newConnection.on('connect', onConnect);
function cleanup() {
newConnection.removeListener('error', onError);
newConnection.removeListener('connect', onConnect);
}
});
}
async function query({ params, stdin }) {
if (!connection) {
await startPhp();
}
return new Promise((resolve) => {
assert(connection);
const chunks = [Buffer.from('HTTP/1.1 200 MAKES-PARSER-WORK\n')];
function onError(error) {
console.error(error);
process.exit(1);
}
function onStdout(reqId, data) {
chunks.push(data);
}
function onStderr(reqId, data) {
console.error(data.toString().trim());
}
function onEndRequest(reqId, data) {
const protocolStatus = data.readUInt8(4, true);
if (protocolStatus !== PROTOCOL_STATUS.FCGI_REQUEST_COMPLETE) {
console.error('protocolStatus', protocolStatus);
process.exit(1);
}
const response = Buffer.concat(chunks);
const parser = new HTTPParser(HTTPParser.RESPONSE);
let tuples = [];
parser[HTTPParser.kOnHeadersComplete | 0] = (major, minor, t) => {
tuples = t;
};
let body;
parser[HTTPParser.kOnBody | 0] = (b, start, len) => {
body = b.slice(start, start + len);
};
parser.execute(response);
freeParser(parser);
cleanup();
resolve({ tuples, body });
}
connection.on('error', onError);
connection.on(MESSAGE_FCGI_STDOUT, onStdout);
connection.on(MESSAGE_FCGI_STDERR, onStderr);
connection.on(MESSAGE_FCGI_END_REQUEST, onEndRequest);
function cleanup() {
connection.removeListener('error', onError);
connection.removeListener(MESSAGE_FCGI_STDOUT, onStdout);
connection.removeListener(MESSAGE_FCGI_STDERR, onStderr);
connection.removeListener(MESSAGE_FCGI_END_REQUEST, onEndRequest);
}
curReqId += 1;
// TODO these things have callbacks. what to do with them?
connection.send(
MSG_TYPE.FCGI_BEGIN_REQUEST,
curReqId,
BEGIN_REQUEST_DATA_KEEP_CONN,
);
connection.send(MSG_TYPE.FCGI_PARAMS, curReqId, params);
connection.send(MSG_TYPE.FCGI_PARAMS, curReqId, null);
if (stdin) connection.send(MSG_TYPE.FCGI_STDIN, curReqId, stdin);
connection.send(MSG_TYPE.FCGI_STDIN, curReqId, null);
});
}
module.exports = {
query,
};
/*
(async function() {
console.log(await query({ params: {
REQUEST_METHOD: 'GET', SCRIPT_FILENAME: '/phpinfo.php'
} }));
})();
*/

View File

@@ -1,41 +0,0 @@
/* eslint-disable consistent-return */
const net = require('net');
function whenPortOpensCallback(port, attempts, cb) {
const client = net.connect(port, '127.0.0.1');
client.on('error', (error) => {
if (!attempts) return cb(error);
setTimeout(() => {
whenPortOpensCallback(port, attempts - 1, cb);
}, 50);
});
client.on('connect', () => {
client.destroy();
cb();
});
}
function isPortOpen(port) {
return new Promise((resolve) => {
whenPortOpensCallback(port, 0, (error) => {
if (error) return resolve(false);
resolve(true);
});
});
}
function whenPortOpens(port, attempts) {
return new Promise((resolve, reject) => {
whenPortOpensCallback(port, attempts, (error) => {
if (error) return reject(error);
resolve();
});
});
}
module.exports = {
isPortOpen,
whenPortOpensCallback,
whenPortOpens,
};

View File

@@ -1,43 +0,0 @@
const FileBlob = require('@now/build-utils/file-blob.js'); // eslint-disable-line import/no-extraneous-dependencies
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
const path = require('path');
async function getFiles() {
const files = await glob('native/**', __dirname);
const phpConfig = await FileBlob.fromStream({
stream: files['native/php.ini'].toStream(),
});
phpConfig.data = phpConfig.data
.toString()
.replace(/\/root\/app\/modules/g, '/var/task/native/modules');
files['native/php.ini'] = phpConfig;
Object.assign(files, {
'fastcgi/connection.js': new FileFsRef({
fsPath: require.resolve('fastcgi-client/lib/connection.js'),
}),
'fastcgi/consts.js': new FileFsRef({
fsPath: require.resolve('fastcgi-client/lib/consts.js'),
}),
'fastcgi/stringifykv.js': new FileFsRef({
fsPath: require.resolve('fastcgi-client/lib/stringifykv.js'),
}),
'fastcgi/index.js': new FileFsRef({
fsPath: path.join(__dirname, 'fastcgi/index.js'),
}),
'fastcgi/port.js': new FileFsRef({
fsPath: path.join(__dirname, 'fastcgi/port.js'),
}),
'launcher.js': new FileFsRef({
fsPath: path.join(__dirname, 'launcher.js'),
}),
});
return files;
}
module.exports = {
getFiles,
};

View File

@@ -1,150 +0,0 @@
/* eslint-disable prefer-template */
const assert = require('assert');
const fs = require('fs');
const { join: pathJoin } = require('path');
const { parse: parseUrl } = require('url');
const { query } = require('./fastcgi/index.js');
function normalizeEvent(event) {
if (event.Action === 'Invoke') {
const invokeEvent = JSON.parse(event.body);
const {
method, path, headers, encoding,
} = invokeEvent;
let { body } = invokeEvent;
if (body) {
if (encoding === 'base64') {
body = Buffer.from(body, encoding);
} else if (encoding === undefined) {
body = Buffer.from(body);
} else {
throw new Error(`Unsupported encoding: ${encoding}`);
}
}
return {
method,
path,
headers,
body,
};
}
const {
httpMethod: method, path, headers, body,
} = event;
return {
method,
path,
headers,
body,
};
}
function isDirectory(p) {
return new Promise((resolve) => {
fs.stat(p, (error, s) => {
if (error) {
resolve(false);
return;
}
if (s.isDirectory()) {
resolve(true);
return;
}
resolve(false);
});
});
}
async function transformFromAwsRequest({
method, path, headers, body,
}) {
const { pathname, search, query: queryString } = parseUrl(path);
let requestUri = pathname + (search || '');
let filename = pathJoin(
'/var/task/user',
process.env.NOW_ENTRYPOINT || pathname,
);
if (await isDirectory(filename)) {
if (!filename.endsWith('/')) {
filename += '/';
requestUri = pathname + '/' + (search || '');
}
filename += 'index.php';
}
const params = {};
params.REQUEST_METHOD = method;
params.REQUEST_URI = requestUri;
params.QUERY_STRING = queryString || ''; // can be null
params.SCRIPT_FILENAME = filename;
params.SERVER_PROTOCOL = 'HTTP/1.1';
params.SERVER_PORT = 443;
params.HTTPS = 'on';
// eslint-disable-next-line no-restricted-syntax
for (const [k, v] of Object.entries(headers)) {
const camel = k.toUpperCase().replace(/-/g, '_');
params[`HTTP_${camel}`] = v;
if (camel === 'HOST') {
params.SERVER_NAME = v;
} else if (['CONTENT_TYPE', 'CONTENT_LENGTH'].includes(camel)) {
params[camel] = v; // without HOST_ prepended
}
}
return { params, stdin: body };
}
function transformToAwsResponse({ tuples, body }) {
let statusCode = 200;
const headers = {};
// eslint-disable-next-line no-param-reassign
if (!body) body = Buffer.alloc(0);
assert(Buffer.isBuffer(body));
for (let i = 0; i < tuples.length; i += 2) {
const k = tuples[i].toLowerCase();
const v = tuples[i + 1];
if (k === 'status') {
statusCode = Number(v.split(' ')[0]); // '408 Request Timeout'
} else {
if (!headers[k]) headers[k] = [];
headers[k].push(v);
}
}
return {
statusCode,
headers,
body: body.toString('base64'),
encoding: 'base64',
};
}
async function launcher(event) {
const awsRequest = normalizeEvent(event);
const input = await transformFromAwsRequest(awsRequest);
const output = await query(input);
return transformToAwsResponse(output);
}
exports.launcher = launcher;
/*
(async function() {
console.log(await launcher({
httpMethod: 'GET',
path: '/phpinfo.php'
}));
})();
*/

View File

@@ -1,9 +0,0 @@
[global]
error_log=/tmp/fpm-error.log
[www]
listen=0.0.0.0:9000
pm=static
pm.max_children=1
catch_workers_output=yes
clear_env=no

View File

@@ -1,8 +0,0 @@
extension=/root/app/modules/curl.so
extension=/root/app/modules/json.so
extension=/root/app/modules/mbstring.so
extension=/root/app/modules/mysqli.so
zend_extension=/root/app/modules/opcache.so
opcache.enable_cli=1
mysqli.max_links=10
mysqli.max_persistent=10

View File

@@ -1,13 +0,0 @@
{
"name": "@now/php-bridge",
"version": "0.5.3",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/zeit/now-builders.git",
"directory": "packages/now-php-bridge"
},
"dependencies": {
"fastcgi-client": "0.0.1"
}
}

View File

@@ -1 +0,0 @@
/test

View File

@@ -1,55 +0,0 @@
const {
createLambda,
rename,
glob,
download,
shouldServe,
} = require('@now/build-utils'); // eslint-disable-line import/no-extraneous-dependencies
const path = require('path');
const { getFiles } = require('@now/php-bridge');
exports.build = async ({
files, entrypoint, workPath, config, meta,
}) => {
// Download all files to workPath
const downloadedFiles = await download(files, workPath, meta);
let includedFiles = {};
if (config && config.includeFiles) {
// Find files for each glob
// eslint-disable-next-line no-restricted-syntax
for (const pattern of config.includeFiles) {
// eslint-disable-next-line no-await-in-loop
const matchedFiles = await glob(pattern, workPath);
Object.assign(includedFiles, matchedFiles);
}
// explicit and always include the entrypoint
Object.assign(includedFiles, {
[entrypoint]: files[entrypoint],
});
} else {
// Backwards compatibility
includedFiles = downloadedFiles;
}
console.log('Included files:', Object.keys(includedFiles));
const userFiles = rename(includedFiles, name => path.join('user', name));
const bridgeFiles = await getFiles();
// TODO config.extensions. OR php.ini from user
delete bridgeFiles['native/modules/mysqli.so'];
delete bridgeFiles['native/modules/libmysqlclient.so.16'];
const lambda = await createLambda({
files: { ...userFiles, ...bridgeFiles },
handler: 'launcher.launcher',
runtime: 'nodejs8.10',
environment: {
NOW_ENTRYPOINT: entrypoint,
},
});
return { [entrypoint]: lambda };
};
exports.shouldServe = shouldServe;

View File

@@ -1,17 +0,0 @@
{
"name": "@now/php",
"version": "0.5.7",
"license": "MIT",
"homepage": "https://zeit.co/docs/v2/deployments/official-builders/php-now-php",
"repository": {
"type": "git",
"url": "https://github.com/zeit/now-builders.git",
"directory": "packages/now-php"
},
"dependencies": {
"@now/php-bridge": "^0.5.3"
},
"scripts": {
"test": "jest"
}
}

View File

@@ -1,2 +0,0 @@
<?php
print('cow:RANDOMNESS_PLACEHOLDER');

View File

@@ -1,11 +0,0 @@
{
"version": 2,
"builds": [
{ "src": "index.php", "use": "@now/php" },
{ "src": "subdirectory/index.php", "use": "@now/php" }
],
"probes": [
{ "path": "/", "mustContain": "cow:RANDOMNESS_PLACEHOLDER" },
{ "path": "/subdirectory/", "mustContain": "yoda:RANDOMNESS_PLACEHOLDER" }
]
}

View File

@@ -1,2 +0,0 @@
<?php
print('yoda:RANDOMNESS_PLACEHOLDER');

View File

@@ -1,2 +0,0 @@
<?php
print($_ENV['RANDOMNESS_ENV_VAR'] . ':env');

View File

@@ -1,5 +0,0 @@
{
"version": 2,
"builds": [{ "src": "env/index.php", "use": "@now/php" }],
"probes": [{ "path": "/env", "mustContain": "RANDOMNESS_PLACEHOLDER:env" }]
}

View File

@@ -1,3 +0,0 @@
<?php
echo 'Excluded!';

View File

@@ -1,3 +0,0 @@
<?php
echo 'included:RANDOMNESS_PLACEHOLDER';

View File

@@ -1,9 +0,0 @@
<?php
echo 'mainfile:';
if (file_exists('included_file.php') && !file_exists('excluded_file.php')) {
require_once 'included_file.php';
} else {
echo PHP_EOL;
print_r(array_diff(scandir('.'), array('..', '.')));
}

View File

@@ -1,13 +0,0 @@
{
"version": 2,
"builds": [
{
"src": "index.php",
"use": "@now/php",
"config": { "includeFiles": ["included*.php"] }
}
],
"probes": [
{ "path": "/", "mustContain": "mainfile:included:RANDOMNESS_PLACEHOLDER" }
]
}

View File

@@ -1,28 +0,0 @@
<?php
header('Content-Type: text/plain');
print($_SERVER['SCRIPT_FILENAME'] . PHP_EOL);
print($_SERVER['REQUEST_METHOD'] . PHP_EOL);
print($_SERVER['REQUEST_URI'] . PHP_EOL);
print($_SERVER['HTTP_HOST'] . PHP_EOL);
print($_SERVER['HTTP_X_SOME_HEADER'] . PHP_EOL);
print($_SERVER['SERVER_PROTOCOL'] . PHP_EOL);
print($_SERVER['SERVER_NAME'] . PHP_EOL);
print($_SERVER['SERVER_PORT'] . PHP_EOL);
print($_SERVER['HTTPS'] . PHP_EOL);
print($_GET['get1'] . PHP_EOL);
var_dump($_GET['get2']);
print($_POST['post1'] . PHP_EOL);
var_dump($_POST['post2']);
print($_COOKIE['cookie1'] . PHP_EOL);
var_dump($_COOKIE['cookie2']);
print($_REQUEST['get1'] . PHP_EOL);
var_dump($_REQUEST['get2']);
print($_REQUEST['post1'] . PHP_EOL);
var_dump($_REQUEST['post2']);
print($_REQUEST['cookie1'] . PHP_EOL);
var_dump($_REQUEST['cookie2']);
print(file_get_contents('php://input') . PHP_EOL);
print('end' . PHP_EOL);

View File

@@ -1,4 +0,0 @@
{
"version": 2,
"builds": [{ "src": "index.php", "use": "@now/php" }]
}

View File

@@ -1,156 +0,0 @@
const assert = require('assert');
async function test1({ deploymentUrl, fetch }) {
const resp = await fetch(
`https://${deploymentUrl}/index.php?get1=foo&get1=bar&get2[]=bim&get2[]=bom`,
{
headers: {
'X-Some-Header': 'x-some-header-value',
},
},
);
assert(resp.status === 200);
const text = await resp.text();
const lines = text.trim().split('\n');
assert.deepEqual(lines, [
'/var/task/user/index.php',
'GET',
'/index.php?get1=foo&get1=bar&get2%5B%5D=bim&get2%5B%5D=bom', // TODO fake news, must be unescaped
deploymentUrl, // example 'test-19phw91ph.now.sh'
'x-some-header-value',
'HTTP/1.1',
deploymentUrl, // example 'test-19phw91ph.now.sh'
'443',
'on',
'bar',
'array(2) {',
' [0]=>',
' string(3) "bim"',
' [1]=>',
' string(3) "bom"',
'}',
'',
'NULL',
'',
'NULL',
'bar',
'array(2) {',
' [0]=>',
' string(3) "bim"',
' [1]=>',
' string(3) "bom"',
'}',
'',
'NULL',
'',
'NULL',
'',
'end',
]);
}
async function test2({ deploymentUrl, fetch }) {
const resp = await fetch(`https://${deploymentUrl}/index.php`, {
method: 'POST',
body: 'post1=baz&post1=bat&post2[]=pim&post2[]=pom',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
},
});
assert(resp.status === 200);
const text = await resp.text();
const lines = text.trim().split('\n');
assert.deepEqual(lines, [
'/var/task/user/index.php',
'POST',
'/index.php',
deploymentUrl, // example 'test-19phw91ph.now.sh'
'',
'HTTP/1.1',
deploymentUrl, // example 'test-19phw91ph.now.sh'
'443',
'on',
'',
'NULL',
'bat',
'array(2) {',
' [0]=>',
' string(3) "pim"',
' [1]=>',
' string(3) "pom"',
'}',
'',
'NULL',
'',
'NULL',
'bat',
'array(2) {',
' [0]=>',
' string(3) "pim"',
' [1]=>',
' string(3) "pom"',
'}',
'',
'NULL',
'post1=baz&post1=bat&post2[]=pim&post2[]=pom',
'end',
]);
}
async function test3({ deploymentUrl, fetch }) {
const resp = await fetch(`https://${deploymentUrl}/index.php`, {
method: 'GET',
headers: {
Cookie: `cookie1=foo; cookie1=${escape('bar|bar')}; ${escape(
'cookie2[]',
)}=dim; ${escape('cookie2[]')}=${escape('dom|dom')}`,
},
});
assert(resp.status === 200);
const text = await resp.text();
const lines = text.trim().split('\n');
assert.deepEqual(lines, [
'/var/task/user/index.php',
'GET',
'/index.php',
deploymentUrl, // example 'test-19phw91ph.now.sh'
'',
'HTTP/1.1',
deploymentUrl, // example 'test-19phw91ph.now.sh'
'443',
'on',
'',
'NULL',
'',
'NULL',
'foo',
'array(2) {',
' [0]=>',
' string(3) "dim"',
' [1]=>',
' string(7) "dom|dom"',
'}',
'',
'NULL',
'',
'NULL',
'foo',
'array(2) {',
' [0]=>',
' string(3) "dim"',
' [1]=>',
' string(7) "dom|dom"',
'}',
'',
'end',
]);
}
module.exports = async (opts) => {
await test1(opts);
await test2(opts);
await test3(opts);
};

View File

@@ -1,5 +0,0 @@
<?php
header('Content-Type: text/plain');
header('Content-Type: text/plain; charset=UTF-16');
setcookie('cookie1', 'cookie1value');
setcookie('cookie2', 'cookie2value');

View File

@@ -1,4 +0,0 @@
{
"version": 2,
"builds": [{ "src": "index.php", "use": "@now/php" }]
}

View File

@@ -1,9 +0,0 @@
const assert = require('assert');
module.exports = async ({ deploymentUrl, fetch }) => {
const resp = await fetch(`https://${deploymentUrl}/index.php`);
assert(resp.status === 200);
assert.equal(resp.headers.get('content-type'), 'text/plain; charset=UTF-16');
assert(resp.headers.get('set-cookie').includes('cookie1=cookie1value'));
assert(resp.headers.get('set-cookie').includes('cookie2=cookie2value'));
};

View File

@@ -1,10 +0,0 @@
<?php
// regression test for go-php engine reusage. on failure prints
// Fatal error: Cannot redeclare some_function() (previously declared in /var/task/user/index.php:7)
function some_function() {
print("paskantamasaari");
}
some_function();

View File

@@ -1,4 +0,0 @@
{
"version": 2,
"builds": [{ "src": "index.php", "use": "@now/php" }]
}

Some files were not shown because too many files have changed in this diff.