Compare commits

..

27 Commits

Author SHA1 Message Date
Nathan Rajlich
0351f02dff Publish Canary
- @vercel/build-utils@2.14.1-canary.1
 - vercel@24.0.1-canary.1
 - @vercel/client@10.3.1-canary.1
 - @vercel/frameworks@0.6.1-canary.1
 - @vercel/go@1.3.1-canary.1
 - @vercel/node@1.13.1-canary.1
 - @vercel/python@2.2.1-canary.1
 - @vercel/ruby@1.3.1-canary.1
2022-02-22 12:13:21 -08:00
Yuanlin Lin
0d7fa2f912 [frameworks] Updated Umi.js logo (#7470)
Co-authored-by: Andy <AndyBitz@users.noreply.github.com>
2022-02-22 19:59:32 +01:00
Nathan Rajlich
3b646880e7 [node] Use NodejsLambda class (#7436)
Updates `@vercel/node` to utilize the `NodejsLambda` class so that it doesn't need to explicitly bundle in the launcher/bridge files into the build result. This reduces the complexity of the Builder and is a cleaner separation of concerns. The `helpers.test.ts` file has been moved to `@vercel/node-bridge` so it's being removed in this PR as well.
2022-02-22 16:57:11 +00:00
Steven
350a0e5f36 [examples] Bump Next.js to 12.1.0 (#7458) 2022-02-18 18:53:26 -05:00
Steven
5c21d400bd Update dev test for image optimization with next@latest and python flask (#7454)
Related to https://github.com/vercel/next.js/pull/34431
2022-02-18 16:37:59 +00:00
Nathan Rajlich
04029013a6 Publish Canary
- @vercel/build-utils@2.14.1-canary.0
 - vercel@24.0.1-canary.0
 - @vercel/client@10.3.1-canary.0
 - @vercel/frameworks@0.6.1-canary.0
 - @vercel/go@1.3.1-canary.0
 - @vercel/node-bridge@2.1.2-canary.0
 - @vercel/node@1.13.1-canary.0
 - @vercel/python@2.2.1-canary.0
 - @vercel/ruby@1.3.1-canary.0
2022-02-17 20:40:07 -08:00
Nathan Rajlich
c65e7fa883 [node-bridge] Move helpers to node-bridge (#7451) 2022-02-17 20:36:37 -08:00
Knut Melvær
27b68be93f [frameworks] Add Sanity Studio (#7350)
Adds the default config and detector for a Sanity Studio project.
2022-02-16 14:26:39 -08:00
Nathan Rajlich
99fa729966 Publish Stable
- @vercel/build-utils@2.14.0
 - vercel@24.0.0
 - @vercel/client@10.3.0
 - @vercel/frameworks@0.6.0
 - @vercel/go@1.3.0
 - @vercel/node-bridge@2.1.1
 - @vercel/node@1.13.0
 - @vercel/python@2.2.0
 - @vercel/routing-utils@1.12.0
 - @vercel/ruby@1.3.0
 - @vercel/static-config@1.0.0-canary.0
2022-02-16 11:01:47 -08:00
Nathan Rajlich
2bb3da80e0 [cli] Remove timestamp based lookback in vc bisect --help (#7442)
Looking up a deployment via timestamp is not implemented in the `vc
bisect` command, so remove it from the `--help` output.
2022-02-16 01:54:47 -08:00
Nathan Rajlich
b852f34a27 [build-utils] Add Builder TypeScript types (#7386)
Adds formal type interfaces for Builders into `@vercel/build-utils`, and
updates the serverless functions runtimes to utilize them. This provides
type safety for the args/return values in i.e. the `build()` functions,
where previously they were not using any explicit return type.
2022-02-16 00:33:25 -08:00
Nathan Rajlich
ce8e6e3806 [build-utils] Add NodejsLambda class (#7423)
When an instance of this class is returned in a Builder's `output`, it is a signal to the build system that it needs to add additional files to the final Lambda before creating the zip file.
2022-02-15 22:17:12 +00:00
Nathan Rajlich
983946650e [cli] Remove initial iteration of vc build and Plugins (#7390)
* [cli] Remove initial iteration of `vc build` and Plugins

The `vercel build` command is being restructured, so we'll remove it for
now so that this initial iteration isn't included as part of the stable
CLI release.

* Update packages/cli/src/commands/init/init.ts

Co-authored-by: Steven <steven@ceriously.com>

* Remove more `.output` references

* Remove unnecessary space

* Fix test

* Remove vc dev middleware tests for now

Co-authored-by: Steven <steven@ceriously.com>
2022-02-14 19:00:15 -08:00
Steven
59e4572e76 Publish Canary
- vercel@23.1.3-canary.76
 - @vercel/node@1.12.2-canary.11
 - vercel-plugin-node@1.12.2-canary.43
2022-02-12 20:51:14 -05:00
Steven
5c297122cb [cli][node] Bump nft to 0.17.5 (#7349)
- Fixes #7287
2022-02-12 22:39:18 +00:00
Nathan Rajlich
28f3bf9ef6 Publish Canary
- @vercel/build-utils@2.13.1-canary.2
 - vercel@23.1.3-canary.75
 - @vercel/client@10.2.3-canary.53
 - @vercel/go@1.2.4-canary.7
 - vercel-plugin-middleware@0.0.0-canary.27
 - @vercel/node@1.12.2-canary.10
 - vercel-plugin-go@1.0.0-canary.38
 - vercel-plugin-node@1.12.2-canary.42
 - vercel-plugin-python@1.0.0-canary.39
 - vercel-plugin-ruby@1.0.0-canary.38
 - @vercel/python@2.1.2-canary.5
 - @vercel/ruby@1.2.10-canary.3
2022-02-10 13:44:22 -08:00
Nathan Rajlich
a936e92b8b [build-utils] Retain support for passing in a zipBuffer directly to new Lambda() (#7422) 2022-02-10 13:44:02 -08:00
Nathan Rajlich
ab1decf79d Publish Canary
- @vercel/build-utils@2.13.1-canary.1
 - vercel@23.1.3-canary.74
 - @vercel/client@10.2.3-canary.52
 - @vercel/frameworks@0.5.1-canary.21
 - @vercel/go@1.2.4-canary.6
 - vercel-plugin-middleware@0.0.0-canary.26
 - @vercel/node@1.12.2-canary.9
 - vercel-plugin-go@1.0.0-canary.37
 - vercel-plugin-node@1.12.2-canary.41
 - vercel-plugin-python@1.0.0-canary.38
 - vercel-plugin-ruby@1.0.0-canary.37
 - @vercel/python@2.1.2-canary.4
 - @vercel/ruby@1.2.10-canary.2
2022-02-10 11:51:31 -08:00
Nathan Rajlich
34408a7902 [build-utils] De-couple zip file creation from Lambda class (#7417)
Makes the `Lambda` class itself just a reference of the files that are
needed to produce a zip file, and adds a `createZip()` function that
does the actual zip file creation. This will allow the zip file creation
to be done in the Vercel build container instead of directly in the
Builder itself.

* Adds `files` property to `Lambda` class
* Adds `createZip()` function to `Lambda` class
* Deprecates the `createLambda()` function
* Deprecates the `zipBuffer` property (only populated when `createLambda()` is used)
* Removes the private `FILES_SYMBOL` symbol, since `files` is the same thing
2022-02-10 11:45:05 -08:00
Dominik Ferber
dc2d814d0f Add $schema property to vercel.json (#7394) 2022-02-08 00:56:42 -08:00
Nathan Rajlich
2402db92eb [cli] Add vercel bisect command (#5778)
This PR adds a new `vercel bisect` command which is inspired by `git bisect`. This command takes two deployment URLs as its input (one "bad" deployment that exhibits a bug, and one "good" deployment where the bug does not occur) and steps through the deployments using a binary search. The command is useful for finding a deployment (and commit if the project is connected to a Git repo) where a bug has been introduced.

<img width="582" alt="Screen Shot 2022-01-26 at 12 46 29 PM" src="https://user-images.githubusercontent.com/71256/151244295-8e90c493-c45f-40b4-8e43-f39e7fcac262.png">
2022-02-01 18:39:10 +00:00
Lee Robinson
a1787c740d Add --port $PORT for Vite. (#7378) 2022-01-31 12:16:11 -06:00
Mark Glagola
17fd88e044 [cli] Remove legacy verification handling in alias command (#7305)
* [cli] Remove legacy verification handling in `alias` command

* Update get-domain.ts

Co-authored-by: Nathan Rajlich <n@n8.io>
2022-01-27 12:03:46 -06:00
Nathan Rajlich
03a8fbd3a7 [cli] Use @vercel/fetch-retry in CLI integration tests (#7360)
These integration tests have started consistently failing with
`ECONNRESET` errors and after some debugging it's been
difficult to point the finger at anything related to Vercel
infrastructure. So possibly it's related to GitHub actions infra
changes, but either way retrying seems to help.
2022-01-26 17:42:04 -08:00
Lee Robinson
8d37c1045f [docs] Update README. (#7356) 2022-01-23 23:02:29 -06:00
Jared Palmer
30c433d248 Add runCustomInstallCommand build-util (#7345) 2022-01-21 18:07:50 -05:00
Jiachi Liu
d89a79601c [examples] Update Next.js example CSS styling. (#7337) 2022-01-20 15:59:33 -06:00
341 changed files with 13077 additions and 20310 deletions

View File

@@ -14,4 +14,6 @@ packages/client/lib
packages/node/src/bridge.ts
packages/node-bridge/bridge.js
packages/node-bridge/launcher.js
packages/node-bridge/helpers.js
packages/node-bridge/source-map-support.js
packages/middleware/src/entries.js

View File

@@ -2,11 +2,11 @@
When contributing to this repository, please first discuss the change you wish to make via [GitHub Discussions](https://github.com/vercel/vercel/discussions/new) with the owners of this repository before submitting a Pull Request.
Please read our [code of conduct](CODE_OF_CONDUCT.md) and follow it in all your interactions with the project.
Please read our [Code of Conduct](CODE_OF_CONDUCT.md) and follow it in all your interactions with the project.
## Local development
This project is configured in a monorepo pattern where one repo contains multiple npm packages. Dependencies are installed and managed with `yarn`, not `npm` CLI.
This project is configured in a monorepo, where one repository contains multiple npm packages. Dependencies are installed and managed with `yarn`, not `npm` CLI.
To get started, execute the following:
@@ -23,7 +23,7 @@ Make sure all the tests pass before making changes.
## Verifying your change
Once you are done with your changes (we even suggest doing it along the way ), make sure all the test still run by running
Once you are done with your changes (we even suggest doing it along the way), make sure all the test still run by running:
```
yarn build && yarn test

1
.gitignore vendored
View File

@@ -27,5 +27,4 @@ test/lib/deployment/failed-page.txt
/public
__pycache__
.vercel
.output
.turbo

View File

@@ -3,13 +3,21 @@
<img src="https://assets.vercel.com/image/upload/v1588805858/repositories/vercel/logo.png" height="96">
<h3 align="center">Vercel</h3>
</a>
<p align="center">Develop. Preview. Ship.</p>
</p>
[![CI Status](https://github.com/vercel/vercel/actions/workflows/test-unit.yml/badge.svg)](https://github.com/vercel/vercel/actions/workflows/test-unit.yml)
[![Join the community on GitHub Discussions](https://badgen.net/badge/join%20the%20discussion/on%20github/black?icon=github)](https://github.com/vercel/vercel/discussions)
<p align="center">
Develop. Preview. Ship.
</p>
## Usage
<p align="center">
<a href="https://vercel.com/docs"><strong>Documentation</strong></a> ·
<a href="https://vercel.com/changelog"><strong>Changelog</strong></a> ·
<a href="https://vercel.com/templates"><strong>Templates</strong></a> ·
<a href="https://vercel.com/cli"><strong>CLI</strong></a>
</p>
<br/>
## Vercel
Vercel is a platform for **static sites and frontend frameworks**, built to integrate with your headless content, commerce, or database.
@@ -17,22 +25,16 @@ We provide a **frictionless developer experience** to take care of the hard thin
We make it easy for frontend teams to **develop, preview, and ship** delightful user experiences, where performance is the default.
Get started by [Importing a Git Project](https://vercel.com/new) and use `git push` to deploy. Alternatively, you can [install Vercel CLI](https://vercel.com/cli).
## Deploy
Get started by [importing a project](https://vercel.com/new) or using the [Vercel CLI](https://vercel.com/cli). Then, `git push` to deploy.
## Documentation
For details on how to use Vercel, check out our [documentation](https://vercel.com/docs).
## Caught a Bug?
## Contributing
1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device
2. Install dependencies with `yarn install`
3. Compile the code: `yarn build`
4. Link the package to the global module directory: `cd ./packages/cli && yarn link`
5. You can start using `vercel` anywhere inside the command line
As always, you should use `yarn test-unit` to run the tests and see if your changes have broken anything.
## How to Create a Release
If you have write access to this repository, you can read more about how to publish a release [here](https://github.com/vercel/vercel/wiki/Creating-a-Release).
- [Code of Conduct](https://github.com/vercel/vercel/blob/main/.github/CODE_OF_CONDUCT.md)
- [Contributing Guidelines](https://github.com/vercel/vercel/blob/main/.github/CONTRIBUTING.md)
- [MIT License](https://github.com/vercel/vercel/blob/main/LICENSE)

50
examples/README.md vendored
View File

@@ -1,28 +1,6 @@
# Vercel Examples
This is the public list of examples for **Vercel**.
All of these ready to deploy examples feature a frontend framework or static site, created with zero configuration using the CLI tools they provide.
The `+functions` examples feature an `/api` directory as well, highlighting how to use serverless functions on top of a framework, again with zero configuration required.
## What is Vercel?
Vercel is a cloud platform for static frontends and serverless functions. It enables developers to host websites and web applications that deploy instantly, scale automatically, and require no supervision.
## What Does this Repository Contain?
This repository consists of multiple examples, created for use with the [Vercel](https://vercel.com) platform. In addition to this, it also contains:
- [Code of Conduct](https://github.com/vercel/vercel/blob/main/.github/CODE_OF_CONDUCT.md) - our Code of Conduct, adapted from the [Contributor Covenant](http://contributor-covenant.org)
- [Contributing Guidelines](https://github.com/vercel/vercel/blob/main/.github/CONTRIBUTING.md) - a guide on how to contribute to the examples repository
- [License](https://github.com/vercel/vercel/blob/main/LICENSE) - the standard MIT license under which these examples are published
We recommend familiarizing yourself with the above sections, particularly if you are looking to make a contribution.
## Deploying Examples
To get started using any of these examples as your own project, [install Vercel](https://vercel.com/download) and use either of the following commands in your terminal:
To get started using any of these examples as your own project, [install Vercel](https://vercel.com/cli) and use either of the following commands in your terminal:
```sh
vercel init # Pick an example in the CLI
@@ -30,7 +8,7 @@ vercel init <example> # Create a new project from a specific <example>
vercel init <example> <name> # Create a new project from a specific <example> with a different folder <name>
```
Deploying your project takes seconds and can be done with **just a single command**:
Deploying your project can be done with **a single command**:
```sh
vercel # Deploy your project with the CLI
@@ -38,26 +16,6 @@ vercel # Deploy your project with the CLI
With the `vercel` command, your project will be built and served by Vercel, providing you with a URL that can be shared immediately.
## New Examples
We are continuously improving our examples based on best practices and feedback from the community. As a result, it is possible that example names will change and on occasion deprecated in favor of an improved implementation.
For example, the previous `nodejs` example showed a static frontend with a Node.js API. This is illustrated in the `svelte` example. Below is a table that lists some of the most popular previous examples and the equivalent replacement:
| Previous Example | New Example |
| ----------------- | ---------------------------------------------------------------------------------------- |
| **monorepo** | [gatsby-functions](https://github.com/vercel/vercel/tree/main/examples/gatsby) |
| **nodejs** | [svelte-functions](https://github.com/vercel/vercel/tree/main/examples/svelte) |
| **nextjs-static** | [nextjs](https://github.com/vercel/vercel/tree/main/examples/nextjs) |
| **vanilla-go** | [create-react-app](https://github.com/vercel/vercel/tree/main/examples/create-react-app) |
| **typescript** | [gatsby-functions](https://github.com/vercel/vercel/tree/main/examples/gatsby) |
## Migrating and Upgrading
If you have an existing project you would like to deploy with Vercel, we recommend reading our guide on [migrating to Vercel and zero configuration](https://vercel.com/guides/migrate-to-vercel). By combining the guide with this repository, you will quickly be able to understand how to deploy your application.
If you would like to upgrade a project to take advantage of zero configuration, you may find the [upgrade guide](https://vercel.com/guides/upgrade-to-zero-configuration) useful. The upgrade guide covers how to remove configuration from existing projects along with how to use the `/api` directory.
## How to Contribute
Contributing examples should be an enjoyable experience, as such we have created a set of [contributing guidelines](https://github.com/vercel/vercel/blob/main/.github/CONTRIBUTING.md) to help you do so.
@@ -74,10 +32,6 @@ An issue can be raised by clicking the 'Issues' tab at the top of the repository
When submitting an issue, please thoroughly and concisely describe the problem you are experiencing so that we may easily understand and resolve the issue in a timely manner.
## License
This repository is an open source project. See the [License](https://github.com/vercel/vercel/blob/main/LICENSE).
## Get In Touch
If you have any questions that are not covered by raising an issue then please get in touch with us on [GitHub Discussions](https://github.com/vercel/vercel/discussions). There you will find both members of the community and staff who are happy to help answer questions on anything Vercel related.

View File

@@ -41,4 +41,3 @@ testem.log
.DS_Store
Thumbs.db
.vercel
.output

View File

@@ -23,6 +23,7 @@
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.pnpm-debug.log*
# local env files
.env.local

View File

@@ -1,3 +1,6 @@
module.exports = {
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
}
module.exports = nextConfig

File diff suppressed because it is too large Load Diff

View File

@@ -7,12 +7,12 @@
"lint": "next lint"
},
"dependencies": {
"next": "^12.0.8",
"next": "12.1.0",
"react": "17.0.2",
"react-dom": "17.0.2"
},
"devDependencies": {
"eslint": "8.7.0",
"eslint-config-next": "^12.0.8"
"eslint": "8.9.0",
"eslint-config-next": "12.1.0"
}
}

View File

@@ -33,7 +33,7 @@ export default function Home() {
</a>
<a
href="https://github.com/vercel/next.js/tree/master/examples"
href="https://github.com/vercel/next.js/tree/canary/examples"
className={styles.card}
>
<h2>Examples &rarr;</h2>

View File

@@ -1,15 +1,10 @@
.container {
min-height: 100vh;
padding: 0 0.5rem;
display: flex;
flex-direction: column;
justify-content: center;
align-items: center;
height: 100vh;
padding: 0 2rem;
}
.main {
padding: 5rem 0;
min-height: 100vh;
padding: 4rem 0;
flex: 1;
display: flex;
flex-direction: column;
@@ -18,10 +13,10 @@
}
.footer {
width: 100%;
height: 100px;
border-top: 1px solid #eaeaea;
display: flex;
flex: 1;
padding: 2rem 0;
border-top: 1px solid #eaeaea;
justify-content: center;
align-items: center;
}
@@ -56,6 +51,7 @@
}
.description {
margin: 4rem 0;
line-height: 1.5;
font-size: 1.5rem;
}
@@ -75,7 +71,6 @@
justify-content: center;
flex-wrap: wrap;
max-width: 800px;
margin-top: 3rem;
}
.card {
@@ -87,7 +82,7 @@
border: 1px solid #eaeaea;
border-radius: 10px;
transition: color 0.15s ease, border-color 0.15s ease;
width: 45%;
max-width: 300px;
}
.card:hover,

1633
examples/nextjs/yarn.lock Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,6 @@ node_modules
.cache
.vercel
.output
public/build
api/_build

View File

@@ -0,0 +1,3 @@
# Run `vercel env pull` to generate a .env file from your Vercel project
SANITY_STUDIO_API_PROJECT_ID=
SANITY_STUDIO_API_DATASET=

View File

@@ -0,0 +1,12 @@
# Logs
/logs
*.log
# Coverage directory used by tools like istanbul
/coverage
# Dependency directories
node_modules
# Compiled sanity studio
/dist

29
examples/sanity/README.md Normal file
View File

@@ -0,0 +1,29 @@
# Sanity Blogging Content Studio
Congratulations, you have now installed Sanity Studio, an open source real-time content editing environment connected to the Sanity backend.
Now you can do the following things:
- [Read “getting started” in the docs](https://www.sanity.io/docs/introduction/getting-started?utm_source=readme)
- Check out the example frontend: [React/Next.js](https://github.com/sanity-io/tutorial-sanity-blog-react-next)
- [Read the blog post about this template](https://www.sanity.io/blog/build-your-own-blog-with-sanity-and-next-js?utm_source=readme)
- [Join the community Slack](https://slack.sanity.io/?utm_source=readme)
- [Extend and build plugins](https://www.sanity.io/docs/content-studio/extending?utm_source=readme)
## Develop locally
Install dependencies:
```sh
npx @sanity/cli install
```
Pull down environment variables from your Vercel project (requires the [Vercel CLI](https://vercel.com/cli)):
```sh
vercel env pull
```
You can also run `npx @sanity/init` in this repo and agree to reconfigure it. You'll then be able to select from existing projects. The CLI will update `sanity.json` with the project ID and dataset name.

View File

@@ -0,0 +1,7 @@
{
"#": "Used by Sanity to keep track of configuration file checksums, do not delete or modify!",
"@sanity/default-layout": "bb034f391ba508a6ca8cd971967cbedeb131c4d19b17b28a0895f32db5d568ea",
"@sanity/default-login": "6fb6d3800aa71346e1b84d95bbcaa287879456f2922372bb0294e30b968cd37f",
"@sanity/form-builder": "b38478227ba5e22c91981da4b53436df22e48ff25238a55a973ed620be5068aa",
"@sanity/data-aspects": "d199e2c199b3e26cd28b68dc84d7fc01c9186bf5089580f2e2446994d36b3cb6"
}

View File

@@ -0,0 +1,3 @@
{
"listOptions": {}
}

View File

@@ -0,0 +1,6 @@
{
"toolSwitcher": {
"order": [],
"hidden": []
}
}

View File

@@ -0,0 +1,7 @@
{
"providers": {
"mode": "append",
"redirectOnSingle": false,
"entries": []
}
}

View File

@@ -0,0 +1,5 @@
{
"images": {
"directUploads": true
}
}

View File

@@ -0,0 +1,30 @@
{
"name": "verceltemplateblogstudio",
"private": true,
"version": "1.0.0",
"description": "This is the public list of examples for **Vercel**",
"main": "package.json",
"author": "Knut Melvær <knut@sanity.io>",
"license": "UNLICENSED",
"scripts": {
"start": "sanity start",
"build": "sanity build"
},
"keywords": [
"sanity"
],
"dependencies": {
"@sanity/core": "^2.26",
"@sanity/default-layout": "^2.26",
"@sanity/default-login": "^2.26",
"@sanity/desk-tool": "^2.26",
"@sanity/vision": "^2.26",
"prop-types": "^15.7",
"react": "^17.0",
"react-dom": "^17.0",
"styled-components": "^5.2"
},
"devDependencies": {
"@sanity/cli": "^2.26"
}
}

View File

@@ -0,0 +1 @@
User-specific packages can be placed here

View File

@@ -0,0 +1,29 @@
{
"root": true,
"project": {
"name": "vercel-template-blog-studio"
},
"api": {
"projectId": "YOUR_PROJECT_ID",
"dataset": "YOUR_DATASET_NAME"
},
"plugins": [
"@sanity/base",
"@sanity/default-layout",
"@sanity/default-login",
"@sanity/desk-tool"
],
"env": {
"development": {
"plugins": [
"@sanity/vision"
]
}
},
"parts": [
{
"name": "part:@sanity/base/schema",
"path": "./schemas/schema"
}
]
}

View File

@@ -0,0 +1,48 @@
export default {
name: 'author',
title: 'Author',
type: 'document',
fields: [
{
name: 'name',
title: 'Name',
type: 'string',
},
{
name: 'slug',
title: 'Slug',
type: 'slug',
options: {
source: 'name',
maxLength: 96,
},
},
{
name: 'image',
title: 'Image',
type: 'image',
options: {
hotspot: true,
},
},
{
name: 'bio',
title: 'Bio',
type: 'array',
of: [
{
title: 'Block',
type: 'block',
styles: [{title: 'Normal', value: 'normal'}],
lists: [],
},
],
},
],
preview: {
select: {
title: 'name',
media: 'image',
},
},
}

View File

@@ -0,0 +1,65 @@
/**
* This is the schema definition for the rich text fields used for
* for this blog studio. When you import it in schemas.js it can be
* reused in other parts of the studio with:
* {
* name: 'someName',
* title: 'Some title',
* type: 'blockContent'
* }
*/
export default {
title: 'Block Content',
name: 'blockContent',
type: 'array',
of: [
{
title: 'Block',
type: 'block',
// Styles let you set what your user can mark up blocks with. These
// correspond with HTML tags, but you can set any title or value
// you want and decide how you want to deal with it where you want to
// use your content.
styles: [
{title: 'Normal', value: 'normal'},
{title: 'H1', value: 'h1'},
{title: 'H2', value: 'h2'},
{title: 'H3', value: 'h3'},
{title: 'H4', value: 'h4'},
{title: 'Quote', value: 'blockquote'},
],
lists: [{title: 'Bullet', value: 'bullet'}],
// Marks let you mark up inline text in the block editor.
marks: {
// Decorators usually describe a single property e.g. a typographic
// preference or highlighting by editors.
decorators: [
{title: 'Strong', value: 'strong'},
{title: 'Emphasis', value: 'em'},
],
// Annotations can be any object structure e.g. a link or a footnote.
annotations: [
{
title: 'URL',
name: 'link',
type: 'object',
fields: [
{
title: 'URL',
name: 'href',
type: 'url',
},
],
},
],
},
},
// You can add additional types here. Note that you can't use
// primitive types such as 'string' and 'number' in the same array
// as a block type.
{
type: 'image',
options: {hotspot: true},
},
],
}

View File

@@ -0,0 +1,17 @@
export default {
name: 'category',
title: 'Category',
type: 'document',
fields: [
{
name: 'title',
title: 'Title',
type: 'string',
},
{
name: 'description',
title: 'Description',
type: 'text',
},
],
}

View File

@@ -0,0 +1,65 @@
export default {
name: 'post',
title: 'Post',
type: 'document',
fields: [
{
name: 'title',
title: 'Title',
type: 'string',
},
{
name: 'slug',
title: 'Slug',
type: 'slug',
options: {
source: 'title',
maxLength: 96,
},
},
{
name: 'author',
title: 'Author',
type: 'reference',
to: {type: 'author'},
},
{
name: 'mainImage',
title: 'Main image',
type: 'image',
options: {
hotspot: true,
},
},
{
name: 'categories',
title: 'Categories',
type: 'array',
of: [{type: 'reference', to: {type: 'category'}}],
},
{
name: 'publishedAt',
title: 'Published at',
type: 'datetime',
},
{
name: 'body',
title: 'Body',
type: 'blockContent',
},
],
preview: {
select: {
title: 'title',
author: 'author.name',
media: 'mainImage',
},
prepare(selection) {
const {author} = selection
return Object.assign({}, selection, {
subtitle: author && `by ${author}`,
})
},
},
}

View File

@@ -0,0 +1,29 @@
// First, we must import the schema creator
import createSchema from 'part:@sanity/base/schema-creator'
// Then import schema types from any plugins that might expose them
import schemaTypes from 'all:part:@sanity/base/schema-type'
// We import object and document schemas
import blockContent from './blockContent'
import category from './category'
import post from './post'
import author from './author'
// Then we give our schema to the builder and provide the result to Sanity
export default createSchema({
// We name our schema
name: 'default',
// Then proceed to concatenate our document type
// to the ones provided by any plugins that are installed
types: schemaTypes.concat([
// The following are document types which will appear
// in the studio.
post,
author,
category,
// When added to this list, object types can be used as
// { type: 'typename' } in other document schemas
blockContent,
]),
})

View File

@@ -0,0 +1 @@
Files placed here will be served by the Sanity server under the `/static`-prefix

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -0,0 +1,6 @@
{
// Note: This config is only used to help editors like VS Code understand/resolve
// parts, the actual transpilation is done by babel. Any compiler configuration in
// here will be ignored.
"include": ["./node_modules/@sanity/base/types/**/*.ts", "./**/*.ts", "./**/*.tsx"]
}

9911
examples/sanity/yarn.lock Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,6 @@ dist
worker
.solid
.vercel
.output
# dependencies
/node_modules

View File

@@ -7,4 +7,3 @@ node_modules
.env.*
!.env.example
.vercel
.output

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/build-utils",
"version": "2.13.1-canary.0",
"version": "2.14.1-canary.1",
"license": "MIT",
"main": "./dist/index.js",
"types": "./dist/index.d.js",
@@ -30,7 +30,7 @@
"@types/node-fetch": "^2.1.6",
"@types/semver": "6.0.0",
"@types/yazl": "^2.4.1",
"@vercel/frameworks": "0.5.1-canary.20",
"@vercel/frameworks": "0.6.1-canary.1",
"@vercel/ncc": "0.24.0",
"aggregate-error": "3.0.1",
"async-retry": "1.2.3",

View File

@@ -1,412 +0,0 @@
import fs from 'fs-extra';
import { join, parse, relative, dirname, basename, extname } from 'path';
import glob from './fs/glob';
import { normalizePath } from './fs/normalize-path';
import { FILES_SYMBOL, Lambda } from './lambda';
import type { BuildOptions, Files } from './types';
import { debug, getIgnoreFilter } from '.';
// `.output` was already created by the Build Command, so we have
// to ensure its contents don't get bundled into the Lambda. Similarily,
// we don't want to bundle anything from `.vercel` either. Lastly,
// Builders/Runtimes didn't have `vercel.json` or `now.json`.
const ignoredPaths = ['.output', '.vercel', 'vercel.json', 'now.json'];
const shouldIgnorePath = (
file: string,
ignoreFilter: any,
ignoreFile: boolean
) => {
const isNative = ignoredPaths.some(item => {
return file.startsWith(item);
});
if (!ignoreFile) {
return isNative;
}
return isNative || ignoreFilter(file);
};
const getSourceFiles = async (workPath: string, ignoreFilter: any) => {
const list = await glob('**', {
cwd: workPath,
});
// We're not passing this as an `ignore` filter to the `glob` function above,
// so that we can re-use exactly the same `getIgnoreFilter` method that the
// Build Step uses (literally the same code). Note that this exclusion only applies
// when deploying. Locally, another exclusion is needed, which is handled
// further below in the `convertRuntimeToPlugin` function.
for (const file in list) {
if (shouldIgnorePath(file, ignoreFilter, true)) {
delete list[file];
}
}
return list;
};
/**
* Convert legacy Runtime to a Plugin.
* @param buildRuntime - a legacy build() function from a Runtime
* @param packageName - the name of the package, for example `vercel-plugin-python`
* @param ext - the file extension, for example `.py`
*/
export function _experimental_convertRuntimeToPlugin(
buildRuntime: (options: BuildOptions) => Promise<{ output: Lambda }>,
packageName: string,
ext: string
) {
// This `build()` signature should match `plugin.build()` signature in `vercel build`.
return async function build({ workPath }: { workPath: string }) {
  // We also don't want to provide any files to Runtimes that were ignored
  // through `.vercelignore` or `.nowignore`, because the Build Step does the same.
  const ignoreFilter = await getIgnoreFilter(workPath);

  // Retrieve the files that are currently available on the File System,
  // before the Legacy Runtime has even started to build.
  const sourceFilesPreBuild = await getSourceFiles(workPath, ignoreFilter);

  // Instead of doing another `glob` to get all the matching source files,
  // we'll filter the list of existing files down to only the ones
  // that are matching the entrypoint pattern, so we're first creating
  // a clean new list to begin.
  const entrypoints = Object.assign({}, sourceFilesPreBuild);
  const entrypointMatch = new RegExp(`^api/.*${ext}$`);

  // Up next, we'll strip out the files from the list of entrypoints
  // that aren't actually considered entrypoints.
  for (const file in entrypoints) {
    if (!entrypointMatch.test(file)) {
      delete entrypoints[file];
    }
  }

  const pages: { [key: string]: any } = {};
  const pluginName = packageName.replace('vercel-plugin-', '');
  const outputPath = join(workPath, '.output');

  const traceDir = join(
    outputPath,
    `inputs`,
    // Legacy Runtimes can only provide API Routes, so that's
    // why we can use this prefix for all of them. Here, we have to
    // make sure to not use a cryptic hash name, because people
    // need to be able to easily inspect the output.
    `api-routes-${pluginName}`
  );

  await fs.ensureDir(traceDir);

  const entryRoot = join(outputPath, 'server', 'pages');

  for (const entrypoint of Object.keys(entrypoints)) {
    const { output } = await buildRuntime({
      files: sourceFilesPreBuild,
      entrypoint,
      workPath,
      config: {
        zeroConfig: true,
      },
      meta: {
        avoidTopLevelInstall: true,
        skipDownload: true,
      },
    });

    // @ts-ignore This symbol is a private API
    const lambdaFiles: Files = output[FILES_SYMBOL];

    // When deploying, the `files` that are passed to the Legacy Runtimes already
    // have certain files that are ignored stripped, but locally, that list of
    // files isn't used by the Legacy Runtimes, so we need to apply the filters
    // to the outputs that they are returning instead.
    for (const file in lambdaFiles) {
      if (shouldIgnorePath(file, ignoreFilter, false)) {
        delete lambdaFiles[file];
      }
    }

    let handlerFileBase = output.handler;
    let handlerFile = lambdaFiles[handlerFileBase];
    let handlerHasImport = false;

    const { handler } = output;
    const handlerMethod = handler.split('.').pop();
    const handlerFileName = handler.replace(`.${handlerMethod}`, '');

    // For compiled languages, the launcher file for the Lambda generated
    // by the Legacy Runtime matches the `handler` defined for it, but for
    // interpreted languages, the `handler` consists of the launcher file name
    // without an extension, plus the name of the method inside of that file
    // that should be invoked, so we have to construct the file path explicitly.
    if (!handlerFile) {
      handlerFileBase = handlerFileName + ext;
      handlerFile = lambdaFiles[handlerFileBase];
      handlerHasImport = true;
    }

    if (!handlerFile || !handlerFile.fsPath) {
      throw new Error(
        `Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`
      );
    }

    const handlerExtName = extname(handlerFile.fsPath);

    const entryBase = basename(entrypoint).replace(ext, handlerExtName);
    const entryPath = join(dirname(entrypoint), entryBase);
    const entry = join(entryRoot, entryPath);

    // Create the parent directory of the API Route that will be created
    // for the current entrypoint inside of `.output/server/pages/api`.
    await fs.ensureDir(dirname(entry));

    // For compiled languages, the launcher file will be binary and therefore
    // won't try to import a user-provided request handler (instead, it will
    // contain it). But for interpreted languages, the launcher might try to
    // load a user-provided request handler from the source file instead of bundling
    // it, so we have to adjust the import statement inside the launcher to point
    // to the respective source file. Previously, Legacy Runtimes simply expected
    // the user-provided request-handler to be copied right next to the launcher,
    // but with the new File System API, files won't be moved around unnecessarily.
    if (handlerHasImport) {
      const { fsPath } = handlerFile;
      const encoding = 'utf-8';

      // This is the true directory of the user-provided request handler in the
      // source files, so that's what we will use as an import path in the launcher.
      const locationPrefix = relative(entry, outputPath);

      let handlerContent = await fs.readFile(fsPath, encoding);

      const importPaths = [
        // This is the full entrypoint path, like `./api/test.py`. In our tests
        // Python didn't support importing from a parent directory without using different
        // code in the launcher that registers it as a location for modules and then changing
        // the importing syntax, but continuing to import it like before seems to work. If
        // other languages need this, we should consider excluding Python explicitly.
        // `./${entrypoint}`,

        // This is the entrypoint path without extension, like `api/test`
        entrypoint.slice(0, -ext.length),
      ];

      // Generate a list of regular expressions that we can use for
      // finding matches, but only allow matches if the import path is
      // wrapped inside single (') or double quotes (").
      const patterns = importPaths.map(path => {
        // eslint-disable-next-line no-useless-escape
        return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
      });

      let replacedMatch = null;

      for (const pattern of patterns) {
        const newContent = handlerContent.replace(
          pattern,
          (_, p1, p2, p3) => {
            return `${p1}${join(locationPrefix, p2)}${p3}`;
          }
        );

        if (newContent !== handlerContent) {
          debug(
            `Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`
          );

          handlerContent = newContent;
          replacedMatch = true;
        }
      }

      if (!replacedMatch) {
        // BUGFIX: the `Error` here was previously constructed but never
        // thrown, silently discarding the failure and writing out a launcher
        // whose import of the user-provided request handler is broken.
        // Failing loudly surfaces the problem at build time instead.
        throw new Error(
          `No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`
        );
      }

      await fs.writeFile(entry, handlerContent, encoding);
    } else {
      await fs.copy(handlerFile.fsPath, entry);
    }

    // Legacy Runtimes based on interpreted languages will create a new launcher file
    // for every entrypoint, but they will create each one inside `workPath`, which means that
    // the launcher for one entrypoint will overwrite the launcher provided for the previous
    // entrypoint. That's why, above, we copy the file contents into the new destination (and
    // optionally transform them along the way), instead of linking. We then also want to remove
    // the copy origin right here, so that the `workPath` doesn't contain a useless launcher file
    // once the build has finished running.
    await fs.remove(handlerFile.fsPath);
    debug(`Removed temporary file "${handlerFile.fsPath}"`);

    const nft = `${entry}.nft.json`;

    const json = JSON.stringify({
      version: 2,
      files: Object.keys(lambdaFiles)
        .map(file => {
          const { fsPath } = lambdaFiles[file];
          if (!fsPath) {
            throw new Error(
              `File "${file}" is missing valid \`fsPath\` property`
            );
          }
          // The handler was already moved into position above.
          if (file === handlerFileBase) {
            return;
          }
          return normalizePath(relative(dirname(nft), fsPath));
        })
        .filter(Boolean),
    });

    await fs.writeFile(nft, json);

    // Add an entry that will later on be added to the `functions-manifest.json`
    // file that is placed inside of the `.output` directory.
    pages[normalizePath(entryPath)] = {
      // Because the underlying file used as a handler was placed
      // inside `.output/server/pages/api`, it no longer has the name it originally
      // had and is now named after the API Route that it's responsible for,
      // so we have to adjust the name of the Lambda handler accordingly.
      handler: handler.replace(handlerFileName, parse(entry).name),
      runtime: output.runtime,
      memory: output.memory,
      maxDuration: output.maxDuration,
      environment: output.environment,
      allowQuery: output.allowQuery,
    };
  }

  // Add any Serverless Functions that were exposed by the Legacy Runtime
  // to the `functions-manifest.json` file provided in `.output`.
  await _experimental_updateFunctionsManifest({ workPath, pages });
};
}
/**
 * Reads and parses a JSON file from disk.
 *
 * @param filePath - Absolute path of the JSON file to read.
 * @returns The parsed JSON object, or an empty object when the file
 *          does not exist (`ENOENT`).
 * @throws Any other error (e.g. malformed JSON, permission problems)
 *         is re-thrown to the caller.
 */
async function readJson(filePath: string): Promise<{ [key: string]: any }> {
  try {
    const str = await fs.readFile(filePath, 'utf8');
    return JSON.parse(str);
  } catch (err) {
    // Under `useUnknownInCatchVariables` the caught value is `unknown`,
    // so narrow it before reading the Node.js file-system error code.
    if ((err as { code?: string })?.code === 'ENOENT') {
      return {};
    }
    throw err;
  }
}
/**
 * If `.output/functions-manifest.json` exists, append to the pages
 * property. Otherwise write a new file.
 */
export async function _experimental_updateFunctionsManifest({
  workPath,
  pages,
}: {
  workPath: string;
  pages: { [key: string]: any };
}) {
  const manifestPath = join(workPath, '.output', 'functions-manifest.json');
  const manifest = await readJson(manifestPath);

  // Ensure the manifest skeleton exists before merging in new pages.
  manifest.version = manifest.version || 2;
  manifest.pages = manifest.pages || {};

  // Shallow-copy each page config so later mutations of the caller's
  // objects don't leak into the manifest we persist.
  for (const pageKey of Object.keys(pages)) {
    manifest.pages[pageKey] = { ...pages[pageKey] };
  }

  await fs.writeFile(manifestPath, JSON.stringify(manifest));
}
/**
 * Append routes to the `routes-manifest.json` file.
 * If the file does not exist, it will be created.
 */
export async function _experimental_updateRoutesManifest({
  workPath,
  redirects,
  rewrites,
  headers,
  dynamicRoutes,
  staticRoutes,
}: {
  workPath: string;
  redirects?: {
    source: string;
    destination: string;
    statusCode: number;
    regex: string;
  }[];
  rewrites?: {
    source: string;
    destination: string;
    regex: string;
  }[];
  headers?: {
    source: string;
    headers: {
      key: string;
      value: string;
    }[];
    regex: string;
  }[];
  dynamicRoutes?: {
    page: string;
    regex: string;
    namedRegex?: string;
    routeKeys?: { [named: string]: string };
  }[];
  staticRoutes?: {
    page: string;
    regex: string;
    namedRegex?: string;
    routeKeys?: { [named: string]: string };
  }[];
}) {
  const manifestPath = join(workPath, '.output', 'routes-manifest.json');
  const manifest = await readJson(manifestPath);

  // Defaults for a freshly-created manifest.
  if (!manifest.version) manifest.version = 3;
  if (manifest.pages404 === undefined) manifest.pages404 = true;

  // Each optional argument maps onto an array property of the manifest;
  // append to the existing list for that key, creating it when absent.
  const sections: Array<[string, object[] | undefined]> = [
    ['redirects', redirects],
    ['rewrites', rewrites],
    ['headers', headers],
    ['dynamicRoutes', dynamicRoutes],
    ['staticRoutes', staticRoutes],
  ];

  for (const [key, entries] of sections) {
    if (!entries) continue;
    manifest[key] = (manifest[key] || []).concat(entries);
  }

  await fs.writeFile(manifestPath, JSON.stringify(manifest));
}

View File

@@ -1,4 +1,4 @@
import { getPlatformEnv } from './';
import { getPlatformEnv } from './get-platform-env';
export default function debug(message: string, ...additional: any[]) {
if (getPlatformEnv('BUILDER_DEBUG')) {

View File

@@ -328,8 +328,12 @@ export async function runNpmInstall(
const opts: SpawnOptionsExtended = { cwd: destPath, ...spawnOpts };
const env = opts.env ? { ...opts.env } : { ...process.env };
delete env.NODE_ENV;
opts.env = env;
opts.env = getEnvForPackageManager({
cliType,
lockfileVersion,
nodeVersion,
env,
});
let commandArgs: string[];
if (cliType === 'npm') {
@@ -337,25 +341,9 @@ export async function runNpmInstall(
commandArgs = args
.filter(a => a !== '--prefer-offline')
.concat(['install', '--no-audit', '--unsafe-perm']);
// If the lockfile version is 2 or greater and the node version is less than 16 than we will force npm7 to be used
if (
typeof lockfileVersion === 'number' &&
lockfileVersion >= 2 &&
(nodeVersion?.major || 0) < 16
) {
// Ensure that npm 7 is at the beginning of the `$PATH`
env.PATH = `/node16/bin-npm7:${env.PATH}`;
console.log('Detected `package-lock.json` generated by npm 7...');
}
} else {
opts.prettyCommand = 'yarn install';
commandArgs = ['install', ...args];
// Yarn v2 PnP mode may be activated, so force "node-modules" linker style
if (!env.YARN_NODE_LINKER) {
env.YARN_NODE_LINKER = 'node-modules';
}
}
if (process.env.NPM_ONLY_PRODUCTION) {
@@ -365,6 +353,65 @@ export async function runNpmInstall(
return spawnAsync(cliType, commandArgs, opts);
}
export function getEnvForPackageManager({
cliType,
lockfileVersion,
nodeVersion,
env,
}: {
cliType: CliType;
lockfileVersion: number | undefined;
nodeVersion: NodeVersion | undefined;
env: { [x: string]: string | undefined };
}) {
const newEnv: { [x: string]: string | undefined } = { ...env };
if (cliType === 'npm') {
if (
typeof lockfileVersion === 'number' &&
lockfileVersion >= 2 &&
(nodeVersion?.major || 0) < 16
) {
// Ensure that npm 7 is at the beginning of the `$PATH`
newEnv.PATH = `/node16/bin-npm7:${env.PATH}`;
console.log('Detected `package-lock.json` generated by npm 7...');
}
} else {
// Yarn v2 PnP mode may be activated, so force "node-modules" linker style
if (!env.YARN_NODE_LINKER) {
newEnv.YARN_NODE_LINKER = 'node-modules';
}
}
return newEnv;
}
export async function runCustomInstallCommand({
destPath,
installCommand,
nodeVersion,
spawnOpts,
}: {
destPath: string;
installCommand: string;
nodeVersion: NodeVersion;
spawnOpts?: SpawnOptions;
}) {
console.log(`Running "install" command: \`${installCommand}\`...`);
const { cliType, lockfileVersion } = await scanParentDirs(destPath);
const env = getEnvForPackageManager({
cliType,
lockfileVersion,
nodeVersion,
env: spawnOpts?.env || {},
});
debug(`Running with $PATH:`, env?.PATH || '');
await execCommand(installCommand, {
...spawnOpts,
env,
cwd: destPath,
});
}
export async function runPackageJsonScript(
destPath: string,
scriptNames: string | Iterable<string>,
@@ -385,23 +432,24 @@ export async function runPackageJsonScript(
debug('Running user script...');
const runScriptTime = Date.now();
const opts: SpawnOptionsExtended = { cwd: destPath, ...spawnOpts };
const env = (opts.env = { ...process.env, ...opts.env });
const opts: SpawnOptionsExtended = {
cwd: destPath,
...spawnOpts,
env: getEnvForPackageManager({
cliType,
lockfileVersion,
nodeVersion: undefined,
env: {
...process.env,
...spawnOpts?.env,
},
}),
};
if (cliType === 'npm') {
opts.prettyCommand = `npm run ${scriptName}`;
if (typeof lockfileVersion === 'number' && lockfileVersion >= 2) {
// Ensure that npm 7 is at the beginning of the `$PATH`
env.PATH = `/node16/bin-npm7:${env.PATH}`;
}
} else {
opts.prettyCommand = `yarn run ${scriptName}`;
// Yarn v2 PnP mode may be activated, so force "node-modules" linker style
if (!env.YARN_NODE_LINKER) {
env.YARN_NODE_LINKER = 'node-modules';
}
}
console.log(`Running "${opts.prettyCommand}"`);

View File

@@ -0,0 +1,23 @@
import { NowBuildError } from './errors';
/**
* Helper function to support both `VERCEL_` and legacy `NOW_` env vars.
* Throws an error if *both* env vars are defined.
*/
export const getPlatformEnv = (name: string): string | undefined => {
const vName = `VERCEL_${name}`;
const nName = `NOW_${name}`;
const v = process.env[vName];
const n = process.env[nName];
if (typeof v === 'string') {
if (typeof n === 'string') {
throw new NowBuildError({
code: 'CONFLICTING_ENV_VAR_NAMES',
message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`,
link: 'https://vercel.link/combining-old-and-new-config',
});
}
return v;
}
return n;
};

View File

@@ -2,6 +2,7 @@ import FileBlob from './file-blob';
import FileFsRef from './file-fs-ref';
import FileRef from './file-ref';
import { Lambda, createLambda, getLambdaOptionsFromFunction } from './lambda';
import { NodejsLambda } from './nodejs-lambda';
import { Prerender } from './prerender';
import download, { DownloadedFiles, isSymbolicLink } from './fs/download';
import getWriteableDirectory from './fs/get-writable-directory';
@@ -20,6 +21,8 @@ import {
runBundleInstall,
runPipInstall,
runShellScript,
runCustomInstallCommand,
getEnvForPackageManager,
getNodeVersion,
getSpawnOptions,
getNodeBinPath,
@@ -29,17 +32,18 @@ import {
getLatestNodeVersion,
getDiscontinuedNodeVersions,
} from './fs/node-version';
import { NowBuildError } from './errors';
import streamToBuffer from './fs/stream-to-buffer';
import shouldServe from './should-serve';
import debug from './debug';
import getIgnoreFilter from './get-ignore-filter';
import { getPlatformEnv } from './get-platform-env';
export {
FileBlob,
FileFsRef,
FileRef,
Lambda,
NodejsLambda,
createLambda,
Prerender,
download,
@@ -61,10 +65,13 @@ export {
runBundleInstall,
runPipInstall,
runShellScript,
runCustomInstallCommand,
getEnvForPackageManager,
getNodeVersion,
getLatestNodeVersion,
getDiscontinuedNodeVersions,
getSpawnOptions,
getPlatformEnv,
streamToBuffer,
shouldServe,
debug,
@@ -85,11 +92,6 @@ export { detectFramework } from './detect-framework';
export { DetectorFilesystem } from './detectors/filesystem';
export { readConfigFile } from './fs/read-config-file';
export { normalizePath } from './fs/normalize-path';
export {
_experimental_convertRuntimeToPlugin,
_experimental_updateFunctionsManifest,
_experimental_updateRoutesManifest,
} from './convert-runtime-to-plugin';
export * from './schemas';
export * from './types';
@@ -113,25 +115,3 @@ export const isOfficialRuntime = (desired: string, name?: string): boolean => {
export const isStaticRuntime = (name?: string): boolean => {
return isOfficialRuntime('static', name);
};
/**
* Helper function to support both `VERCEL_` and legacy `NOW_` env vars.
* Throws an error if *both* env vars are defined.
*/
export const getPlatformEnv = (name: string): string | undefined => {
const vName = `VERCEL_${name}`;
const nName = `NOW_${name}`;
const v = process.env[vName];
const n = process.env[nName];
if (typeof v === 'string') {
if (typeof n === 'string') {
throw new NowBuildError({
code: 'CONFLICTING_ENV_VAR_NAMES',
message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`,
link: 'https://vercel.link/combining-old-and-new-config',
});
}
return v;
}
return n;
};

View File

@@ -12,18 +12,7 @@ interface Environment {
[key: string]: string;
}
interface LambdaOptions {
zipBuffer: Buffer;
handler: string;
runtime: string;
memory?: number;
maxDuration?: number;
environment: Environment;
allowQuery?: string[];
regions?: string[];
}
interface CreateLambdaOptions {
export interface LambdaOptions {
files: Files;
handler: string;
runtime: string;
@@ -32,6 +21,10 @@ interface CreateLambdaOptions {
environment?: Environment;
allowQuery?: string[];
regions?: string[];
/**
* @deprecated Use `files` property instead.
*/
zipBuffer?: Buffer;
}
interface GetLambdaOptionsFromFunctionOptions {
@@ -39,31 +32,64 @@ interface GetLambdaOptionsFromFunctionOptions {
config?: Pick<Config, 'functions'>;
}
export const FILES_SYMBOL = Symbol('files');
export class Lambda {
public type: 'Lambda';
public zipBuffer: Buffer;
public handler: string;
public runtime: string;
public memory?: number;
public maxDuration?: number;
public environment: Environment;
public allowQuery?: string[];
public regions?: string[];
type: 'Lambda';
files: Files;
handler: string;
runtime: string;
memory?: number;
maxDuration?: number;
environment: Environment;
allowQuery?: string[];
regions?: string[];
/**
* @deprecated Use `await lambda.createZip()` instead.
*/
zipBuffer?: Buffer;
constructor({
zipBuffer,
files,
handler,
runtime,
maxDuration,
memory,
environment,
environment = {},
allowQuery,
regions,
zipBuffer,
}: LambdaOptions) {
if (!zipBuffer) {
assert(typeof files === 'object', '"files" must be an object');
}
assert(typeof handler === 'string', '"handler" is not a string');
assert(typeof runtime === 'string', '"runtime" is not a string');
assert(typeof environment === 'object', '"environment" is not an object');
if (memory !== undefined) {
assert(typeof memory === 'number', '"memory" is not a number');
}
if (maxDuration !== undefined) {
assert(typeof maxDuration === 'number', '"maxDuration" is not a number');
}
if (allowQuery !== undefined) {
assert(Array.isArray(allowQuery), '"allowQuery" is not an Array');
assert(
allowQuery.every(q => typeof q === 'string'),
'"allowQuery" is not a string Array'
);
}
if (regions !== undefined) {
assert(Array.isArray(regions), '"regions" is not an Array');
assert(
regions.every(r => typeof r === 'string'),
'"regions" is not a string Array'
);
}
this.type = 'Lambda';
this.zipBuffer = zipBuffer;
this.files = files;
this.handler = handler;
this.runtime = runtime;
this.memory = memory;
@@ -71,70 +97,36 @@ export class Lambda {
this.environment = environment;
this.allowQuery = allowQuery;
this.regions = regions;
this.zipBuffer = zipBuffer;
}
async createZip(): Promise<Buffer> {
let { zipBuffer } = this;
if (!zipBuffer) {
await sema.acquire();
try {
zipBuffer = await createZip(this.files);
} finally {
sema.release();
}
}
return zipBuffer;
}
}
const sema = new Sema(10);
const mtime = new Date(1540000000000);
export async function createLambda({
files,
handler,
runtime,
memory,
maxDuration,
environment = {},
allowQuery,
regions,
}: CreateLambdaOptions): Promise<Lambda> {
assert(typeof files === 'object', '"files" must be an object');
assert(typeof handler === 'string', '"handler" is not a string');
assert(typeof runtime === 'string', '"runtime" is not a string');
assert(typeof environment === 'object', '"environment" is not an object');
/**
* @deprecated Use `new Lambda()` instead.
*/
export async function createLambda(opts: LambdaOptions): Promise<Lambda> {
const lambda = new Lambda(opts);
if (memory !== undefined) {
assert(typeof memory === 'number', '"memory" is not a number');
}
// backwards compat
lambda.zipBuffer = await lambda.createZip();
if (maxDuration !== undefined) {
assert(typeof maxDuration === 'number', '"maxDuration" is not a number');
}
if (allowQuery !== undefined) {
assert(Array.isArray(allowQuery), '"allowQuery" is not an Array');
assert(
allowQuery.every(q => typeof q === 'string'),
'"allowQuery" is not a string Array'
);
}
if (regions !== undefined) {
assert(Array.isArray(regions), '"regions" is not an Array');
assert(
regions.every(r => typeof r === 'string'),
'"regions" is not a string Array'
);
}
await sema.acquire();
try {
const zipBuffer = await createZip(files);
const lambda = new Lambda({
zipBuffer,
handler,
runtime,
memory,
maxDuration,
environment,
regions,
});
// @ts-ignore This symbol is a private API
lambda[FILES_SYMBOL] = files;
return lambda;
} finally {
sema.release();
}
return lambda;
}
export async function createZip(files: Files): Promise<Buffer> {
@@ -177,7 +169,7 @@ export async function getLambdaOptionsFromFunction({
}: GetLambdaOptionsFromFunctionOptions): Promise<
Pick<LambdaOptions, 'memory' | 'maxDuration'>
> {
if (config && config.functions) {
if (config?.functions) {
for (const [pattern, fn] of Object.entries(config.functions)) {
if (sourceFile === pattern || minimatch(sourceFile, pattern)) {
return {

View File

@@ -0,0 +1,27 @@
import { Lambda, LambdaOptions } from './lambda';
interface NodejsLambdaOptions extends LambdaOptions {
shouldAddHelpers: boolean;
shouldAddSourcemapSupport: boolean;
awsLambdaHandler?: string;
}
export class NodejsLambda extends Lambda {
launcherType: 'Nodejs';
shouldAddHelpers: boolean;
shouldAddSourcemapSupport: boolean;
awsLambdaHandler?: string;
constructor({
shouldAddHelpers,
shouldAddSourcemapSupport,
awsLambdaHandler,
...opts
}: NodejsLambdaOptions) {
super(opts);
this.launcherType = 'Nodejs';
this.shouldAddHelpers = shouldAddHelpers;
this.shouldAddSourcemapSupport = shouldAddSourcemapSupport;
this.awsLambdaHandler = awsLambdaHandler;
}
}

View File

@@ -1,5 +1,6 @@
import FileRef from './file-ref';
import FileFsRef from './file-fs-ref';
import { Lambda } from './lambda';
export interface Env {
[name: string]: string | undefined;
@@ -368,3 +369,49 @@ export interface ProjectSettings {
directoryListing?: boolean;
gitForkProtection?: boolean;
}
export interface BuilderV2 {
version: 2;
build: BuildV2;
prepareCache?: PrepareCache;
}
export interface BuilderV3 {
version: 3;
build: BuildV3;
prepareCache?: PrepareCache;
startDevServer?: StartDevServer;
}
type ImageFormat = 'image/avif' | 'image/webp';
export interface Images {
domains: string[];
sizes: number[];
minimumCacheTTL?: number;
formats?: ImageFormat[];
}
export interface BuildResultV2 {
// TODO: use proper `Route` type from `routing-utils` (perhaps move types to a common package)
routes: any[];
images?: Images;
output: {
[key: string]: File | Lambda;
};
wildcard?: Array<{
domain: string;
value: string;
}>;
}
export interface BuildResultV3 {
output: Lambda;
}
export type BuildV2 = (options: BuildOptions) => Promise<BuildResultV2>;
export type BuildV3 = (options: BuildOptions) => Promise<BuildResultV3>;
export type PrepareCache = (options: PrepareCacheOptions) => Promise<Files>;
export type StartDevServer = (
options: StartDevServerOptions
) => Promise<StartDevServerResult>;

View File

@@ -1,160 +0,0 @@
import { join } from 'path';
import fs from 'fs-extra';
import { BuildOptions, createLambda, FileFsRef } from '../src';
import { _experimental_convertRuntimeToPlugin } from '../src/convert-runtime-to-plugin';
async function fsToJson(dir: string, output: Record<string, any> = {}) {
const files = await fs.readdir(dir);
for (const file of files) {
const fsPath = join(dir, file);
const stat = await fs.stat(fsPath);
if (stat.isDirectory()) {
output[file] = {};
await fsToJson(fsPath, output[file]);
} else {
output[file] = await fs.readFile(fsPath, 'utf8');
}
}
return output;
}
const invalidFuncWorkpath = join(
__dirname,
'convert-runtime',
'invalid-functions'
);
const pythonApiWorkpath = join(__dirname, 'convert-runtime', 'python-api');
describe('convert-runtime-to-plugin', () => {
afterEach(async () => {
await fs.remove(join(invalidFuncWorkpath, '.output'));
await fs.remove(join(pythonApiWorkpath, '.output'));
});
it('should create correct fileystem for python', async () => {
const ext = '.py';
const workPath = pythonApiWorkpath;
const handlerName = 'vc__handler__python';
const handlerFileName = handlerName + ext;
const lambdaOptions = {
handler: `${handlerName}.vc_handler`,
runtime: 'python3.9',
memory: 512,
maxDuration: 5,
environment: {},
};
const buildRuntime = async (opts: BuildOptions) => {
const handlerPath = join(workPath, handlerFileName);
// This is the usual time at which a Legacy Runtime writes its Lambda launcher.
await fs.writeFile(handlerPath, '# handler');
opts.files[handlerFileName] = new FileFsRef({
fsPath: handlerPath,
});
const lambda = await createLambda({
files: opts.files,
...lambdaOptions,
});
return { output: lambda };
};
const packageName = 'vercel-plugin-python';
const build = await _experimental_convertRuntimeToPlugin(
buildRuntime,
packageName,
ext
);
await build({ workPath });
const output = await fsToJson(join(workPath, '.output'));
expect(output).toMatchObject({
'functions-manifest.json': expect.stringContaining('{'),
server: {
pages: {
api: {
'index.py': expect.stringContaining('handler'),
'index.py.nft.json': expect.stringContaining('{'),
users: {
'get.py': expect.stringContaining('handler'),
'get.py.nft.json': expect.stringContaining('{'),
'post.py': expect.stringContaining('handler'),
'post.py.nft.json': expect.stringContaining('{'),
},
},
},
},
});
const funcManifest = JSON.parse(output['functions-manifest.json']);
expect(funcManifest).toMatchObject({
version: 2,
pages: {
'api/index.py': { ...lambdaOptions, handler: 'index.vc_handler' },
'api/users/get.py': { ...lambdaOptions, handler: 'get.vc_handler' },
'api/users/post.py': {
...lambdaOptions,
handler: 'post.vc_handler',
memory: 512,
},
},
});
const indexJson = JSON.parse(output.server.pages.api['index.py.nft.json']);
expect(indexJson).toMatchObject({
version: 2,
files: [
'../../../../api/db/[id].py',
'../../../../api/index.py',
'../../../../api/project/[aid]/[bid]/index.py',
'../../../../api/users/get.py',
'../../../../api/users/post.py',
'../../../../file.txt',
'../../../../util/date.py',
'../../../../util/math.py',
],
});
const getJson = JSON.parse(
output.server.pages.api.users['get.py.nft.json']
);
expect(getJson).toMatchObject({
version: 2,
files: [
'../../../../../api/db/[id].py',
'../../../../../api/index.py',
'../../../../../api/project/[aid]/[bid]/index.py',
'../../../../../api/users/get.py',
'../../../../../api/users/post.py',
'../../../../../file.txt',
'../../../../../util/date.py',
'../../../../../util/math.py',
],
});
const postJson = JSON.parse(
output.server.pages.api.users['post.py.nft.json']
);
expect(postJson).toMatchObject({
version: 2,
files: [
'../../../../../api/db/[id].py',
'../../../../../api/index.py',
'../../../../../api/project/[aid]/[bid]/index.py',
'../../../../../api/users/get.py',
'../../../../../api/users/post.py',
'../../../../../file.txt',
'../../../../../util/date.py',
'../../../../../util/math.py',
],
});
expect(output.server.pages['file.txt']).toBeUndefined();
expect(output.server.pages.api['file.txt']).toBeUndefined();
});
});

View File

@@ -0,0 +1,109 @@
import assert from 'assert';
import { getEnvForPackageManager, NodeVersion } from '../src';
import { CliType } from '../src/fs/run-user-scripts';
describe('Test `getEnvForPackageManager()`', () => {
const cases = [
{
name: 'should do nothing to env for npm < 6 and node < 16',
args: {
cliType: 'npm' as CliType,
nodeVersion: {
major: 14,
} as NodeVersion,
lockfileVersion: 1,
env: {
FOO: 'bar',
},
},
want: {
FOO: 'bar',
},
},
{
name: 'should set path if npm 7+ is detected and node < 16',
args: {
cliType: 'npm' as CliType,
nodeVersion: {
major: 14,
} as NodeVersion,
lockfileVersion: 2,
env: {
FOO: 'bar',
PATH: 'foo',
},
},
want: {
FOO: 'bar',
PATH: `/node16/bin-npm7:foo`,
},
},
{
name: 'should not set path if node is 16 and npm 7+ is detected',
args: {
cliType: 'npm' as CliType,
nodeVersion: {
major: 16,
} as NodeVersion,
lockfileVersion: 2,
env: {
FOO: 'bar',
PATH: 'foo',
},
},
want: {
FOO: 'bar',
PATH: 'foo',
},
},
{
name: 'should set YARN_NODE_LINKER w/yarn if it is not already defined',
args: {
cliType: 'yarn' as CliType,
nodeVersion: {
major: 16,
} as NodeVersion,
lockfileVersion: 2,
env: {
FOO: 'bar',
},
},
want: {
FOO: 'bar',
YARN_NODE_LINKER: 'node-modules',
},
},
{
name: 'should not set YARN_NODE_LINKER if it already exists',
args: {
cliType: 'yarn' as CliType,
nodeVersion: {
major: 16,
} as NodeVersion,
lockfileVersion: 2,
env: {
FOO: 'bar',
YARN_NODE_LINKER: 'exists',
},
},
want: {
FOO: 'bar',
YARN_NODE_LINKER: 'exists',
},
},
];
for (const { name, want, args } of cases) {
it(name, () => {
assert.deepStrictEqual(
getEnvForPackageManager({
cliType: args.cliType,
lockfileVersion: args.lockfileVersion,
nodeVersion: args.nodeVersion,
env: args.env,
}),
want
);
});
}
});

View File

@@ -0,0 +1,21 @@
import { NodejsLambda, FileBlob } from '../src';
describe('Test `NodejsLambda`', () => {
it('should create an instance', () => {
const helloSrc = 'module.exports = (req, res) => res.end("hi");';
const lambda = new NodejsLambda({
files: {
'api/hello.js': new FileBlob({ data: helloSrc }),
},
handler: 'api/hello.js',
runtime: 'node14.x',
shouldAddHelpers: true,
shouldAddSourcemapSupport: false,
});
expect(lambda.handler).toEqual('api/hello.js');
expect(lambda.runtime).toEqual('node14.x');
expect(lambda.shouldAddHelpers).toEqual(true);
expect(lambda.shouldAddSourcemapSupport).toEqual(false);
expect(lambda.awsLambdaHandler).toBeUndefined();
});
});

View File

@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "23.1.3-canary.73",
"version": "24.0.1-canary.1",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -43,14 +43,12 @@
"node": ">= 12"
},
"dependencies": {
"@vercel/build-utils": "2.13.1-canary.0",
"@vercel/go": "1.2.4-canary.5",
"@vercel/node": "1.12.2-canary.8",
"@vercel/python": "2.1.2-canary.3",
"@vercel/ruby": "1.2.10-canary.1",
"update-notifier": "4.1.0",
"vercel-plugin-middleware": "0.0.0-canary.25",
"vercel-plugin-node": "1.12.2-canary.40"
"@vercel/build-utils": "2.14.1-canary.1",
"@vercel/go": "1.3.1-canary.1",
"@vercel/node": "1.13.1-canary.1",
"@vercel/python": "2.2.1-canary.1",
"@vercel/ruby": "1.3.1-canary.1",
"update-notifier": "4.1.0"
},
"devDependencies": {
"@next/env": "11.1.2",
@@ -90,10 +88,11 @@
"@types/update-notifier": "5.1.0",
"@types/which": "1.3.2",
"@types/write-json-file": "2.2.1",
"@vercel/client": "10.2.3-canary.51",
"@vercel/frameworks": "0.5.1-canary.20",
"@vercel/client": "10.3.1-canary.1",
"@vercel/fetch-retry": "5.0.3",
"@vercel/frameworks": "0.6.1-canary.1",
"@vercel/ncc": "0.24.0",
"@vercel/nft": "0.17.0",
"@vercel/nft": "0.17.5",
"@zeit/fun": "0.11.2",
"@zeit/source-map-support": "0.6.2",
"ajv": "6.12.2",
@@ -142,7 +141,7 @@
"ms": "2.1.2",
"node-fetch": "2.6.1",
"npm-package-arg": "6.1.0",
"open": "8.2.0",
"open": "8.4.0",
"ora": "3.4.0",
"pcre-to-regexp": "1.0.0",
"pluralize": "7.0.0",

View File

@@ -6,7 +6,6 @@ import { Output } from '../../util/output';
import * as ERRORS from '../../util/errors-ts';
import assignAlias from '../../util/alias/assign-alias';
import Client from '../../util/client';
import formatNSTable from '../../util/format-ns-table';
import getDeploymentByIdOrHost from '../../util/deploy/get-deployment-by-id-or-host';
import { getDeploymentForAlias } from '../../util/alias/get-deployment-by-alias';
import getScope from '../../util/get-scope';
@@ -226,29 +225,6 @@ function handleSetupDomainError<T>(
output: Output,
error: SetupDomainError | T
): T | 1 {
if (
error instanceof ERRORS.DomainVerificationFailed ||
error instanceof ERRORS.DomainNsNotVerifiedForWildcard
) {
const { nsVerification, domain } = error.meta;
output.error(
`We could not alias since the domain ${domain} could not be verified due to the following reasons:\n`
);
output.print(
`Nameservers verification failed since we see a different set than the intended set:`
);
output.print(
`\n${formatNSTable(
nsVerification.intendedNameservers,
nsVerification.nameservers,
{ extraSpace: ' ' }
)}\n\n`
);
output.print(' Read more: https://err.sh/vercel/domain-verification\n');
return 1;
}
if (error instanceof ERRORS.DomainPermissionDenied) {
output.error(
`You don't have permissions over domain ${chalk.underline(

View File

@@ -0,0 +1,411 @@
import open from 'open';
import boxen from 'boxen';
import execa from 'execa';
import plural from 'pluralize';
import inquirer from 'inquirer';
import { resolve } from 'path';
import chalk, { Chalk } from 'chalk';
import { URLSearchParams, parse } from 'url';
import sleep from '../../util/sleep';
import formatDate from '../../util/format-date';
import link from '../../util/output/link';
import logo from '../../util/output/logo';
import getArgs from '../../util/get-args';
import Client from '../../util/client';
import { getPkgName } from '../../util/pkg-name';
import { Output } from '../../util/output';
import { Deployment, PaginationOptions } from '../../types';
// Subset of a Deployment as returned by the v6 deployments API — only the
// fields `vercel bisect` actually reads, plus `createdAt` for ordering
// deployments chronologically during the binary search.
interface DeploymentV6
  extends Pick<
    Deployment,
    'url' | 'target' | 'projectId' | 'ownerId' | 'meta' | 'inspectorUrl'
  > {
  createdAt: number;
}
// One page of a `/v6/deployments` list response; `pagination.next` drives
// the fetch loop in `main()`.
interface Deployments {
  deployments: DeploymentV6[];
  pagination: PaginationOptions;
}
// Resolved CLI binary name, interpolated into the help/example text below.
const pkgName = getPkgName();

// Prints usage information and examples for `vercel bisect`.
const help = () => {
  console.log(`
${chalk.bold(`${logo} ${pkgName} bisect`)} [options]
${chalk.dim('Options:')}
-h, --help Output usage information
-d, --debug Debug mode [off]
-b, --bad Known bad URL
-g, --good Known good URL
-o, --open Automatically open each URL in the browser
-p, --path Subpath of the deployment URL to test
-r, --run Test script to run for each deployment
${chalk.dim('Examples:')}
${chalk.gray('')} Bisect the current project interactively
${chalk.cyan(`$ ${pkgName} bisect`)}
${chalk.gray('')} Bisect with a known bad deployment
${chalk.cyan(`$ ${pkgName} bisect --bad example-310pce9i0.vercel.app`)}
${chalk.gray('')} Automated bisect with a run script
${chalk.cyan(`$ ${pkgName} bisect --run ./test.sh`)}
`);
};
/**
 * `vercel bisect` — binary-searches a project's deployments, git-bisect
 * style, to find the first "bad" deployment that introduced a bug.
 *
 * The user supplies a known-bad and known-good deployment URL (via flags or
 * interactive prompts); each midpoint deployment is then classified either
 * by a user-supplied run script or interactively, halving the candidate
 * list each round.
 *
 * @param client - API client carrying argv, output, and authenticated fetch.
 * @returns exit code: 0 on success, 1 on error, 2 when help was printed.
 */
export default async function main(client: Client): Promise<number> {
  const { output } = client;

  const argv = getArgs(client.argv.slice(2), {
    '--bad': String,
    '-b': '--bad',
    '--good': String,
    '-g': '--good',
    '--open': Boolean,
    '-o': '--open',
    '--path': String,
    '-p': '--path',
    '--run': String,
    '-r': '--run',
  });

  if (argv['--help']) {
    help();
    return 2;
  }

  // Prompt interactively for any endpoint that was not passed as a flag.
  let bad =
    argv['--bad'] ||
    (await prompt(output, `Specify a URL where the bug occurs:`));
  let good =
    argv['--good'] ||
    (await prompt(output, `Specify a URL where the bug does not occur:`));
  let subpath = argv['--path'] || '';
  let run = argv['--run'] || '';
  const openEnabled = argv['--open'] || false;

  if (run) {
    // Resolve the run script to an absolute path before chdir-free execution.
    run = resolve(run);
  }

  // Normalize the "bad" input to a bare hostname; a path component embedded
  // in the URL may double as the subpath when `--path` was not given.
  if (!bad.startsWith('https://')) {
    bad = `https://${bad}`;
  }
  let parsed = parse(bad);
  if (!parsed.hostname) {
    output.error('Invalid input: no hostname provided');
    return 1;
  }
  bad = parsed.hostname;
  if (typeof parsed.path === 'string' && parsed.path !== '/') {
    if (subpath && subpath !== parsed.path) {
      output.note(
        `Ignoring subpath ${chalk.bold(
          parsed.path
        )} in favor of \`--path\` argument ${chalk.bold(subpath)}`
      );
    } else {
      subpath = parsed.path;
    }
  }

  // Kick off the lookup now so it resolves in parallel with the rest of
  // the input handling; errors are captured and surfaced later.
  const badDeploymentPromise = getDeployment(client, bad).catch(err => err);

  // Same normalization for the "good" endpoint.
  if (!good.startsWith('https://')) {
    good = `https://${good}`;
  }
  parsed = parse(good);
  if (!parsed.hostname) {
    output.error('Invalid input: no hostname provided');
    return 1;
  }
  good = parsed.hostname;
  if (
    typeof parsed.path === 'string' &&
    parsed.path !== '/' &&
    subpath &&
    subpath !== parsed.path
  ) {
    output.note(
      `Ignoring subpath ${chalk.bold(
        parsed.path
      )} which does not match ${chalk.bold(subpath)}`
    );
  }

  const goodDeploymentPromise = getDeployment(client, good).catch(err => err);

  if (!subpath) {
    subpath = await prompt(
      output,
      `Specify the URL subpath where the bug occurs:`
    );
  }

  output.spinner('Retrieving deployments…');
  const [badDeployment, goodDeployment] = await Promise.all([
    badDeploymentPromise,
    goodDeploymentPromise,
  ]);

  // A caught lookup error arrives here as an Error instance.
  if (badDeployment) {
    if (badDeployment instanceof Error) {
      badDeployment.message += ` "${bad}"`;
      output.prettyError(badDeployment);
      return 1;
    }
    bad = badDeployment.url;
  } else {
    output.error(`Failed to retrieve ${chalk.bold('bad')} Deployment: ${bad}`);
    return 1;
  }

  const { projectId } = badDeployment;

  if (goodDeployment) {
    if (goodDeployment instanceof Error) {
      goodDeployment.message += ` "${good}"`;
      output.prettyError(goodDeployment);
      return 1;
    }
    good = goodDeployment.url;
  } else {
    output.error(
      `Failed to retrieve ${chalk.bold('good')} Deployment: ${good}`
    );
    return 1;
  }

  // Sanity checks: both endpoints must belong to the same project and
  // target, and the "good" deployment must pre-date the "bad" one.
  if (projectId !== goodDeployment.projectId) {
    output.error(`Good and Bad deployments must be from the same Project`);
    return 1;
  }
  if (badDeployment.createdAt < goodDeployment.createdAt) {
    output.error(`Good deployment must be older than the Bad deployment`);
    return 1;
  }
  if (badDeployment.target !== goodDeployment.target) {
    output.error(
      `Bad deployment target "${
        badDeployment.target || 'preview'
      }" does not match good deployment target "${
        goodDeployment.target || 'preview'
      }"`
    );
    return 1;
  }

  // Fetch all the project's "READY" deployments with the pagination API
  let deployments: DeploymentV6[] = [];
  let next: number | undefined = badDeployment.createdAt + 1;
  do {
    const query = new URLSearchParams();
    query.set('projectId', projectId);
    if (badDeployment.target) {
      query.set('target', badDeployment.target);
    }
    query.set('limit', '100');
    query.set('state', 'READY');
    if (next) {
      query.set('until', String(next));
    }

    const res = await client.fetch<Deployments>(`/v6/deployments?${query}`, {
      accountId: badDeployment.ownerId,
    });
    next = res.pagination.next;

    let newDeployments = res.deployments;

    // If we have the "good" deployment in this chunk, then we're done
    for (let i = 0; i < newDeployments.length; i++) {
      if (newDeployments[i].url === good) {
        newDeployments = newDeployments.slice(0, i + 1);
        next = undefined;
        break;
      }
    }

    deployments = deployments.concat(newDeployments);

    if (next) {
      // Small sleep to avoid rate limiting
      await sleep(100);
    }
  } while (next);

  if (!deployments.length) {
    output.error(
      'Cannot bisect because this project does not have any deployments'
    );
    return 1;
  }

  // The first deployment is the one that was marked
  // as "bad", so that one does not need to be tested
  let lastBad = deployments.shift()!;

  // Binary search over the (newest-first) candidate list.
  while (deployments.length > 0) {
    // Add a blank space before the next step
    output.print('\n');
    const middleIndex = Math.floor(deployments.length / 2);
    const deployment = deployments[middleIndex];
    const rem = plural('deployment', deployments.length, true);
    const steps = Math.floor(Math.log2(deployments.length));
    const pSteps = plural('step', steps, true);
    output.log(
      chalk.magenta(
        `${chalk.bold(
          'Bisecting:'
        )} ${rem} left to test after this (roughly ${pSteps})`
      ),
      chalk.magenta
    );
    const testUrl = `https://${deployment.url}${subpath}`;
    output.log(`${chalk.bold('Deployment URL:')} ${link(testUrl)}`);
    output.log(`${chalk.bold('Date:')} ${formatDate(deployment.createdAt)}`);

    const commit = getCommit(deployment);
    if (commit) {
      const shortSha = commit.sha.substring(0, 7);
      const firstLine = commit.message.split('\n')[0];
      output.log(`${chalk.bold('Commit:')} [${shortSha}] ${firstLine}`);
    }

    let action: string;
    if (run) {
      // Automated mode: exit code 0 = good, 125 = skip, anything else = bad
      // (mirrors `git bisect run` conventions).
      const proc = await execa(run, [testUrl], {
        stdio: 'inherit',
        reject: false,
        env: {
          ...process.env,
          HOST: deployment.url,
          URL: testUrl,
        },
      });
      if (proc instanceof Error && typeof proc.exitCode !== 'number') {
        // Script does not exist or is not executable, so exit
        output.prettyError(proc);
        return 1;
      }
      const { exitCode } = proc;
      let color: Chalk;
      if (exitCode === 0) {
        color = chalk.green;
        action = 'good';
      } else if (exitCode === 125) {
        action = 'skip';
        color = chalk.grey;
      } else {
        action = 'bad';
        color = chalk.red;
      }
      output.log(
        `Run script returned exit code ${chalk.bold(String(exitCode))}: ${color(
          action
        )}`
      );
    } else {
      // Interactive mode: optionally open the URL, then ask the user.
      if (openEnabled) {
        await open(testUrl);
      }
      const answer = await inquirer.prompt({
        type: 'expand',
        name: 'action',
        message: 'Select an action:',
        choices: [
          { key: 'g', name: 'Good', value: 'good' },
          { key: 'b', name: 'Bad', value: 'bad' },
          { key: 's', name: 'Skip', value: 'skip' },
        ],
      });
      action = answer.action;
    }

    if (action === 'good') {
      // Bug absent here → the culprit is newer (earlier in the list).
      deployments = deployments.slice(0, middleIndex);
    } else if (action === 'bad') {
      // Bug present → this is the newest known-bad; search older half.
      lastBad = deployment;
      deployments = deployments.slice(middleIndex + 1);
    } else if (action === 'skip') {
      deployments.splice(middleIndex, 1);
    }
  }

  // Final report: first bad deployment, plus commit/inspector details.
  output.print('\n');
  let result = [
    chalk.bold(
      `The first bad deployment is: ${link(`https://${lastBad.url}`)}`
    ),
    '',
    ` ${chalk.bold('Date:')} ${formatDate(lastBad.createdAt)}`,
  ];

  const commit = getCommit(lastBad);
  if (commit) {
    const shortSha = commit.sha.substring(0, 7);
    const firstLine = commit.message.split('\n')[0];
    result.push(` ${chalk.bold('Commit:')} [${shortSha}] ${firstLine}`);
  }

  result.push(`${chalk.bold('Inspect:')} ${link(lastBad.inspectorUrl)}`);

  output.print(boxen(result.join('\n'), { padding: 1 }));
  output.print('\n');
  return 0;
}
/**
 * Resolves a deployment record for a hostname via the v10
 * `deployments/get` API (state information excluded).
 */
function getDeployment(
  client: Client,
  hostname: string
): Promise<DeploymentV6> {
  const params = new URLSearchParams([
    ['url', hostname],
    ['resolve', '1'],
    ['noState', '1'],
  ]);
  return client.fetch<DeploymentV6>(`/v10/deployments/get?${params}`);
}
/**
 * Extracts the commit SHA and message from a deployment's git metadata,
 * whichever provider (GitHub, GitLab, Bitbucket) populated it.
 * Returns `null` when the deployment has no associated commit.
 */
function getCommit(deployment: DeploymentV6) {
  const meta = deployment.meta;
  const sha =
    meta?.githubCommitSha || meta?.gitlabCommitSha || meta?.bitbucketCommitSha;
  if (!sha) {
    return null;
  }
  const message =
    meta?.githubCommitMessage ||
    meta?.gitlabCommitMessage ||
    meta?.bitbucketCommitMessage;
  return { sha, message };
}
/**
 * Repeatedly asks the user for a value until a non-empty answer is given.
 */
async function prompt(output: Output, message: string): Promise<string> {
  // eslint-disable-next-line no-constant-condition
  while (true) {
    const answers = await inquirer.prompt({
      type: 'input',
      name: 'val',
      message,
    });
    const value = answers.val;
    if (!value) {
      output.error('A value must be specified');
      continue;
    }
    return value;
  }
}

View File

@@ -1,911 +0,0 @@
import { loadEnvConfig, processEnv } from '@next/env';
import {
execCommand,
getScriptName,
GlobOptions,
scanParentDirs,
spawnAsync,
glob as buildUtilsGlob,
detectFileSystemAPI,
detectBuilders,
PackageJson,
} from '@vercel/build-utils';
import { nodeFileTrace } from '@vercel/nft';
import Sema from 'async-sema';
import chalk from 'chalk';
import { SpawnOptions } from 'child_process';
import { assert } from 'console';
import fs from 'fs-extra';
import ogGlob from 'glob';
import { dirname, isAbsolute, join, parse, relative } from 'path';
import pluralize from 'pluralize';
import Client from '../util/client';
import { VercelConfig } from '../util/dev/types';
import { emoji, prependEmoji } from '../util/emoji';
import { CantParseJSONFile } from '../util/errors-ts';
import getArgs from '../util/get-args';
import handleError from '../util/handle-error';
import confirm from '../util/input/confirm';
import { isSettingValue } from '../util/is-setting-value';
import cmd from '../util/output/cmd';
import logo from '../util/output/logo';
import param from '../util/output/param';
import stamp from '../util/output/stamp';
import { getCommandName, getPkgName } from '../util/pkg-name';
import { loadCliPlugins } from '../util/plugins';
import { findFramework } from '../util/projects/find-framework';
import { VERCEL_DIR } from '../util/projects/link';
import { readProjectSettings } from '../util/projects/project-settings';
import readJSONFile from '../util/read-json-file';
import pull from './pull';
// Shared semaphore used by `smartCopy` to bound concurrent file operations
// to 16 at a time.
const sema = new Sema(16, {
  capacity: 100,
});
// Prints usage information and examples for `vercel build`.
const help = () => {
  return console.log(`
${chalk.bold(`${logo} ${getPkgName()} build`)}
${chalk.dim('Options:')}
-h, --help Output usage information
-A ${chalk.bold.underline('FILE')}, --local-config=${chalk.bold.underline(
  'FILE'
)} Path to the local ${'`vercel.json`'} file
-Q ${chalk.bold.underline('DIR')}, --global-config=${chalk.bold.underline(
  'DIR'
)} Path to the global ${'`.vercel`'} directory
--cwd [path] The current working directory
-d, --debug Debug mode [off]
-y, --yes Skip the confirmation prompt
${chalk.dim('Examples:')}
${chalk.gray('')} Build the project
${chalk.cyan(`$ ${getPkgName()} build`)}
${chalk.cyan(`$ ${getPkgName()} build --cwd ./path-to-project`)}
`);
};
// Directory (relative to cwd) where build output is assembled.
const OUTPUT_DIR = '.output';
export default async function main(client: Client) {
if (process.env.__VERCEL_BUILD_RUNNING) {
client.output.error(
`${cmd(
`${getPkgName()} build`
)} must not recursively invoke itself. Check the Build Command in the Project Settings or the ${cmd(
'build'
)} script in ${cmd('package.json')}`
);
client.output.error(
`Learn More: https://vercel.link/recursive-invocation-of-commands`
);
return 1;
} else {
process.env.__VERCEL_BUILD_RUNNING = '1';
}
let argv;
const buildStamp = stamp();
try {
argv = getArgs(client.argv.slice(2), {
'--debug': Boolean,
'--cwd': String,
});
} catch (err) {
handleError(err);
return 1;
}
if (argv['--help']) {
help();
return 2;
}
let cwd = argv['--cwd'] || process.cwd();
let project = await readProjectSettings(join(cwd, VERCEL_DIR));
// If there are no project settings, only then do we pull them down
while (!project?.settings) {
const confirmed = await confirm(
`No Project Settings found locally. Run ${getCommandName(
'pull'
)} for retrieving them?`,
true
);
if (!confirmed) {
client.output.print(`Aborted. No Project Settings retrieved.\n`);
return 0;
}
const result = await pull(client);
if (result !== 0) {
return result;
}
project = await readProjectSettings(join(cwd, VERCEL_DIR));
}
// If `rootDirectory` exists, then `baseDir` will be the repo's root directory.
const baseDir = cwd;
cwd = project.settings.rootDirectory
? join(cwd, project.settings.rootDirectory)
: cwd;
// Load the environment
const { combinedEnv, loadedEnvFiles } = loadEnvConfig(cwd, false, {
info: () => ({}), // we don't want to log this yet.
error: (...args: any[]) => client.output.error(args.join(' ')),
});
// Set process.env with loaded environment variables
processEnv(loadedEnvFiles);
const spawnOpts: {
env: Record<string, string | undefined>;
} = {
env: { ...combinedEnv, VERCEL: '1' },
};
process.chdir(cwd);
const pkg = await readJSONFile<PackageJson>('./package.json');
if (pkg instanceof CantParseJSONFile) {
throw pkg;
}
const vercelConfig = await readJSONFile<VercelConfig>('./vercel.json');
if (vercelConfig instanceof CantParseJSONFile) {
throw vercelConfig;
}
if (!process.env.NOW_BUILDER) {
// This validation is only necessary when
// a user runs `vercel build` locally.
const globFiles = await buildUtilsGlob('**', { cwd });
const zeroConfig = await detectBuilders(Object.keys(globFiles), pkg);
const { reason } = await detectFileSystemAPI({
files: globFiles,
projectSettings: project.settings,
builders: zeroConfig.builders || [],
pkg,
vercelConfig,
tag: '',
enableFlag: true,
});
if (reason) {
client.output.error(`${cmd(`${getPkgName()} build`)} failed: ${reason}`);
return 1;
}
}
const framework = findFramework(project.settings.framework);
// If this is undefined, we bail. If it is null, then findFramework should return "Other",
// so this should really never happen, but just in case....
if (framework === undefined) {
client.output.error(
`Framework detection failed or is malformed. Please run ${getCommandName(
'pull'
)} again.`
);
return 1;
}
const buildState = { ...project.settings };
const formatSetting = (
name: string,
override: string | null | undefined,
defaults: typeof framework.settings.outputDirectory
) =>
` - ${chalk.bold(`${name}:`)} ${`${
override
? override + ` (override)`
: 'placeholder' in defaults
? chalk.italic(`${defaults.placeholder}`)
: defaults.value
}`}`;
console.log(`Retrieved Project Settings:`);
console.log(
chalk.dim(` - ${chalk.bold(`Framework Preset:`)} ${framework.name}`)
);
console.log(
chalk.dim(
formatSetting(
'Build Command',
project.settings.buildCommand,
framework.settings.buildCommand
)
)
);
console.log(
chalk.dim(
formatSetting(
'Output Directory',
project.settings.outputDirectory,
framework.settings.outputDirectory
)
)
);
buildState.outputDirectory =
project.settings.outputDirectory ||
(isSettingValue(framework.settings.outputDirectory)
? framework.settings.outputDirectory.value
: null);
buildState.rootDirectory = project.settings.rootDirectory;
if (loadedEnvFiles.length > 0) {
console.log(
`Loaded Environment Variables from ${loadedEnvFiles.length} ${pluralize(
'file',
loadedEnvFiles.length
)}:`
);
for (let envFile of loadedEnvFiles) {
console.log(chalk.dim(` - ${envFile.path}`));
}
}
// Load plugins
const debug = argv['--debug'];
let plugins;
try {
plugins = await loadCliPlugins(cwd, client.output);
} catch (error) {
client.output.error('Failed to load CLI Plugins');
handleError(error, { debug });
return 1;
}
const origLog = console.log;
const origErr = console.error;
const prefixedLog = (
prefix: string,
args: any[],
logger: (...args: any[]) => void
) => {
if (typeof args[0] === 'string') {
args[0] = `${prefix} ${args[0]}`;
} else {
args.unshift(prefix);
}
return logger(...args);
};
if (plugins?.pluginCount && plugins?.pluginCount > 0) {
console.log(
`Loaded ${plugins.pluginCount} CLI ${pluralize(
'Plugin',
plugins.pluginCount
)}`
);
// preBuild Plugins
if (plugins.preBuildPlugins.length > 0) {
console.log(
`Running ${plugins.pluginCount} CLI ${pluralize(
'Plugin',
plugins.pluginCount
)} before Build Command:`
);
for (let item of plugins.preBuildPlugins) {
const { name, plugin, color } = item;
if (typeof plugin.preBuild === 'function') {
const pluginStamp = stamp();
const fullName = name + '.preBuild';
const prefix = chalk.gray(' > ') + color(fullName + ':');
client.output.debug(`Running ${fullName}:`);
try {
console.log = (...args: any[]) =>
prefixedLog(prefix, args, origLog);
console.error = (...args: any[]) =>
prefixedLog(prefix, args, origErr);
await plugin.preBuild();
client.output.debug(
`Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
);
} catch (error) {
client.output.error(`${prefix} failed`);
handleError(error, { debug });
return 1;
} finally {
console.log = origLog;
console.error = origErr;
}
}
}
}
}
// Clean the output directory
fs.removeSync(join(cwd, OUTPUT_DIR));
if (framework && process.env.VERCEL_URL && 'envPrefix' in framework) {
for (const key of Object.keys(process.env)) {
if (key.startsWith('VERCEL_')) {
const newKey = `${framework.envPrefix}${key}`;
// Set `process.env` and `spawnOpts.env` to make sure the variables are
// available to the `build` step and the CLI Plugins.
process.env[newKey] = process.env[newKey] || process.env[key];
spawnOpts.env[newKey] = process.env[newKey];
}
}
}
// Required for Next.js to produce the correct `.nft.json` files.
spawnOpts.env.NEXT_PRIVATE_OUTPUT_TRACE_ROOT = baseDir;
// Yarn v2 PnP mode may be activated, so force
// "node-modules" linker style
const env = {
YARN_NODE_LINKER: 'node-modules',
...spawnOpts.env,
};
if (typeof buildState.buildCommand === 'string') {
console.log(`Running Build Command: ${cmd(buildState.buildCommand)}`);
await execCommand(buildState.buildCommand, {
...spawnOpts,
env,
cwd,
});
} else if (fs.existsSync(join(cwd, 'package.json'))) {
await runPackageJsonScript(
client,
cwd,
['vercel-build', 'now-build', 'build'],
spawnOpts
);
} else if (typeof framework.settings.buildCommand.value === 'string') {
console.log(
`Running Build Command: ${cmd(framework.settings.buildCommand.value)}`
);
await execCommand(framework.settings.buildCommand.value, {
...spawnOpts,
env,
cwd,
});
}
if (!fs.existsSync(join(cwd, OUTPUT_DIR))) {
let dotNextDir: string | null = null;
// If a custom `outputDirectory` was set, we'll need to verify
// if it's `.next` output, or just static output.
const userOutputDirectory = project.settings.outputDirectory;
if (typeof userOutputDirectory === 'string') {
if (fs.existsSync(join(cwd, userOutputDirectory, 'BUILD_ID'))) {
dotNextDir = join(cwd, userOutputDirectory);
client.output.debug(
`Consider ${param(userOutputDirectory)} as ${param('.next')} output.`
);
}
} else if (fs.existsSync(join(cwd, '.next'))) {
dotNextDir = join(cwd, '.next');
client.output.debug(`Found ${param('.next')} directory.`);
}
// We cannot rely on the `framework` alone, as it might be a static export,
// and the current build might use a different project that's not in the settings.
const isNextOutput = Boolean(dotNextDir);
const nextExport = await getNextExportStatus(dotNextDir);
const outputDir =
isNextOutput && !nextExport ? OUTPUT_DIR : join(OUTPUT_DIR, 'static');
const getDistDir = framework.getFsOutputDir || framework.getOutputDirName;
const distDir =
(nextExport?.exportDetail.outDirectory
? relative(cwd, nextExport.exportDetail.outDirectory)
: false) ||
dotNextDir ||
userOutputDirectory ||
(await getDistDir(cwd));
await fs.ensureDir(join(cwd, outputDir));
const copyStamp = stamp();
client.output.spinner(
`Copying files from ${param(distDir)} to ${param(outputDir)}`
);
const files = await glob(join(relative(cwd, distDir), '**'), {
ignore: [
'node_modules/**',
'.vercel/**',
'.env',
'.env.*',
'.*ignore',
'_middleware.ts',
'_middleware.mts',
'_middleware.cts',
'_middleware.mjs',
'_middleware.cjs',
'_middleware.js',
'api/**',
'.git/**',
'.next/cache/**',
],
nodir: true,
dot: true,
cwd,
absolute: true,
});
await Promise.all(
files.map(f =>
smartCopy(
client,
f,
distDir === '.'
? join(cwd, outputDir, relative(cwd, f))
: f.replace(distDir, outputDir)
)
)
);
client.output.stopSpinner();
console.log(
`Copied ${files.length.toLocaleString()} files from ${param(
distDir
)} to ${param(outputDir)} ${copyStamp()}`
);
const buildManifestPath = join(cwd, OUTPUT_DIR, 'build-manifest.json');
const routesManifestPath = join(cwd, OUTPUT_DIR, 'routes-manifest.json');
if (!fs.existsSync(buildManifestPath)) {
client.output.debug(
`Generating build manifest: ${param(buildManifestPath)}`
);
const buildManifest = {
version: 1,
cache: framework.cachePattern ? [framework.cachePattern] : [],
};
await fs.writeJSON(buildManifestPath, buildManifest, { spaces: 2 });
}
if (!fs.existsSync(routesManifestPath)) {
client.output.debug(
`Generating routes manifest: ${param(routesManifestPath)}`
);
const routesManifest = {
version: 3,
pages404: true,
basePath: '',
redirects: framework.defaultRedirects ?? [],
headers: framework.defaultHeaders ?? [],
dynamicRoutes: [],
dataRoutes: [],
rewrites: framework.defaultRewrites ?? [],
};
await fs.writeJSON(
join(cwd, OUTPUT_DIR, 'routes-manifest.json'),
routesManifest,
{ spaces: 2 }
);
}
// Special Next.js processing.
if (nextExport) {
client.output.debug('Found `next export` output.');
const htmlFiles = await buildUtilsGlob(
'**/*.html',
join(cwd, OUTPUT_DIR, 'static')
);
if (nextExport.exportDetail.success !== true) {
client.output.error(
`Export of Next.js app failed. Please check your build logs.`
);
process.exit(1);
}
await fs.mkdirp(join(cwd, OUTPUT_DIR, 'server', 'pages'));
await fs.mkdirp(join(cwd, OUTPUT_DIR, 'static'));
await Promise.all(
Object.keys(htmlFiles).map(async fileName => {
await sema.acquire();
const input = join(cwd, OUTPUT_DIR, 'static', fileName);
const target = join(cwd, OUTPUT_DIR, 'server', 'pages', fileName);
await fs.mkdirp(dirname(target));
await fs.promises.rename(input, target).finally(() => {
sema.release();
});
})
);
for (const file of [
'BUILD_ID',
'images-manifest.json',
'routes-manifest.json',
'build-manifest.json',
]) {
const input = join(nextExport.dotNextDir, file);
if (fs.existsSync(input)) {
// Do not use `smartCopy`, since we want to overwrite if they already exist.
await fs.copyFile(input, join(OUTPUT_DIR, file));
}
}
} else if (isNextOutput) {
// The contents of `.output/static` should be placed inside of `.output/static/_next/static`
const tempStatic = '___static';
await fs.rename(
join(cwd, OUTPUT_DIR, 'static'),
join(cwd, OUTPUT_DIR, tempStatic)
);
await fs.mkdirp(join(cwd, OUTPUT_DIR, 'static', '_next', 'static'));
await fs.rename(
join(cwd, OUTPUT_DIR, tempStatic),
join(cwd, OUTPUT_DIR, 'static', '_next', 'static')
);
// Next.js might reference files from the `static` directory in `middleware-manifest.json`.
// Since we move all files from `static` to `static/_next/static`, we'll need to change
// those references as well and update the manifest file.
const middlewareManifest = join(
cwd,
OUTPUT_DIR,
'server',
'middleware-manifest.json'
);
if (fs.existsSync(middlewareManifest)) {
const manifest = await fs.readJSON(middlewareManifest);
Object.keys(manifest.middleware).forEach(key => {
const files = manifest.middleware[key].files.map((f: string) => {
if (f.startsWith('static/')) {
const next = f.replace(/^static\//gm, 'static/_next/static/');
client.output.debug(
`Replacing file in \`middleware-manifest.json\`: ${f} => ${next}`
);
return next;
}
return f;
});
manifest.middleware[key].files = files;
});
await fs.writeJSON(middlewareManifest, manifest);
}
// We want to pick up directories for user-provided static files into `.`output/static`.
// More specifically, the static directory contents would then be mounted to `output/static/static`,
// and the public directory contents would be mounted to `output/static`. Old Next.js versions
// allow `static`, and newer ones allow both, but since there's nobody that actually uses both,
// we can check for the existence of both and pick the first match that we find (first
// `public`, then`static`). We can't read both at the same time because that would mean we'd
// read public for old Next.js versions that don't support it, which might be breaking (and
// we don't want to make vercel build specific framework versions).
const nextSrcDirectory = dirname(distDir);
const publicFiles = await glob('public/**', {
nodir: true,
dot: true,
cwd: nextSrcDirectory,
absolute: true,
});
if (publicFiles.length > 0) {
await Promise.all(
publicFiles.map(f =>
smartCopy(
client,
f,
join(
OUTPUT_DIR,
'static',
relative(join(dirname(distDir), 'public'), f)
)
)
)
);
} else {
const staticFiles = await glob('static/**', {
nodir: true,
dot: true,
cwd: nextSrcDirectory,
absolute: true,
});
await Promise.all(
staticFiles.map(f =>
smartCopy(
client,
f,
join(
OUTPUT_DIR,
'static',
'static',
relative(join(dirname(distDir), 'static'), f)
)
)
)
);
}
// Regardless of the Next.js version, we make sure that it is compatible with
// the Filesystem API. We get there by moving all the files needed
// into the outputs directory `inputs` folder. Next.js is > 12, we can
// read the .nft.json files directly. If there aren't .nft.json files
// we trace and create them. We then resolve the files in each nft file list
// and move them into the "inputs" directory. We rename them with hashes to
// prevent collisions and then update the related .nft files accordingly
// to point to the newly named input files. Again, all of this is so that Next.js
// works with the Filesystem API (and so .output contains all inputs
// needed to run Next.js) and `vc --prebuilt`.
const nftFiles = await glob(join(OUTPUT_DIR, '**', '*.nft.json'), {
nodir: true,
dot: true,
ignore: ['cache/**'],
cwd,
absolute: true,
});
// If there are no .nft.json files, we know that Next.js < 12. We then
// execute the tracing on our own.
if (nftFiles.length === 0) {
const serverFiles = await glob(
join(OUTPUT_DIR, 'server', 'pages', '**', '*.js'),
{
nodir: true,
dot: true,
cwd,
ignore: ['webpack-runtime.js'],
absolute: true,
}
);
for (let f of serverFiles) {
const { ext, dir } = parse(f);
const { fileList } = await nodeFileTrace([f], {
ignore: [
relative(cwd, f),
'node_modules/next/dist/pages/**/*',
'node_modules/next/dist/compiled/webpack/(bundle4|bundle5).js',
'node_modules/react/**/*.development.js',
'node_modules/react-dom/**/*.development.js',
'node_modules/use-subscription/**/*.development.js',
'node_modules/sharp/**/*',
],
});
fileList.delete(relative(cwd, f));
const nftFileName = f.replace(ext, '.js.nft.json');
client.output.debug(`Creating ${nftFileName}`);
await fs.writeJSON(nftFileName, {
version: 2,
files: Array.from(fileList).map(fileListEntry =>
relative(dir, fileListEntry)
),
});
}
}
const requiredServerFilesPath = join(
OUTPUT_DIR,
'required-server-files.json'
);
if (fs.existsSync(requiredServerFilesPath)) {
client.output.debug(`Resolve ${param('required-server-files.json')}.`);
const requiredServerFilesJson = await fs.readJSON(
requiredServerFilesPath
);
await fs.writeJSON(requiredServerFilesPath, {
...requiredServerFilesJson,
appDir: '.',
files: requiredServerFilesJson.files.map((i: string) => {
const originalPath = join(requiredServerFilesJson.appDir, i);
const relPath = join(OUTPUT_DIR, relative(distDir, originalPath));
return relPath;
}),
});
}
}
}
// Build Plugins
if (plugins?.buildPlugins && plugins.buildPlugins.length > 0) {
console.log(
`Running ${plugins.pluginCount} CLI ${pluralize(
'Plugin',
plugins.pluginCount
)} after Build Command:`
);
let vercelConfig: VercelConfig = {};
try {
vercelConfig = await fs.readJSON(join(cwd, 'vercel.json'));
} catch (error) {
if (error.code !== 'ENOENT') {
throw new Error(`Failed to read vercel.json: ${error.message}`);
}
}
for (let item of plugins.buildPlugins) {
const { name, plugin, color } = item;
if (typeof plugin.build === 'function') {
const pluginStamp = stamp();
const fullName = name + '.build';
const prefix = chalk.gray(' > ') + color(fullName + ':');
client.output.debug(`Running ${fullName}:`);
try {
console.log = (...args: any[]) => prefixedLog(prefix, args, origLog);
console.error = (...args: any[]) =>
prefixedLog(prefix, args, origErr);
await plugin.build({
vercelConfig,
workPath: cwd,
});
client.output.debug(
`Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
);
} catch (error) {
client.output.error(`${prefix} failed`);
handleError(error, { debug });
return 1;
} finally {
console.log = origLog;
console.error = origLog;
}
}
}
}
console.log(
`${prependEmoji(
`Build Completed in ${chalk.bold(OUTPUT_DIR)} ${chalk.gray(
buildStamp()
)}`,
emoji('success')
)}`
);
return 0;
}
/**
 * Runs the first script from `scriptNames` found in the `package.json`
 * at `destPath`, using npm or yarn as detected via `scanParentDirs`.
 *
 * @param client - CLI client (used for debug output).
 * @param destPath - absolute directory containing the `package.json`.
 * @param scriptNames - candidate script name(s), checked in order.
 * @param spawnOpts - extra spawn options; `env` is merged over `process.env`.
 * @returns `true` when a script was found and executed, `false` otherwise.
 */
export async function runPackageJsonScript(
  client: Client,
  destPath: string,
  scriptNames: string | Iterable<string>,
  spawnOpts?: SpawnOptions
) {
  assert(isAbsolute(destPath));

  const { packageJson, cliType, lockfileVersion } = await scanParentDirs(
    destPath,
    true
  );
  const scriptName = getScriptName(
    packageJson,
    typeof scriptNames === 'string' ? [scriptNames] : scriptNames
  );
  if (!scriptName) return false;

  client.output.debug('Running user script...');
  const runScriptTime = Date.now();

  const opts: any = { cwd: destPath, ...spawnOpts };
  // Note: assigns the merged env back onto `opts` and keeps a local alias.
  const env = (opts.env = { ...process.env, ...opts.env });

  if (cliType === 'npm') {
    opts.prettyCommand = `npm run ${scriptName}`;
    if (typeof lockfileVersion === 'number' && lockfileVersion >= 2) {
      // Ensure that npm 7 is at the beginning of the `$PATH`
      env.PATH = `/node16/bin-npm7:${env.PATH}`;
    }
  } else {
    opts.prettyCommand = `yarn run ${scriptName}`;

    // Yarn v2 PnP mode may be activated, so force "node-modules" linker style
    if (!env.YARN_NODE_LINKER) {
      env.YARN_NODE_LINKER = 'node-modules';
    }
  }

  console.log(`Running Build Command: ${cmd(opts.prettyCommand)}\n`);
  await spawnAsync(cliType, ['run', scriptName], opts);
  console.log(); // give it some room
  client.output.debug(`Script complete [${Date.now() - runScriptTime}ms]`);
  return true;
}
/**
 * Hard-links `existingPath` to `newPath` when possible, falling back to a
 * plain copy. Manifest-style JSON files are always copied (never linked).
 */
async function linkOrCopy(existingPath: string, newPath: string) {
  // These files may be rewritten later, so never hard-link them.
  const alwaysCopy =
    newPath.endsWith('.nft.json') ||
    newPath.endsWith('middleware-manifest.json') ||
    newPath.endsWith('required-server-files.json');

  try {
    if (alwaysCopy) {
      await fs.copy(existingPath, newPath, {
        overwrite: true,
      });
    } else {
      await fs.createLink(existingPath, newPath);
    }
  } catch (err: any) {
    // eslint-disable-line
    // If a symlink to the same file already exists
    // then trying to copy it will make an empty file from it.
    if (err['code'] === 'EEXIST') return;
    // In some VERY rare cases (1 in a thousand), symlink creation fails on Windows.
    // In that case, we just fall back to copying.
    // This issue is reproducible with "pnpm add @material-ui/icons@4.9.1"
    await fs.copy(existingPath, newPath, {
      overwrite: true,
    });
  }
}
/**
 * Copies (or hard-links) `from` to `to` while limiting concurrency via the
 * module-level semaphore.
 *
 * Fix: `sema.acquire()` from `async-sema` returns a Promise; the original
 * call was not awaited, so the semaphore never actually throttled the
 * number of concurrent copy operations.
 */
async function smartCopy(client: Client, from: string, to: string) {
  await sema.acquire();
  try {
    client.output.debug(`Copying from ${from} to ${to}`);
    await linkOrCopy(from, to);
  } finally {
    // Always release the slot, even when the copy fails.
    sema.release();
  }
}
/**
 * Promise-based wrapper around the callback-style `glob` module.
 *
 * @param pattern - Glob pattern to match.
 * @param options - Options forwarded to the underlying `glob` call.
 * @returns The list of matched file paths.
 */
async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
  return new Promise<string[]>((resolve, reject) => {
    ogGlob(pattern, options, (err, files) => {
      if (err) {
        reject(err);
      } else {
        resolve(files);
      }
    });
  });
}
/**
 * Reads the `next export` status files from the `.next` directory.
 * Files will only exist when `next export` was used.
 *
 * @param dotNextDir - Path to the `.next` directory, or `null` when unknown.
 * @returns Export metadata, or `null` when `next export` was not used.
 */
async function getNextExportStatus(dotNextDir: string | null) {
  if (!dotNextDir) {
    return null;
  }

  // Reads a JSON file from the `.next` dir, mapping "file not found" to null.
  const readJsonOrNull = <T>(fileName: string): Promise<T | null> =>
    fs.readJSON(join(dotNextDir, fileName)).catch(error => {
      if (error.code === 'ENOENT') {
        return null;
      }
      throw error;
    });

  const exportDetail = await readJsonOrNull<{
    success: boolean;
    outDirectory: string;
  }>('export-detail.json');

  if (!exportDetail) {
    return null;
  }

  const exportMarker = await readJsonOrNull<{
    version: 1;
    exportTrailingSlash: boolean;
    hasExportPathMap: boolean;
  }>('export-marker.json');

  return {
    dotNextDir,
    exportDetail,
    exportMarker: {
      // Trailing-slash handling only applies when an export path map exists.
      trailingSlash: exportMarker?.hasExportPathMap
        ? exportMarker.exportTrailingSlash
        : false,
    },
  };
}

View File

@@ -28,6 +28,7 @@ export const help = () => `
${chalk.dim('Advanced')}
rm | remove [id] Removes a deployment
bisect Use binary search to find the deployment that introduced a bug
domains [name] Manages your domain names
projects Manages your Projects
dns [name] Manages your DNS records

View File

@@ -2,7 +2,7 @@ export default new Map([
['alias', 'alias'],
['aliases', 'alias'],
['billing', 'billing'],
['build', 'build'],
['bisect', 'bisect'],
['cc', 'billing'],
['cert', 'certs'],
['certs', 'certs'],

View File

@@ -161,7 +161,8 @@ const main = async () => {
// * a subcommand (as in: `vercel ls`)
const targetOrSubcommand = argv._[2];
if (targetOrSubcommand === 'build') {
const betaCommands: string[] = [];
if (betaCommands.includes(targetOrSubcommand)) {
console.log(
`${chalk.grey(
`${getTitleName()} CLI ${
@@ -292,14 +293,7 @@ const main = async () => {
let authConfig = null;
const subcommandsWithoutToken = [
'login',
'logout',
'help',
'init',
'update',
'build',
];
const subcommandsWithoutToken = ['login', 'logout', 'help', 'init', 'update'];
if (authConfigExists) {
try {
@@ -406,33 +400,20 @@ const main = async () => {
} else if (commands.has(singular)) {
alternative = singular;
}
if (targetOrSubcommand === 'build') {
output.note(
`If you wish to deploy the ${fileType} ${param(
targetOrSubcommand
)}, run ${getCommandName('deploy build')}.` +
console.error(
error(
`The supplied argument ${param(targetOrSubcommand)} is ambiguous.` +
`\nIf you wish to deploy the ${fileType} ${param(
targetOrSubcommand
)}, first run "cd ${targetOrSubcommand}". ` +
(alternative
? `\nIf you wish to use the subcommand ${param(
targetOrSubcommand
)}, use ${param(alternative)} instead.`
: '')
);
} else {
console.error(
error(
`The supplied argument ${param(targetOrSubcommand)} is ambiguous.` +
`\nIf you wish to deploy the ${fileType} ${param(
targetOrSubcommand
)}, first run "cd ${targetOrSubcommand}". ` +
(alternative
? `\nIf you wish to use the subcommand ${param(
targetOrSubcommand
)}, use ${param(alternative)} instead.`
: '')
)
);
return 1;
}
)
);
return 1;
}
if (subcommandExists) {
@@ -627,8 +608,8 @@ const main = async () => {
case 'billing':
func = await import('./commands/billing');
break;
case 'build':
func = await import('./commands/build');
case 'bisect':
func = await import('./commands/bisect');
break;
case 'certs':
func = await import('./commands/certs');

View File

@@ -85,10 +85,6 @@ export type Domain = {
transferredAt?: number | null;
orderedAt?: number;
serviceType: 'zeit.world' | 'external' | 'na';
verified: boolean;
nsVerifiedAt: number | null;
txtVerifiedAt: number | null;
verificationRecord: string;
nameservers: string[];
intendedNameservers: string[];
creator: {
@@ -133,6 +129,13 @@ export type Deployment = {
created: number;
createdAt: number;
creator: { uid: string; username: string };
target: string | null;
ownerId: string;
projectId: string;
inspectorUrl: string;
meta: {
[key: string]: any;
};
};
export type Alias = {

View File

@@ -35,6 +35,7 @@ import {
BuildResultV3,
BuilderOutputs,
EnvConfigs,
BuiltLambda,
} from './types';
import { normalizeRoutes } from '@vercel/routing-utils';
import getUpdateCommand from '../get-update-command';
@@ -288,7 +289,7 @@ export async function executeBuild(
// subclass type instances.
for (const name of Object.keys(output)) {
const obj = output[name] as File;
let lambda: Lambda;
let lambda: BuiltLambda;
let fileRef: FileFsRef;
let fileBlob: FileBlob;
switch (obj.type) {
@@ -302,7 +303,7 @@ export async function executeBuild(
output[name] = fileBlob;
break;
case 'Lambda':
lambda = Object.assign(Object.create(Lambda.prototype), obj) as Lambda;
lambda = Object.assign(Object.create(Lambda.prototype), obj);
// Convert the JSON-ified Buffer object back into an actual Buffer
lambda.zipBuffer = Buffer.from((obj as any).zipBuffer.data);
output[name] = lambda;

View File

@@ -40,7 +40,6 @@ import {
detectApiExtensions,
spawnCommand,
isOfficialRuntime,
detectFileSystemAPI,
} from '@vercel/build-utils';
import frameworkList from '@vercel/frameworks';
@@ -90,7 +89,6 @@ import {
} from './types';
import { ProjectEnvVariable, ProjectSettings } from '../../types';
import exposeSystemEnvs from './expose-system-envs';
import { loadCliPlugins } from '../plugins';
const frontendRuntimeSet = new Set(
frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
@@ -600,32 +598,6 @@ export default class DevServer {
);
}
const { reason, metadata } = await detectFileSystemAPI({
files,
builders: builders || [],
projectSettings: projectSettings || this.projectSettings || {},
vercelConfig,
pkg,
tag: '',
enableFlag: true,
});
if (reason) {
if (metadata.hasMiddleware) {
this.output.error(
`Detected middleware usage which requires the latest API. ${reason}`
);
await this.exit();
} else if (metadata.plugins.length > 0) {
this.output.error(
`Detected CLI plugins which requires the latest API. ${reason}`
);
await this.exit();
} else {
this.output.warn(`Unable to use latest API. ${reason}`);
}
}
if (builders) {
if (this.devCommand) {
builders = builders.filter(filterFrontendBuilds);
@@ -1377,6 +1349,7 @@ export default class DevServer {
return false;
};
/*
runDevMiddleware = async (
req: http.IncomingMessage,
res: http.ServerResponse
@@ -1400,6 +1373,7 @@ export default class DevServer {
};
}
};
*/
/**
* Serve project directory as a v2 deployment.
@@ -1468,6 +1442,7 @@ export default class DevServer {
let prevUrl = req.url;
let prevHeaders: HttpHeadersConfig = {};
/*
const middlewareResult = await this.runDevMiddleware(req, res);
if (middlewareResult) {
@@ -1497,6 +1472,7 @@ export default class DevServer {
prevUrl = url.format(origUrl);
}
}
*/
for (const phase of phases) {
statusCode = undefined;

View File

@@ -66,6 +66,7 @@ export interface BuilderInputs {
}
export interface BuiltLambda extends Lambda {
zipBuffer: Buffer;
fn?: FunLambda;
}

View File

@@ -16,7 +16,7 @@ export async function getDomain(
);
try {
const { domain } = await client.fetch<Response>(
`/v4/domains/${domainName}`
`/v5/domains/${domainName}`
);
return domain;

View File

@@ -7,9 +7,7 @@ import addDomain from './add-domain';
import Client from '../client';
import maybeGetDomainByName from './maybe-get-domain-by-name';
import purchaseDomainIfAvailable from './purchase-domain-if-available';
import verifyDomain from './verify-domain';
import extractDomain from '../alias/extract-domain';
import isWildcardAlias from '../alias/is-wildcard-alias';
export default async function setupDomain(
output: Output,
@@ -27,36 +25,6 @@ export default async function setupDomain(
if (info) {
const { name: domain } = info;
output.debug(`Domain ${domain} found for the given context`);
if (!info.verified || (!info.nsVerifiedAt && isWildcardAlias(alias))) {
output.debug(
`Domain ${domain} is not verified, trying to perform a verification`
);
const verificationResult = await verifyDomain(
client,
domain,
contextName
);
if (verificationResult instanceof ERRORS.DomainVerificationFailed) {
output.debug(`Domain ${domain} verification failed`);
return verificationResult;
}
if (!verificationResult.nsVerifiedAt && isWildcardAlias(alias)) {
return new ERRORS.DomainNsNotVerifiedForWildcard({
domain,
nsVerification: {
intendedNameservers: verificationResult.intendedNameservers,
nameservers: verificationResult.nameservers,
},
});
}
output.debug(`Domain ${domain} successfuly verified`);
return maybeGetDomainByName(client, contextName, domain) as Promise<
Domain
>;
}
output.debug(`Domain ${domain} is already verified`);
return info;
}
@@ -92,21 +60,6 @@ export default async function setupDomain(
return addResult;
}
if (!addResult.verified) {
const verificationResult = await verifyDomain(
client,
domain,
contextName
);
if (verificationResult instanceof ERRORS.DomainVerificationFailed) {
output.debug(`Domain ${domain} was added but it couldn't be verified`);
return verificationResult;
}
output.debug(`Domain ${domain} successfuly added and manually verified`);
return verificationResult;
}
output.debug(
`Domain ${domain} successfuly added and automatically verified`
);
@@ -120,23 +73,6 @@ export default async function setupDomain(
aliasDomain
)) as Domain;
const { name: domain } = purchasedDomain;
if (!purchasedDomain.verified) {
const verificationResult = await verifyDomain(client, domain, contextName);
if (verificationResult instanceof ERRORS.DomainVerificationFailed) {
output.debug(
`Domain ${domain} was purchased but verification is still pending`
);
return new ERRORS.DomainVerificationFailed({
domain: verificationResult.meta.domain,
nsVerification: verificationResult.meta.nsVerification,
txtVerification: verificationResult.meta.txtVerification,
purchased: true,
});
}
output.debug(`Domain ${domain} was purchased and it was manually verified`);
return maybeGetDomainByName(client, contextName, domain) as Promise<Domain>;
}
output.debug(
`Domain ${domain} was purchased and it is automatically verified`

View File

@@ -1,47 +0,0 @@
import chalk from 'chalk';
import retry from 'async-retry';
import { Domain } from '../../types';
import * as ERRORS from '../errors-ts';
import Client from '../client';
export default async function verifyDomain(
client: Client,
domainName: string,
contextName: string
) {
client.output.spinner(
`Verifying domain ${domainName} under ${chalk.bold(contextName)}`
);
try {
const { domain } = await performVerifyDomain(client, domainName);
return domain;
} catch (error) {
if (error.code === 'verification_failed') {
return new ERRORS.DomainVerificationFailed({
purchased: false,
domain: error.name as string,
nsVerification: error.nsVerification as ERRORS.NSVerificationError,
txtVerification: error.txtVerification as ERRORS.TXTVerificationError,
});
}
throw error;
}
}
type Response = {
domain: Domain;
};
async function performVerifyDomain(client: Client, domain: string) {
return retry(
async () =>
client.fetch<Response>(
`/v4/domains/${encodeURIComponent(domain)}/verify`,
{
body: {},
method: 'POST',
}
),
{ retries: 5, maxTimeout: 8000 }
);
}

View File

@@ -260,31 +260,6 @@ export type TXTVerificationError = {
values: string[];
};
/**
* This error is returned when the domain is not verified by nameservers for wildcard alias.
*/
export class DomainNsNotVerifiedForWildcard extends NowError<
'DOMAIN_NS_NOT_VERIFIED_FOR_WILDCARD',
{
domain: string;
nsVerification: NSVerificationError;
}
> {
constructor({
domain,
nsVerification,
}: {
domain: string;
nsVerification: NSVerificationError;
}) {
super({
code: 'DOMAIN_NS_NOT_VERIFIED_FOR_WILDCARD',
meta: { domain, nsVerification },
message: `The domain ${domain} is not verified by nameservers for wildcard alias.`,
});
}
}
/**
* Used when a domain is validated because we tried to add it to an account
* via API or for any other reason.

View File

@@ -1,76 +0,0 @@
import code from '../util/output/code';
import { getColorForPkgName } from '../util/output/color-name-cache';
import cliPkgJson from '../util/pkg';
import { scanParentDirs } from '@vercel/build-utils';
import { Output } from './output';
const VERCEL_PLUGIN_PREFIX = 'vercel-plugin-';
export async function loadCliPlugins(cwd: string, output: Output) {
const { packageJson } = await scanParentDirs(cwd, true);
let pluginCount = 0;
const preBuildPlugins = [];
const buildPlugins = [];
const devServerPlugins = [];
const devMiddlewarePlugins = [];
const deps = new Set(
[
...Object.keys(packageJson?.dependencies || {}),
...Object.keys(packageJson?.devDependencies || {}),
...Object.keys(cliPkgJson.dependencies),
].filter(dep => dep.startsWith(VERCEL_PLUGIN_PREFIX))
);
for (let dep of deps) {
pluginCount++;
const resolved = require.resolve(dep, {
paths: [cwd, process.cwd(), __dirname],
});
let plugin;
try {
plugin = require(resolved);
const color = getColorForPkgName(dep);
if (typeof plugin.preBuild === 'function') {
preBuildPlugins.push({
plugin,
name: dep,
color,
});
}
if (typeof plugin.build === 'function') {
buildPlugins.push({
plugin,
name: dep,
color,
});
}
if (typeof plugin.startDevServer === 'function') {
devServerPlugins.push({
plugin,
name: dep,
color,
});
}
if (typeof plugin.runDevMiddleware === 'function') {
devMiddlewarePlugins.push({
plugin,
name: dep,
color,
});
}
} catch (error) {
output.error(`Failed to import ${code(dep)}`);
throw error;
}
}
return {
pluginCount,
preBuildPlugins,
buildPlugins,
devServerPlugins,
devMiddlewarePlugins,
};
}

View File

@@ -23,7 +23,6 @@ const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);
export const VERCEL_DIR = '.vercel';
export const VERCEL_OUTPUT_DIR = '.output';
export const VERCEL_DIR_FALLBACK = '.now';
export const VERCEL_DIR_README = 'README.txt';
export const VERCEL_DIR_PROJECT = 'project.json';
@@ -256,13 +255,6 @@ export async function linkFolderToProject(
contentModified = true;
}
if (!gitIgnore.split(EOL).includes(VERCEL_OUTPUT_DIR)) {
gitIgnore += `${
gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
}${VERCEL_OUTPUT_DIR}${EOL}`;
contentModified = true;
}
if (contentModified) {
await writeFile(gitIgnorePath, gitIgnore);
isGitIgnoreUpdated = true;

View File

@@ -1 +1,3 @@
Flask==1.0.3
Flask==2.0.1
werkzeug==2.0.1

View File

@@ -1703,13 +1703,11 @@ test(
fetchOpts('image/webp')
);
*/
await testPath(
200,
toUrl('/test.svg', 64, 70),
null,
expectHeader('image/svg+xml'),
fetchOpts('image/webp')
);
/*
* Disabled svg in https://github.com/vercel/next.js/pull/34431
* We can test for 400 status since config option is not enabled.
*/
await testPath(400, toUrl('/test.svg', 64, 70));
/* Disabled bmp because `next dev` bypasses
* and production will convert. Eventually
* we can enable once `next dev` supports it.

View File

@@ -1,4 +1,3 @@
.next
yarn.lock
.vercel
.output
.vercel

View File

@@ -367,7 +367,7 @@ module.exports = async function prepare(session, binaryPath) {
},
'project-link-gitignore': {
'package.json': '{}',
'.gitignore': '.output',
'.gitignore': '',
},
'project-link-legacy': {
'index.html': 'Hello',

View File

@@ -7,9 +7,10 @@ import { Readable } from 'stream';
import { homedir } from 'os';
import _execa from 'execa';
import XDGAppPaths from 'xdg-app-paths';
import fetch from 'node-fetch';
import nodeFetch from 'node-fetch';
import tmp from 'tmp-promise';
import retry from 'async-retry';
import createFetchRetry from '@vercel/fetch-retry';
import fs, {
writeFile,
readFile,
@@ -24,6 +25,8 @@ import pkg from '../package';
import prepareFixtures from './helpers/prepare';
import { fetchTokenWithRetry } from '../../../test/lib/deployment/now-deploy';
const fetch = createFetchRetry(nodeFetch);
// log command when running `execa`
function execa(file, args, options) {
console.log(`$ vercel ${args.join(' ')}`);
@@ -2268,62 +2271,6 @@ test('[vercel dev] fails when development commad calls vercel dev recursively',
);
});
test('[vercel build] fails when build commad calls vercel build recursively', async t => {
const dir = fixture('build-fail-on-recursion-command');
const projectName = `build-fail-on-recursion-command-${
Math.random().toString(36).split('.')[1]
}`;
const build = execa(binaryPath, ['build', ...defaultArgs], {
cwd: dir,
reject: false,
});
await waitForPrompt(build, chunk =>
chunk.includes('No Project Settings found locally')
);
build.stdin.write('yes\n');
await setupProject(build, projectName, {
buildCommand: `${binaryPath} build`,
});
const { exitCode, stderr } = await build;
t.is(exitCode, 1);
t.true(
stderr.includes('must not recursively invoke itself'),
`Received instead: "${stderr}"`
);
});
test('[vercel build] fails when build script calls vercel build recursively', async t => {
const dir = fixture('build-fail-on-recursion-script');
const projectName = `build-fail-on-recursion-script-${
Math.random().toString(36).split('.')[1]
}`;
const build = execa(binaryPath, ['build', ...defaultArgs], {
cwd: dir,
reject: false,
});
await waitForPrompt(build, chunk =>
chunk.includes('No Project Settings found locally')
);
build.stdin.write('yes\n');
await setupProject(build, projectName);
const { exitCode, stderr } = await build;
t.is(exitCode, 1);
t.true(
stderr.includes('must not recursively invoke itself'),
`Received instead: "${stderr}"`
);
});
test('`vercel rm` removes a deployment', async t => {
const directory = fixture('static-deployment');
@@ -2781,7 +2728,7 @@ test('should show prompts to set up project during first deploy', async t => {
// Ensure .gitignore is created
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
t.is(gitignore, '.vercel\n.output\n');
t.is(gitignore, '.vercel\n');
// Ensure .vercel/project.json and .vercel/README.txt are created
t.is(
@@ -3350,7 +3297,7 @@ test('[vc link] should show prompts to set up project', async t => {
// Ensure .gitignore is created
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
t.is(gitignore, '.vercel\n.output\n');
t.is(gitignore, '.vercel\n');
// Ensure .vercel/project.json and .vercel/README.txt are created
t.is(
@@ -3385,7 +3332,7 @@ test('[vc link --confirm] should not show prompts and autolink', async t => {
// Ensure .gitignore is created
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
t.is(gitignore, '.vercel\n.output\n');
t.is(gitignore, '.vercel\n');
// Ensure .vercel/project.json and .vercel/README.txt are created
t.is(
@@ -3420,7 +3367,7 @@ test('[vc link] should not duplicate paths in .gitignore', async t => {
// Ensure .gitignore is created
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
t.is(gitignore, '.output\n.vercel\n');
t.is(gitignore, '.vercel\n');
});
test('[vc dev] should show prompts to set up project', async t => {
@@ -3444,7 +3391,7 @@ test('[vc dev] should show prompts to set up project', async t => {
// Ensure .gitignore is created
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
t.is(gitignore, '.vercel\n.output\n');
t.is(gitignore, '.vercel\n');
// Ensure .vercel/project.json and .vercel/README.txt are created
t.is(
@@ -3511,7 +3458,7 @@ test('[vc link] should show project prompts but not framework when `builds` defi
// Ensure .gitignore is created
const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
t.is(gitignore, '.vercel\n.output\n');
t.is(gitignore, '.vercel\n');
// Ensure .vercel/project.json and .vercel/README.txt are created
t.is(

View File

@@ -336,6 +336,7 @@ describe('DevServer', () => {
})
);
/*
it(
'should support edge middleware',
testFixture('edge-middleware', async server => {
@@ -394,4 +395,5 @@ describe('DevServer', () => {
expect(body).toStrictEqual('is strict mode? yes');
})
);
*/
});

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/client",
"version": "10.2.3-canary.51",
"version": "10.3.1-canary.1",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"homepage": "https://vercel.com",
@@ -40,7 +40,7 @@
]
},
"dependencies": {
"@vercel/build-utils": "2.13.1-canary.0",
"@vercel/build-utils": "2.14.1-canary.1",
"@zeit/fetch": "5.2.0",
"async-retry": "1.2.3",
"async-sema": "3.0.0",

View File

@@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 128 128" height="128" width="128">
<rect fill="#F03E2F" height="128" width="128"></rect>
<path fill="white" d="M39.4229 33.1629C39.4229 44.1614 46.3362 50.7055 60.1767 54.1563L74.8429 57.4971C87.9417 60.453 95.9185 67.7945 95.9185 79.7554C96.0204 84.9662 94.296 90.053 91.0345 94.1634C91.0345 82.23 84.751 75.7822 69.595 71.9052L55.1947 68.6881C43.6633 66.1035 34.7628 60.068 34.7628 47.076C34.7021 42.0589 36.3415 37.1644 39.4229 33.1629Z"></path>
<path fill="#F9B1AB" d="M82.0221 76.827C88.2776 80.759 91.0205 86.2583 91.0205 94.1497C85.8426 100.666 76.7462 104.323 66.0545 104.323C48.0576 104.323 35.4626 95.6207 32.6637 80.4978H49.9468C52.172 87.4406 58.0636 90.6577 65.9285 90.6577C75.5287 90.6577 81.9102 85.6258 82.0361 76.7995"></path>
<path fill="#F9B1AB" d="M48.4074 49.4682C45.5509 47.8004 43.2073 45.404 41.6255 42.5332C40.0437 39.6624 39.2825 36.4244 39.423 33.1629C44.419 26.7013 53.1095 22.7556 63.7033 22.7556C82.0361 22.7556 92.6439 32.2693 95.2608 45.66H78.6354C76.8021 40.3807 72.212 36.27 63.8433 36.27C54.9008 36.27 48.7992 41.3843 48.4494 49.4682"></path>
</svg>

After

Width:  |  Height:  |  Size: 1.1 KiB

View File

@@ -1 +1,19 @@
<svg width="48" height="48" fill="none" xmlns="http://www.w3.org/2000/svg"><circle cx="24" cy="24" r="24" fill="#1CA1FA"/><path d="M19.77 16.09h-2.625v11.086c0 3.656 2.613 6.234 6.843 6.234 4.254 0 6.856-2.578 6.856-6.234V16.09h-2.625v10.875c0 2.414-1.535 4.113-4.23 4.113-2.684 0-4.22-1.7-4.22-4.113V16.09z" fill="#fff"/></svg>
<svg width="48" height="48" viewBox="0 0 28 27" version="1.1" xmlns="http://www.w3.org/2000/svg"
xmlns:xlink="http://www.w3.org/1999/xlink">
<g stroke="none" stroke-width="1" fill="none" fill-rule="evenodd">
<g transform="translate(-22.000000, -23.000000)">
<g transform="translate(23.000000, 24.000000)">
<path
d="M25.59375,11.968254 C25.59375,12.8799621 24.8542859,13.6190476 23.9421107,13.6190476 C23.0299354,13.6190476 2.97006457,13.6190476 2.05788934,13.6190476 C1.14571412,13.6190476 0.40625,12.8799621 0.40625,11.968254 C0.40625,11.368056 0.726725255,10.8426707 1.2059758,10.5537152 C0.964572543,10.2667081 0.819159836,9.89635858 0.819159836,9.49206349 C0.819159836,8.83899235 1.19858525,8.27449538 1.74912718,8.00677482 C1.68179398,7.82682944 1.64497951,7.63199747 1.64497951,7.42857143 C1.64497951,6.51686327 2.38444363,5.77777778 3.29661885,5.77777778 C3.37476958,5.77777778 3.45165256,5.78320283 3.52691139,5.79369671 C3.38061557,5.54732422 3.29661885,5.2596568 3.29661885,4.95238095 C3.29661885,4.04067279 4.03608297,3.3015873 4.9482582,3.3015873 C5.25569146,3.3015873 5.54350625,3.38554101 5.79000496,3.53176193 C5.7795057,3.45654163 5.77407787,3.37969801 5.77407787,3.3015873 C5.77407787,2.38987914 6.51354199,1.65079365 7.42571721,1.65079365 C7.73083366,1.65079365 8.0166262,1.73348677 8.26188409,1.87767313 C8.35426205,1.05344 9.0538542,0.412698413 9.90317623,0.412698413 C10.4521218,0.412698413 10.938518,0.680366532 11.2388423,1.09224331 C11.4679015,0.455411137 12.0775125,0 12.7935451,0 C13.5095776,0 14.1191887,0.455411137 14.3482478,1.09224331 C14.6485721,0.680366532 15.1349684,0.412698413 15.6839139,0.412698413 C16.533236,0.412698413 17.2328281,1.05344 17.3252061,1.87767313 C17.570464,1.73348677 17.8562565,1.65079365 18.161373,1.65079365 C19.0735482,1.65079365 19.8130123,2.38987914 19.8130123,3.3015873 C19.8130123,3.37969801 19.8075845,3.45654163 19.7970852,3.53176193 C20.0435839,3.38554101 20.3313987,3.3015873 20.638832,3.3015873 C21.5510072,3.3015873 22.2904713,4.04067279 22.2904713,4.95238095 C22.2904713,5.2596568 22.2064746,5.54732422 22.0601788,5.79369671 C22.1354376,5.78320283 22.2123206,5.77777778 22.2904713,5.77777778 C23.2026465,5.77777778 23.9421107,6.51686327 23.9421107,7.42857143 C23.9421107,7.63199747 23.9052962,7.82682944 23.837963,8.00677482 
C24.3885049,8.27449538 24.7679303,8.83899235 24.7679303,9.49206349 C24.7679303,9.83184534 24.665222,10.1476506 24.489143,10.4101567 C25.1324705,10.635781 25.59375,11.2481763 25.59375,11.968254 Z"
stroke="#000000" stroke-width="0.8" fill="#FFFFFF" stroke-linejoin="round"></path>
<rect fill="#000000" x="8.53125" y="23.9365079" width="8.9375" height="2.06349206"></rect>
<path
d="M0,12.7936508 L26,12.7936508 C25.579783,19.7028245 19.9201192,25.1746032 13,25.1746032 C6.07988078,25.1746032 0.420216968,19.7028245 0,12.7936508 L0,12.7936508 Z"
stroke="#000000" stroke-width="0.8" fill="#1890FF"></path>
<path
d="M7.3125,7.01587302 C7.08813432,7.01587302 6.90625,6.83110164 6.90625,6.6031746 C6.90625,6.37524756 7.08813432,6.19047619 7.3125,6.19047619 C7.53686568,6.19047619 7.71875,6.37524756 7.71875,6.6031746 C7.71875,6.83110164 7.53686568,7.01587302 7.3125,7.01587302 Z M5.6875,9.07936508 C5.46313432,9.07936508 5.28125,8.89459371 5.28125,8.66666667 C5.28125,8.43873963 5.46313432,8.25396825 5.6875,8.25396825 C5.91186568,8.25396825 6.09375,8.43873963 6.09375,8.66666667 C6.09375,8.89459371 5.91186568,9.07936508 5.6875,9.07936508 Z M7.3125,10.7301587 C7.08813432,10.7301587 6.90625,10.5453874 6.90625,10.3174603 C6.90625,10.0895333 7.08813432,9.9047619 7.3125,9.9047619 C7.53686568,9.9047619 7.71875,10.0895333 7.71875,10.3174603 C7.71875,10.5453874 7.53686568,10.7301587 7.3125,10.7301587 Z M8.9375,7.42857143 C8.71313432,7.42857143 8.53125,7.24380006 8.53125,7.01587302 C8.53125,6.78794598 8.71313432,6.6031746 8.9375,6.6031746 C9.16186568,6.6031746 9.34375,6.78794598 9.34375,7.01587302 C9.34375,7.24380006 9.16186568,7.42857143 8.9375,7.42857143 Z M18.6875,9.07936508 C18.4631343,9.07936508 18.28125,8.89459371 18.28125,8.66666667 C18.28125,8.43873963 18.4631343,8.25396825 18.6875,8.25396825 C18.9118657,8.25396825 19.09375,8.43873963 19.09375,8.66666667 C19.09375,8.89459371 18.9118657,9.07936508 18.6875,9.07936508 Z"
fill="#000000"></path>
</g>
</g>
</g>
</svg>

Before

Width:  |  Height:  |  Size: 328 B

After

Width:  |  Height:  |  Size: 4.4 KiB

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/frameworks",
"version": "0.5.1-canary.20",
"version": "0.6.1-canary.1",
"main": "./dist/frameworks.js",
"types": "./dist/frameworks.d.ts",
"files": [
@@ -20,7 +20,7 @@
"@types/js-yaml": "3.12.1",
"@types/node": "12.0.4",
"@types/node-fetch": "2.5.8",
"@vercel/routing-utils": "1.11.4-canary.6",
"@vercel/routing-utils": "1.12.0",
"ajv": "6.12.2",
"typescript": "4.3.4"
}

View File

@@ -1941,7 +1941,7 @@ export const frameworks = [
},
devCommand: {
placeholder: 'vite',
value: 'vite',
value: 'vite --port $PORT',
},
outputDirectory: {
value: 'dist',
@@ -2006,6 +2006,50 @@ export const frameworks = [
},
],
},
{
name: 'Sanity',
slug: 'sanity',
demo: 'https://sanity-studio-template.vercel.app',
logo: 'https://raw.githubusercontent.com/vercel/vercel/main/packages/frameworks/logos/sanity.svg',
tagline:
'The structured content platform.',
description: 'A Sanity Studio',
website: 'https://www.sanity.io',
envPrefix: 'SANITY_STUDIO_',
detectors: {
every: [
{
path: 'sanity.json',
},
],
},
settings: {
installCommand: {
placeholder: '`yarn install` or `npm install`',
},
buildCommand: {
placeholder: '`npm run build` or `sanity build`',
value: 'sanity build',
},
devCommand: {
value: 'sanity start --port $PORT',
},
outputDirectory: {
value: 'dist',
},
},
dependency: '@sanity/cli',
getOutputDirName: async () => 'dist',
defaultRoutes: [
{
handle: 'filesystem',
},
{
src: '/(.*)',
dest: '/index.html',
},
],
},
{
name: 'Other',
slug: null,

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/go",
"version": "1.2.4-canary.5",
"version": "1.3.1-canary.1",
"license": "MIT",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
@@ -24,7 +24,7 @@
"@types/fs-extra": "^5.0.5",
"@types/node-fetch": "^2.3.0",
"@types/tar": "^4.0.0",
"@vercel/build-utils": "2.13.1-canary.0",
"@vercel/build-utils": "2.14.1-canary.1",
"@vercel/ncc": "0.24.0",
"async-retry": "1.3.1",
"execa": "^1.0.0",

View File

@@ -1,2 +0,0 @@
entries.js
dist

View File

@@ -1,2 +0,0 @@
/dist
/test/fixtures/*/.output

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env node
const fs = require('fs-extra');
const execa = require('execa');
const { join } = require('path');
async function main() {
const srcDir = join(__dirname, 'src');
const outDir = join(__dirname, 'dist');
// Start fresh
await fs.remove(outDir);
await execa(
'ncc',
['build', join(srcDir, 'index.ts'), '-o', outDir, '--external', 'esbuild'],
{
stdio: 'inherit',
}
);
await fs.copyFile(
join(__dirname, 'src/entries.js'),
join(outDir, 'entries.js')
);
}
main().catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -1,59 +0,0 @@
{
"name": "vercel-plugin-middleware",
"version": "0.0.0-canary.25",
"license": "MIT",
"main": "./dist/index",
"homepage": "",
"repository": {
"type": "git",
"url": "https://github.com/vercel/vercel.git",
"directory": "packages/middleware"
},
"scripts": {
"build": "node build",
"test-unit": "jest",
"prepublishOnly": "node build"
},
"files": [
"dist"
],
"dependencies": {
"esbuild": "0.13.12"
},
"devDependencies": {
"@peculiar/webcrypto": "1.2.0",
"@types/cookie": "0.4.1",
"@types/glob": "7.2.0",
"@types/http-proxy": "1.17.7",
"@types/jest": "27.0.2",
"@types/node": "16.11.6",
"@types/node-fetch": "^2",
"@types/ua-parser-js": "0.7.36",
"@types/uuid": "8.3.1",
"@vercel/build-utils": "2.13.1-canary.0",
"@vercel/ncc": "0.24.0",
"cookie": "0.4.1",
"formdata-node": "4.3.1",
"glob": "7.2.0",
"http-proxy": "1.18.1",
"node-fetch": "^2",
"ua-parser-js": "1.0.2",
"url": "0.11.0",
"uuid": "8.3.2",
"web-streams-polyfill": "3.1.1"
},
"jest": {
"preset": "ts-jest",
"globals": {
"ts-jest": {
"diagnostics": false,
"isolatedModules": true
}
},
"verbose": false,
"testEnvironment": "node",
"testMatch": [
"<rootDir>/test/**/*.test.ts"
]
}
}

View File

@@ -1,18 +0,0 @@
import * as middleware from './_temp_middleware';
_ENTRIES = typeof _ENTRIES === 'undefined' ? {} : _ENTRIES;
_ENTRIES['middleware_pages/_middleware'] = {
default: async function (ev) {
const result = await middleware.default(ev.request, ev);
return {
promise: Promise.resolve(),
waitUntil: Promise.resolve(),
response:
result ||
new Response(null, {
headers: {
'x-middleware-next': 1,
},
}),
};
},
};

View File

@@ -1,52 +0,0 @@
import path from 'path';
import * as esbuild from 'esbuild';
const processInjectFile = `
// envOverride is passed by esbuild plugin
const env = envOverride
function cwd() {
return '/'
}
function chdir(dir) {
throw new Error('process.chdir is not supported')
}
export const process = {
argv: [],
env,
chdir,
cwd,
};
`;
export function nodeProcessPolyfillPlugin({ env = {} } = {}): esbuild.Plugin {
return {
name: 'node-process-polyfill',
setup({ initialOptions, onResolve, onLoad }) {
onResolve({ filter: /_virtual-process-polyfill_\.js/ }, ({ path }) => {
return {
path,
sideEffects: false,
};
});
onLoad({ filter: /_virtual-process-polyfill_\.js/ }, () => {
const contents = `const envOverride = ${JSON.stringify(
env
)};\n${processInjectFile}`;
return {
loader: 'js',
contents,
};
});
const polyfills = [
path.resolve(__dirname, '_virtual-process-polyfill_.js'),
];
if (initialOptions.inject) {
initialOptions.inject.push(...polyfills);
} else {
initialOptions.inject = [...polyfills];
}
},
};
}

View File

@@ -1,341 +0,0 @@
import util from 'util';
import { extname, join, basename } from 'path';
import * as esbuild from 'esbuild';
import { promises as fsp } from 'fs';
import { IncomingMessage, ServerResponse } from 'http';
import libGlob from 'glob';
import Proxy from 'http-proxy';
import { _experimental_updateFunctionsManifest } from '@vercel/build-utils';
import { run } from './websandbox';
import type { FetchEventResult } from './websandbox/types';
import { ParsedUrlQuery, stringify as stringifyQs } from 'querystring';
import {
format as formatUrl,
parse as parseUrl,
UrlWithParsedQuery,
} from 'url';
import { toNodeHeaders } from './websandbox/utils';
import { nodeProcessPolyfillPlugin } from './esbuild-plugins';
// Promisified `glob` so middleware lookups can be awaited.
const glob = util.promisify(libGlob);
// Middleware source extensions we know how to compile.
const SUPPORTED_EXTENSIONS = ['.js', '.ts'];
// File name of the `entries.js` file that gets copied into the
// project directory. Use a name that is unlikely to conflict.
const TMP_ENTRIES_NAME = '.output/inputs/middleware/___vc_entries.js';
// Intermediate strict-mode bundle of the user's middleware (see build()).
const TMP_MIDDLEWARE_BUNDLE = '.output/inputs/middleware/_temp_middleware.js';
/**
 * Locates the project's root-level `_middleware.*` file, if any.
 *
 * Returns `undefined` when no middleware file exists; throws when more than
 * one candidate is found or the extension is unsupported.
 */
async function getMiddlewareFile(workingDirectory: string) {
  // Only the root-level `_middleware.*` files are considered.
  // For more granular routing, the Project's Framework (i.e. Next.js)
  // middleware support should be used.
  const matches = await glob(join(workingDirectory, '_middleware.*'));
  if (!matches.length) {
    // No middleware file at the root of the project, so bail...
    return;
  }
  if (matches.length > 1) {
    throw new Error(
      `Only one middleware file is allowed. Found: ${matches.join(', ')}`
    );
  }
  const [middlewarePath] = matches;
  const ext = extname(middlewarePath);
  if (!SUPPORTED_EXTENSIONS.includes(ext)) {
    throw new Error(`Unsupported file type: ${ext}`);
  }
  return middlewarePath;
}
/**
 * Compiles the project's root `_middleware.*` file into
 * `.output/server/pages/_middleware.js` and records it in the functions
 * manifest. No-op when the project has no middleware file.
 */
export async function build({ workPath }: { workPath: string }) {
  const entriesPath = join(workPath, TMP_ENTRIES_NAME);
  const transientFilePath = join(workPath, TMP_MIDDLEWARE_BUNDLE);
  const middlewareFile = await getMiddlewareFile(workPath);
  if (!middlewareFile) return;
  console.log('Compiling middleware file: %j', middlewareFile);
  /**
   * Two builds happen here, because esbuild doesn't offer a way to add a banner
   * to individual input files, and the entries wrapper relies on running in
   * non-strict mode to access the ENTRIES global.
   *
   * To work around this, we bundle the middleware directly and add
   * 'use strict'; to make the entire bundle run in strict mode. We then bundle
   * a second time, adding the global ENTRIES wrapper and preserving the
   * 'use strict' for the entire scope of the original bundle.
   */
  try {
    await esbuild.build({
      entryPoints: [middlewareFile],
      bundle: true,
      absWorkingDir: workPath,
      outfile: transientFilePath,
      banner: {
        js: '"use strict";',
      },
      plugins: [nodeProcessPolyfillPlugin({ env: process.env })],
      format: 'cjs',
    });
    // Create `_ENTRIES` wrapper
    await fsp.copyFile(join(__dirname, 'entries.js'), entriesPath);
    await esbuild.build({
      entryPoints: [entriesPath],
      bundle: true,
      absWorkingDir: workPath,
      outfile: join(workPath, '.output/server/pages/_middleware.js'),
    });
  } finally {
    // Best-effort cleanup. If the first esbuild step failed, one or both
    // temp files were never created; swallow unlink errors (e.g. ENOENT)
    // so cleanup can never mask the original build error.
    const remove = (file: string) => fsp.unlink(file).catch(() => undefined);
    await Promise.all([remove(transientFilePath), remove(entriesPath)]);
  }
  const fileName = basename(middlewareFile);
  const pages: { [key: string]: any } = {};
  // Middleware matches every route ('^/.*$') and runs in the edge ("web")
  // runtime.
  pages[fileName] = {
    runtime: 'web',
    env: [],
    files: ['server/pages/_middleware.js'],
    name: 'pages/_middleware',
    page: '/',
    regexp: '^/.*$',
    sortingIndex: 1,
  };
  await _experimental_updateFunctionsManifest({ workPath, pages });
}
/**
 * Serializes `query` back into a query string, percent-encoding only values
 * that appeared in the request's initial query (`req.__NEXT_INIT_QUERY`).
 * Values introduced later (e.g. by middleware rewrites) are emitted verbatim.
 */
const stringifyQuery = (req: IncomingMessage, query: ParsedUrlQuery) => {
  const originalValues = new Set(
    Object.values((req as any).__NEXT_INIT_QUERY)
  );
  return stringifyQs(query, undefined, undefined, {
    encodeURIComponent: (value: any) =>
      originalValues.has(value) ? encodeURIComponent(value) : value,
  });
};
// eslint-disable-next-line
// Runs the root middleware for one dev-server request and applies its result
// to `res`: header merging, preflight, redirects, rewrites (proxied when
// absolute), and "refresh" (middleware-produced bodies). Returns
// `{ finished }` plus an optional rewritten pathname/query for the caller.
async function runMiddlewareCatchAll(
  req: IncomingMessage,
  res: ServerResponse,
  requestId: string,
  name: string,
  path: string
) {
  let result: FetchEventResult | null = null;
  const parsedUrl = parseUrl(req.url!, true);
  try {
    result = await runMiddleware({
      request: req,
      response: res,
      name: name,
      path,
      requestId: requestId,
      parsedUrl,
      parsed: parseUrl(req.url!, true),
    });
  } catch (err) {
    // Middleware crashed: report it and stop routing this request.
    console.error(err);
    return { finished: true, error: err };
  }
  if (result === null) {
    return { finished: true };
  }
  // No rewrite/next/redirect header: treat the response as a direct reply
  // and mark it so the refresh branch below streams its body.
  if (
    !result.response.headers.has('x-middleware-rewrite') &&
    !result.response.headers.has('x-middleware-next') &&
    !result.response.headers.has('Location')
  ) {
    result.response.headers.set('x-middleware-refresh', '1');
  }
  // Internal control header; never forwarded to the client.
  result.response.headers.delete('x-middleware-next');
  // Copy middleware headers onto the outgoing response. content-encoding is
  // skipped because the body is re-emitted uncompressed here.
  for (const [key, value] of Object.entries(
    toNodeHeaders(result.response.headers)
  )) {
    if (key !== 'content-encoding' && value !== undefined) {
      res.setHeader(key, value);
    }
  }
  // Preflight HEAD probes only need the headers.
  const preflight =
    req.method === 'HEAD' && req.headers['x-middleware-preflight'];
  if (preflight) {
    res.writeHead(200);
    res.end();
    return {
      finished: true,
    };
  }
  res.statusCode = result.response.status;
  res.statusMessage = result.response.statusText;
  // Redirect: emitted as-is; 308s additionally get a Refresh header for
  // clients that do not re-send the method on 308.
  const location = result.response.headers.get('Location');
  if (location) {
    res.statusCode = result.response.status;
    if (res.statusCode === 308) {
      res.setHeader('Refresh', `0;url=${location}`);
    }
    res.end();
    return {
      finished: true,
    };
  }
  // Rewrite: absolute URLs are proxied out; relative ones are handed back
  // to the router with the original and rewritten query merged.
  if (result.response.headers.has('x-middleware-rewrite')) {
    const rewrite = result.response.headers.get('x-middleware-rewrite')!;
    const rewriteParsed = parseUrl(rewrite, true);
    if (rewriteParsed.protocol) {
      return proxyRequest(req, res, rewriteParsed);
    }
    (req as any)._nextRewroteUrl = rewrite;
    (req as any)._nextDidRewrite = (req as any)._nextRewroteUrl !== req.url;
    return {
      finished: false,
      pathname: rewriteParsed.pathname,
      query: {
        ...parsedUrl.query,
        ...rewriteParsed.query,
      },
    };
  }
  // Refresh: middleware produced the response body itself; stream it out.
  if (result.response.headers.has('x-middleware-refresh')) {
    res.writeHead(result.response.status);
    if (result.response.body instanceof Buffer) {
      res.write(result.response.body);
    } else {
      //@ts-ignore
      for await (const chunk of result.response.body || []) {
        res.write(chunk);
      }
    }
    res.end();
    return {
      finished: true,
    };
  }
  return {
    finished: false,
  };
}
/**
 * Proxies `req` to the absolute URL produced by a middleware rewrite.
 * Resolves once the proxied request closes; rejects on proxy error.
 */
const proxyRequest = async (
  req: IncomingMessage,
  res: ServerResponse,
  parsedUrl: UrlWithParsedQuery
) => {
  // Re-serialize the query with middleware-aware encoding before formatting
  // the final target URL.
  const { query } = parsedUrl;
  delete (parsedUrl as any).query;
  parsedUrl.search = stringifyQuery(req, query);
  const target = formatUrl(parsedUrl);
  const proxy = new Proxy({
    target,
    changeOrigin: true,
    ignorePath: true,
    xfwd: true,
    proxyTimeout: 30_000, // limit proxying to 30 seconds
  });
  await new Promise((resolveProxy, rejectProxy) => {
    let settled = false;
    const settleOnce = (settle: () => void) => {
      if (!settled) {
        settled = true;
        settle();
      }
    };
    proxy.on('proxyReq', (proxyReq: any) => {
      proxyReq.on('close', () => settleOnce(() => resolveProxy(true)));
    });
    proxy.on('error', (err: any) => settleOnce(() => rejectProxy(err)));
    proxy.web(req, res);
  });
  return {
    finished: true,
  };
};
/**
 * Executes the middleware bundle at `params.path` inside the web sandbox
 * and returns its result. Rejections from `waitUntil` are logged rather
 * than propagated.
 */
async function runMiddleware(params: {
  request: IncomingMessage;
  response: ServerResponse;
  parsedUrl: UrlWithParsedQuery;
  parsed: UrlWithParsedQuery;
  requestId: string;
  name: string;
  path: string;
}): Promise<FetchEventResult | null> {
  const { request, name, path } = params;
  const page: { name?: string; params?: { [key: string]: string } } = {};
  const result = await run({
    name,
    path,
    request: {
      headers: request.headers,
      method: request.method || 'GET',
      url: request.url!,
      // url: (params.request as any).__NEXT_INIT_URL,
      page,
    },
  });
  result.waitUntil.catch((error: any) => {
    console.error(`Uncaught: middleware waitUntil errored`, error);
  });
  return result;
}
/**
 * Entry point used by `vercel dev` on every HTTP request: runs the
 * project's root middleware (if any) in the `vm` sandbox and reports
 * whether the request was fully handled. Returns `finished: false`
 * immediately when no middleware file exists.
 */
export async function runDevMiddleware(
  req: IncomingMessage,
  res: ServerResponse,
  workingDirectory: string
): ReturnType<typeof runMiddlewareCatchAll> {
  const middlewareFile = await getMiddlewareFile(workingDirectory);
  if (middlewareFile) {
    return runMiddlewareCatchAll(
      req,
      res,
      '',
      basename(middlewareFile),
      middlewareFile
    );
  }
  return {
    finished: false,
  };
}

View File

@@ -1,65 +0,0 @@
import type { RequestData, FetchEventResult } from './types';
import { DeprecationError } from './error';
import { fromNodeHeaders } from './utils';
import { NextFetchEvent } from './spec-extension/fetch-event';
import { NextRequest, RequestInit } from './spec-extension/request';
import { SpecResponse } from './spec-extension/response';
import { waitUntilSymbol } from './spec-compliant/fetch-event';
import { Response } from 'node-fetch';
/**
 * Bridges a sandbox request to the user's middleware handler: builds the
 * `NextRequest`/`NextFetchEvent` pair, invokes the handler, and falls back
 * to a pass-through response when the handler returns nothing.
 */
export async function adapter(params: {
  handler: (request: NextRequest, event: NextFetchEvent) => Promise<Response>;
  page: string;
  request: RequestData;
}): Promise<FetchEventResult> {
  const { request: requestData, page, handler } = params;
  // Relative URLs are resolved against the incoming Host header.
  const url = requestData.url.startsWith('/')
    ? `https://${requestData.headers.host}${requestData.url}`
    : requestData.url;
  const request = new NextRequestHint({
    page,
    input: url,
    init: {
      geo: requestData.geo,
      //@ts-ignore
      headers: fromNodeHeaders(requestData.headers),
      ip: requestData.ip,
      method: requestData.method,
      page: requestData.page,
    },
  });
  const event = new NextFetchEvent({ request, page });
  const original = await handler(request, event);
  return {
    response: original || SpecResponse.next(),
    waitUntil: Promise.all(event[waitUntilSymbol]),
  };
}
/**
 * `NextRequest` subclass that remembers the page it was created for and
 * rejects the deprecated FetchEvent-style API with a descriptive error.
 */
class NextRequestHint extends NextRequest {
  sourcePage: string;

  constructor(params: {
    init: RequestInit;
    input: Request | string;
    page: string;
  }) {
    //@ts-ignore
    super(params.input, params.init);
    this.sourcePage = params.page;
  }

  // Legacy `event.request` accessor — no longer supported.
  get request() {
    throw new DeprecationError({ page: this.sourcePage });
  }

  // Legacy `event.respondWith` — no longer supported.
  respondWith() {
    throw new DeprecationError({ page: this.sourcePage });
  }

  // Legacy `event.waitUntil` on the request — no longer supported.
  waitUntil() {
    throw new DeprecationError({ page: this.sourcePage });
  }
}

View File

@@ -1,12 +0,0 @@
/**
 * Thrown when middleware uses the removed FetchEvent-style API
 * (`event.request`, `respondWith`, `waitUntil`) instead of the direct
 * async `middleware(request, event)` signature.
 */
export class DeprecationError extends Error {
  constructor({ page }: { page: string }) {
    const message = `The middleware "${page}" accepts an async API directly with the form:
export function middleware(request, event) {
return new Response("Hello " + request.url)
}
Read more: https://nextjs.org/docs/messages/middleware-new-signature
`;
    super(message);
  }
}

View File

@@ -1,76 +0,0 @@
import { isBlob } from './is';
import { streamToIterator } from './utils';
// Multipart framing primitives (names are shared with the sibling
// iterator/length helpers below).
const carriage = '\r\n';
const dashes = '--';
const carriageLength = 2;

// Escapes double quotes for use inside quoted header parameters.
function escape(str: string) {
  return str.replace(/"/g, '\\"');
}

// Closing delimiter of a multipart body: --<boundary>--\r\n\r\n
function getFooter(boundary: string) {
  return `${dashes}${boundary}${dashes}${carriage.repeat(2)}`;
}

// Part header: delimiter + Content-Disposition (plus filename/Content-Type
// for blob fields), terminated by a blank line.
function getHeader(boundary: string, name: string, field: FormDataEntryValue) {
  const parts = [`${dashes}${boundary}${carriage}`];
  parts.push(`Content-Disposition: form-data; name="${escape(name)}"`);
  if (isBlob(field)) {
    parts.push(`; filename="${escape(field.name)}"${carriage}`);
    parts.push(`Content-Type: ${field.type || 'application/octet-stream'}`);
  }
  return `${parts.join('')}${carriage.repeat(2)}`;
}
/**
 * Generates a random 64-character lowercase-hex multipart boundary from
 * 32 cryptographically random bytes.
 */
export function getBoundary() {
  const bytes = new Uint8Array(32);
  crypto.getRandomValues(bytes);
  return Array.from(bytes, byte => byte.toString(16).padStart(2, '0')).join('');
}
/**
 * Streams `form` as multipart/form-data chunks: a header per field, the
 * field payload (blob streams forwarded chunk by chunk), a CRLF separator,
 * and finally the closing boundary.
 */
export async function* formDataIterator(
  form: FormData,
  boundary: string
): AsyncIterableIterator<Uint8Array> {
  const encoder = new TextEncoder();
  //@ts-ignore
  for (const [name, value] of form) {
    yield encoder.encode(getHeader(boundary, name, value));
    if (!isBlob(value)) {
      yield encoder.encode(value);
    } else {
      // @ts-ignore /shrug
      const blobStream: ReadableStream<Uint8Array> = value.stream();
      yield* streamToIterator(blobStream);
    }
    yield encoder.encode(carriage);
  }
  yield encoder.encode(getFooter(boundary));
}
/**
 * Computes the total byte length of the multipart encoding of `form`
 * (headers, payloads, separators, and closing boundary).
 */
export function getFormDataLength(form: FormData, boundary: string) {
  let total = Buffer.byteLength(getFooter(boundary));
  //@ts-ignore
  for (const [name, value] of form) {
    total += Buffer.byteLength(getHeader(boundary, name, value));
    total += isBlob(value) ? value.size : Buffer.byteLength(String(value));
    total += carriageLength;
  }
  return total;
}

View File

@@ -1 +0,0 @@
export * from './sandbox/sandbox';

View File

@@ -1,80 +0,0 @@
/**
 * The ArrayBuffer object is used to represent a generic, fixed-length raw
 * binary data buffer. It is an array of bytes, often referred to in other
 * languages as a "byte array". You cannot directly manipulate the contents of
 * an ArrayBuffer; instead, you create one of the typed array objects or a
 * DataView object which represents the buffer in a specific format, and use
 * that to read and write the contents of the buffer.
 */
export function isArrayBuffer(value: any): value is ArrayBuffer {
  // Must check against ArrayBuffer.prototype: the constructor function
  // itself is never in an instance's prototype chain, so the previous
  // `.call(ArrayBuffer, value)` form always returned false.
  return Object.prototype.isPrototypeOf.call(ArrayBuffer.prototype, value);
}
/**
 * ArrayBufferView is a helper type representing any of the JS TypedArray
 * types or DataView; detection delegates to the built-in brand check.
 */
export function isArrayBufferView(value: any): value is ArrayBufferView {
  const isView = ArrayBuffer.isView(value);
  return isView;
}
/**
 * The DataView view provides a low-level interface for reading and writing
 * multiple number types in a binary ArrayBuffer, without having to care about
 * the platform's endianness.
 */
export function isDataView(value: any): value is DataView {
  // Check against DataView.prototype — checking the constructor itself
  // (as before) never matches an instance's prototype chain.
  return Object.prototype.isPrototypeOf.call(DataView.prototype, value);
}
/**
 * The URLSearchParams interface defines utility methods to work with the
 * query string of a URL.
 */
export function isURLSearchParams(value: any): value is URLSearchParams {
  // Check against the prototype object — the previous form tested whether
  // the constructor function was in the chain, which is always false.
  return Object.prototype.isPrototypeOf.call(URLSearchParams.prototype, value);
}
/**
 * The Blob object represents a blob, which is a file-like object of immutable,
 * raw data; they can be read as text or binary data. Blobs can represent data
 * that isn't necessarily in a JavaScript-native format.
 */
export function isBlob(value: any): value is Blob {
  // Check against Blob.prototype; the constructor function itself is never
  // part of an instance's prototype chain.
  return Object.prototype.isPrototypeOf.call(Blob.prototype, value);
}
/**
 * The FormData interface provides a way to easily construct a set of key/value
 * pairs representing form fields and their values, which can then be easily
 * sent using the XMLHttpRequest.send() method. It uses the same format a
 * form would use if the encoding type were set to "multipart/form-data".
 */
export function isFormData(value: any): value is FormData {
  // Check against FormData.prototype; the constructor function itself is
  // never part of an instance's prototype chain.
  return Object.prototype.isPrototypeOf.call(FormData.prototype, value);
}
/**
 * The ReadableStream interface of the Streams API represents a readable stream
 * of byte data. Because we want to allow alternative implementations we also
 * duck type here.
 */
export function isReadableStream(value: any): value is ReadableStream {
  return (
    value &&
    // Prototype check must use ReadableStream.prototype — the constructor
    // itself never appears in an instance's chain. The duck-typed fallback
    // keeps cross-implementation streams working.
    (Object.prototype.isPrototypeOf.call(ReadableStream.prototype, value) ||
      (value.constructor.name === 'ReadableStream' && 'getReader' in value))
  );
}
/**
 * Checks if an object implements the Iterable protocol (truthy value with a
 * callable `Symbol.iterator`).
 */
export function isIterable(object: any): object is Iterable<unknown> {
  const iterable =
    object &&
    Symbol.iterator in object &&
    typeof object[Symbol.iterator] === 'function';
  return iterable;
}

View File

@@ -1,127 +0,0 @@
import { Crypto as WebCrypto } from '@peculiar/webcrypto';
import { TransformStream } from 'web-streams-polyfill';
import { v4 as uuid } from 'uuid';
import crypto from 'crypto';
/**
 * Decodes a base64 string into a binary ("latin1") string, mirroring the
 * browser `atob` global.
 */
export function atob(b64Encoded: string) {
  const decoded = Buffer.from(b64Encoded, 'base64');
  return decoded.toString('binary');
}
/**
 * Encodes a binary ("latin1") string as base64, mirroring the browser
 * `btoa` global.
 */
export function btoa(str: string) {
  const raw = Buffer.from(str, 'binary');
  return raw.toString('base64');
}
/**
 * Thin wrapper around the platform `TextEncoder`, handed to the sandbox
 * (re-exported below as `TextEncoder`).
 */
class TextEncoderRuntime {
  // Backing platform encoder (always UTF-8).
  encoder: TextEncoder = new TextEncoder();

  get encoding() {
    return this.encoder.encoding;
  }

  public encode(input: string) {
    return this.encoder.encode(input);
  }
}
/**
 * Thin wrapper around the platform `TextDecoder`, handed to the sandbox
 * (re-exported below as `TextDecoder`).
 */
class TextDecoderRuntime {
  // Backing platform decoder (default UTF-8, non-fatal).
  decoder: TextDecoder = new TextDecoder();

  get encoding() {
    return this.decoder.encoding;
  }

  get fatal() {
    return this.decoder.fatal;
  }

  get ignoreBOM() {
    return this.decoder.ignoreBOM;
  }

  public decode(input: BufferSource, options?: TextDecodeOptions) {
    return this.decoder.decode(input, options);
  }
}
export { TextDecoderRuntime as TextDecoder };
export { TextEncoderRuntime as TextEncoder };
// WebCrypto implementation handed to the sandbox. Node's native
// crypto.randomUUID is preferred; uuid.v4 is the fallback for runtimes
// where it is unavailable.
export class Crypto extends WebCrypto {
  // @ts-ignore Remove once types are updated and we deprecate node 12
  randomUUID = crypto.randomUUID || uuid;
}
/**
 * Minimal `ReadableStream` polyfill built on `TransformStream`.
 *
 * The constructor returns the transform's readable side directly (so
 * instances are real readable streams, not instances of this class) and
 * adapts the `UnderlyingSource` callbacks (`start`/`pull`/`cancel`) onto
 * it. String chunks are UTF-8 encoded before being written.
 */
export class ReadableStream<T> {
  constructor(opts: UnderlyingSource = {}) {
    let closed = false;
    let pullPromise: any;
    let transformController: TransformStreamDefaultController;
    const { readable, writable } = new TransformStream(
      {
        start: (controller: TransformStreamDefaultController) => {
          transformController = controller;
        },
      },
      undefined,
      {
        highWaterMark: 1,
      }
    );
    const writer = writable.getWriter();
    const encoder = new TextEncoder();
    // Controller surface handed to the UnderlyingSource callbacks.
    const controller: ReadableStreamController<T> = {
      get desiredSize() {
        return transformController.desiredSize;
      },
      close: () => {
        if (!closed) {
          closed = true;
          writer.close();
        }
      },
      enqueue: (chunk: T) => {
        writer.write(typeof chunk === 'string' ? encoder.encode(chunk) : chunk);
        pull();
      },
      error: (reason: any) => {
        transformController.error(reason);
      },
    };
    // Schedule at most one microtask-deferred call to the source's pull().
    const pull = () => {
      if (opts.pull) {
        if (!pullPromise) {
          pullPromise = Promise.resolve().then(() => {
            pullPromise = 0;
            opts.pull!(controller);
          });
        }
      }
    };
    if (opts.start) {
      opts.start(controller);
    }
    if (opts.cancel) {
      // Notify the source, then defer to the stream's own cancel. The
      // built-in cancel must be captured BEFORE overriding it — the
      // previous code called `readable.cancel` recursively (it referred to
      // the overridden property), overflowing the stack on cancellation.
      const originalCancel = readable.cancel.bind(readable);
      readable.cancel = (reason: any) => {
        opts.cancel!(reason);
        return originalCancel(reason);
      };
    }
    pull();
    return readable;
  }
}

View File

@@ -1,228 +0,0 @@
import type { RequestData, FetchEventResult, NodeHeaders } from '../types';
import { Blob, File, FormData } from 'formdata-node';
import { dirname, extname, resolve } from 'path';
import { readFileSync } from 'fs';
import { TransformStream } from 'web-streams-polyfill';
import * as polyfills from './polyfills';
import cookie from 'cookie';
import vm from 'vm';
import fetch, {
Headers,
RequestInit,
Response,
Request,
RequestInfo,
} from 'node-fetch';
import { adapter } from '../adapter';
import * as esbuild from 'esbuild';
import m from 'module';
// Shape of objects carrying an `href` (URL-like fetch inputs).
interface URLLike {
  href: string;
}

// Module-level memoization of the sandbox: the shared global context, the
// cached source text per path (for invalidation), preloaded `require`
// results, and the vm context. Set to `undefined` to rebuild from scratch.
let cache:
  | {
      context: { [key: string]: any };
      paths: Map<string, string>;
      require: Map<string, any>;
      sandbox: vm.Context;
    }
  | undefined;

// Matches webpack's injected compilation-hash setter so that hash-only
// changes can be ignored during cache invalidation.
const WEBPACK_HASH_REGEX =
  /__webpack_require__\.h = function\(\) \{ return "[0-9a-f]+"; \}/g;
/**
 * Invalidates the sandbox when the cached source for `path` differs from
 * `content`, ignoring changes that only touch the webpack compilation
 * hash. (Disabling HMR for middleware entries would make this hack
 * unnecessary.)
 */
export function clearSandboxCache(path: string, content: Buffer | string) {
  const cachedSource = cache?.paths.get(path);
  if (cachedSource === undefined) return;
  const normalizedPrev = cachedSource.replace(WEBPACK_HASH_REGEX, '');
  const normalizedNext = content.toString().replace(WEBPACK_HASH_REGEX, '');
  if (normalizedPrev !== normalizedNext) {
    cache = undefined;
  }
}
/**
 * Runs the middleware bundle at `params.path` in a shared `vm` sandbox and
 * returns the adapter's result. The sandbox (context + preloaded require
 * map) is created lazily and reused across calls; any failure drops it so
 * the next run rebuilds from scratch.
 */
export async function run(params: {
  name: string;
  path: string;
  request: RequestData;
}): Promise<FetchEventResult> {
  if (cache === undefined) {
    // Web-platform globals exposed to sandboxed middleware code.
    const context: { [key: string]: any } = {
      atob: polyfills.atob,
      Blob,
      btoa: polyfills.btoa,
      clearInterval,
      clearTimeout,
      console: {
        assert: console.assert.bind(console),
        error: console.error.bind(console),
        info: console.info.bind(console),
        log: console.log.bind(console),
        time: console.time.bind(console),
        timeEnd: console.timeEnd.bind(console),
        timeLog: console.timeLog.bind(console),
        warn: console.warn.bind(console),
      },
      Crypto: polyfills.Crypto,
      crypto: new polyfills.Crypto(),
      Response,
      Headers,
      Request,
      // fetch wrapper: resolves relative URLs against the incoming Host,
      // tags the request via x-middleware-subrequest headers, and merges
      // headers when a Request object is passed as input.
      fetch: (input: RequestInfo, init: RequestInit = {}) => {
        const url = getFetchURL(input, params.request.headers);
        init.headers = getFetchHeaders(params.name, init);
        if (isRequestLike(input)) {
          return fetch(url, {
            ...init,
            headers: {
              ...Object.fromEntries(input.headers),
              ...Object.fromEntries(init.headers),
            },
          });
        }
        return fetch(url, init);
      },
      File,
      FormData,
      // Snapshot of env vars; mutations inside the sandbox do not leak out.
      process: { env: { ...process.env } },
      ReadableStream: polyfills.ReadableStream,
      setInterval,
      setTimeout,
      TextDecoder: polyfills.TextDecoder,
      TextEncoder: polyfills.TextEncoder,
      TransformStream,
      URL,
      URLSearchParams,
    };
    // `self` mirrors the global object, as in worker environments.
    context.self = context;
    cache = {
      context,
      // Pre-resolved modules served to sandboxRequire without evaluation.
      require: new Map<string, any>([
        [require.resolve('cookie'), { exports: cookie }],
      ]),
      paths: new Map<string, string>(),
      sandbox: vm.createContext(context),
    };
  }
  try {
    const content = readFileSync(params.path, 'utf-8');
    // Transpile to strict-mode CommonJS before evaluating in the sandbox.
    const esBuildResult = esbuild.transformSync(content, {
      format: 'cjs',
      banner: '"use strict";',
    });
    // Wrap like a CommonJS module and evaluate against the shared sandbox.
    const x = vm.runInNewContext(m.wrap(esBuildResult.code), cache.sandbox, {
      filename: params.path,
    });
    const module = {
      exports: {},
      loaded: false,
      id: params.path,
    };
    x(
      module.exports,
      sandboxRequire.bind(null, params.path),
      module,
      dirname(params.path),
      params.path
    );
    // The default export is the middleware handler.
    const adapterResult = await adapter({
      request: params.request,
      // @ts-ignore
      handler: module.exports.default,
      page: params.path,
    });
    return adapterResult;
  } catch (error) {
    // Drop the sandbox on any failure so stale state cannot persist.
    cache = undefined;
    throw error;
  }
}
/**
 * Minimal CommonJS `require` used inside the sandbox: resolves the
 * specifier relative to the requiring file, transpiles it with esbuild
 * (JSON supported), and evaluates it in the shared vm context in strict
 * mode.
 */
function sandboxRequire(referrer: string, specifier: string) {
  const resolved = require.resolve(specifier, {
    paths: [resolve(dirname(referrer))],
  });
  const cached = cache?.require.get(resolved);
  if (cached !== undefined) {
    return cached.exports;
  }
  const module = {
    exports: {},
    loaded: false,
    id: resolved,
  };
  // Registered before evaluation so circular requires observe the
  // (partial) exports object.
  cache?.require.set(resolved, module);
  const transformOptions: esbuild.TransformOptions = {
    format: 'cjs',
    banner: '"use strict";',
  };
  if (extname(resolved) === '.json') {
    transformOptions.loader = 'json';
  }
  const transformedContent = esbuild.transformSync(
    readFileSync(resolved, 'utf-8'),
    transformOptions
  ).code;
  const fn = vm.runInContext(
    `(function(module,exports,require,__dirname,__filename) {${transformedContent}\n})`,
    cache!.sandbox
  );
  try {
    fn(
      module,
      module.exports,
      sandboxRequire.bind(null, resolved),
      dirname(resolved),
      resolved
    );
  } finally {
    // NOTE(review): this deletes the entry even on success, so only the
    // preloaded modules (e.g. `cookie`) stay cached and every other module
    // is re-evaluated on each require. Presumably intentional for dev
    // freshness — confirm before changing.
    cache?.require.delete(resolved);
  }
  module.loaded = true;
  return module.exports;
}
/**
 * Builds the headers for a `fetch` issued from inside middleware: appends
 * this middleware's name to the `x-middleware-subrequest` chain and forces
 * the Next.js middleware user-agent.
 */
function getFetchHeaders(middleware: string, init: RequestInit) {
  const headers = new Headers(init.headers ?? {});
  const chain = (headers.get(`x-middleware-subrequest`) || '')
    .split(':')
    .concat(middleware)
    .join(':');
  headers.set(`x-middleware-subrequest`, chain);
  headers.set(`user-agent`, `Next.js Middleware`);
  return headers;
}
/**
 * Resolves a fetch input (string URL, URL-like, or Request-like) to an
 * absolute URL string. Relative paths are resolved against the incoming
 * Host header, using plain http for localhost.
 */
function getFetchURL(input: RequestInfo, headers: NodeHeaders = {}): string {
  let url: string;
  if (isRequestLike(input)) {
    url = input.url;
  } else if (isURLLike(input)) {
    url = input.href;
  } else {
    url = input;
  }
  if (!url.startsWith('/')) {
    return url;
  }
  const host = headers.host?.toString();
  const isLocalhost =
    host === '127.0.0.1' ||
    host === 'localhost' ||
    host?.startsWith('localhost:');
  return `${isLocalhost ? 'http' : 'https'}://${host}${url}`;
}

// Anything carrying an `href` property (e.g. URL instances).
function isURLLike(obj: unknown): obj is URLLike {
  return Boolean(obj && typeof obj === 'object' && 'href' in obj);
}

// Anything carrying a `url` property (e.g. Request instances).
function isRequestLike(obj: unknown): obj is Request {
  return Boolean(obj && typeof obj === 'object' && 'url' in obj);
}

View File

@@ -1,237 +0,0 @@
import { formDataIterator, getBoundary } from '../form-data';
import { streamToIterator } from '../utils';
import * as util from '../is';
import { URLSearchParams } from 'url';
// Symbol key hiding Body internals from consumers.
const INTERNALS = Symbol('internal body');

/**
 * WHATWG `Body` mixin implementation backing Request/Response: lazily
 * exposes the raw initializer as a ReadableStream and implements the
 * one-shot consumption methods (arrayBuffer/blob/formData/text/json).
 */
abstract class BaseBody implements Body {
  abstract headers: Headers;

  [INTERNALS]: {
    bodyInit?: BodyInit;
    boundary?: string;
    disturbed: boolean;
    stream?: ReadableStream<Uint8Array> | null;
  };

  constructor(bodyInit?: BodyInit) {
    this[INTERNALS] = {
      bodyInit: bodyInit,
      disturbed: false,
    };
    // FormData bodies need a multipart boundary up front so the
    // Content-Type header can reference it (see extractContentType).
    if (util.isFormData(bodyInit)) {
      this[INTERNALS].boundary = getBoundary();
    }
  }

  // Lazily built stream view over the raw initializer. Reading through the
  // returned reader marks the body as disturbed.
  get body(): ReadableStream<Uint8Array> | null {
    const body = this[INTERNALS].bodyInit;
    if (!body) {
      return null;
    }
    // eslint-disable-next-line
    const that = this;
    if (!this[INTERNALS].stream) {
      const readable = new ReadableStream({
        async start(controller) {
          if (typeof body === 'string') {
            const encoder = new TextEncoder();
            controller.enqueue(encoder.encode(body));
          } else if (util.isBlob(body)) {
            const buffer = await body.arrayBuffer();
            controller.enqueue(new Uint8Array(buffer));
          } else if (util.isDataView(body)) {
            controller.enqueue(body);
          } else if (util.isArrayBuffer(body)) {
            controller.enqueue(body);
          } else if (util.isArrayBufferView(body)) {
            controller.enqueue(body);
          } else if (util.isURLSearchParams(body)) {
            const encoder = new TextEncoder();
            controller.enqueue(encoder.encode(body.toString()));
          } else if (util.isFormData(body)) {
            // Multipart-encode form fields using the boundary chosen in
            // the constructor.
            for await (const chunk of formDataIterator(
              body,
              that[INTERNALS].boundary!
            )) {
              controller.enqueue(chunk);
            }
          } else if (util.isReadableStream(body)) {
            for await (const chunk of streamToIterator(body)) {
              if (chunk.length) {
                controller.enqueue(chunk);
              }
            }
          } else {
            // Fallback: stringify unknown values (e.g. "[object Object]").
            const text = Object.prototype.toString.call(body);
            const encoder = new TextEncoder();
            controller.enqueue(encoder.encode(text));
          }
          controller.close();
        },
      });
      // Spy on reading chunks to set the stream as disturbed
      const getReader = readable.getReader.bind(readable);
      readable.getReader = () => {
        const reader = getReader();
        const read = reader.read.bind(reader);
        reader.read = () => {
          this[INTERNALS].disturbed = true;
          return read();
        };
        return reader;
      };
      this[INTERNALS].stream = readable;
    }
    return this[INTERNALS].stream!;
  }

  // True once the body has been read (directly or via a consumer below).
  get bodyUsed(): boolean {
    return this[INTERNALS].disturbed;
  }

  // Drains the body stream into a single Uint8Array; rejects if the body
  // was already consumed.
  _consume() {
    if (this[INTERNALS].disturbed) {
      return Promise.reject(
        new TypeError(
          `Body has already been used. It can only be used once. Use tee() first if you need to read it twice.`
        )
      );
    }
    this[INTERNALS].disturbed = true;
    const body = this.body;
    return new Promise<Uint8Array>((resolve, reject) => {
      let buffer = new Uint8Array(0);
      if (!body) {
        return resolve(buffer);
      }
      const reader = body.getReader();
      (function pump() {
        reader.read().then(({ value, done }) => {
          if (done) {
            return resolve(buffer);
          } else if (value) {
            // Grow the accumulator by concatenation.
            const merge = new Uint8Array(buffer.length + value.length);
            merge.set(buffer);
            merge.set(value, buffer.length);
            buffer = merge;
          }
          pump();
        }, reject);
      })();
    });
  }

  async arrayBuffer() {
    const buffer = await this._consume();
    const arrayBuffer = new ArrayBuffer(buffer.length);
    const view = new Uint8Array(arrayBuffer);
    for (let i = 0; i < buffer.length; ++i) {
      view[i] = buffer[i];
    }
    return arrayBuffer;
  }

  async blob() {
    const buffer = await this._consume();
    return new Blob([buffer]);
  }

  // Only URLSearchParams and FormData initializers can be re-read as
  // FormData; parsing raw multipart bodies is not implemented.
  async formData() {
    const bodyInit = this[INTERNALS].bodyInit;
    if (util.isURLSearchParams(bodyInit)) {
      const form = new FormData();
      for (const [key, value] of bodyInit) {
        form.append(key, value);
      }
      return form;
    } else if (util.isFormData(bodyInit)) {
      return bodyInit;
    } else {
      throw new TypeError(
        `Unrecognized Content-Type header value. FormData can only parse the following MIME types: multipart/form-data, application/x-www-form-urlencoded.`
      );
    }
  }

  async text() {
    const decoder = new TextDecoder();
    const buffer = await this._consume();
    return decoder.decode(buffer);
  }

  async json() {
    const text = await this.text();
    try {
      return JSON.parse(text);
    } catch (err: any) {
      throw new TypeError(`invalid json body reason: ${err.message}`);
    }
  }
}
// Public name matching the WHATWG `Body` mixin.
export { BaseBody as Body };

// Accepted body initializers, mirroring the fetch spec's BodyInit.
export type BodyInit =
  | null
  | string
  | Blob
  | BufferSource
  | FormData
  | URLSearchParams
  | ReadableStream<Uint8Array>;
/**
 * Infers the default `Content-Type` for a body initializer, mirroring the
 * fetch spec. Returns `null` when no type should be set (binary buffers,
 * views, and streams).
 */
export function extractContentType(instance: BaseBody) {
  const body = instance[INTERNALS].bodyInit;
  if (typeof body === 'string') return 'text/plain;charset=UTF-8';
  if (util.isBlob(body)) return body.type;
  if (util.isURLSearchParams(body)) {
    return 'application/x-www-form-urlencoded;charset=UTF-8';
  }
  if (util.isFormData(body)) {
    return `multipart/form-data;boundary=${instance[INTERNALS].boundary}`;
  }
  if (
    util.isDataView(body) ||
    util.isArrayBuffer(body) ||
    util.isArrayBufferView(body) ||
    util.isReadableStream(body)
  ) {
    return null;
  }
  return 'text/plain;charset=UTF-8';
}
/**
 * Returns a body suitable for a clone. Streams are tee'd (the instance
 * keeps one branch, the clone gets the other); any other initializer is
 * shared as-is. Throws if the body was already consumed.
 */
export function cloneBody(instance: BaseBody) {
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }
  const body = instance[INTERNALS].bodyInit;
  if (!util.isReadableStream(body)) {
    return body || null;
  }
  const [keep, give] = body.tee();
  instance[INTERNALS].bodyInit = keep;
  return give;
}
// Exposes the raw body initializer of a Body instance (internal helper).
export function getInstanceBody(instance: BaseBody) {
  const { bodyInit } = instance[INTERNALS];
  return bodyInit;
}

View File

@@ -1,26 +0,0 @@
// Symbols used by the sandbox host to reach FetchEvent internals.
export const responseSymbol = Symbol('response');
export const passThroughSymbol = Symbol('passThrough');
export const waitUntilSymbol = Symbol('waitUntil');

/**
 * Spec-compliant FetchEvent: collects `waitUntil` promises and records at
 * most one `respondWith` response for the host to await.
 */
export class FetchEvent {
  readonly [waitUntilSymbol]: Promise<any>[] = [];
  [responseSymbol]?: Promise<Response>;
  [passThroughSymbol] = false;

  respondWith(response: Response | Promise<Response>): void {
    // Only the first call wins, per spec.
    this[responseSymbol] ??= Promise.resolve(response);
  }

  passThroughOnException(): void {
    this[passThroughSymbol] = true;
  }

  waitUntil(promise: Promise<any>): void {
    this[waitUntilSymbol].push(promise);
  }
}

View File

@@ -1,238 +0,0 @@
import { isIterable } from '../is';
// Symbol-keyed backing store: original-cased name -> list of values.
const MAP = Symbol('map');
// Symbol key for iterator state on HeadersIterator objects.
const INTERNAL = Symbol('internal');
// Validation patterns for header name tokens and field values.
const INVALID_TOKEN_REGEX = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const INVALID_HEADER_CHAR_REGEX = /[^\t\x20-\x7e\x80-\xff]/;
/**
 * WHATWG-style Headers backed by a symbol-keyed map of original-cased
 * names to lists of values. Lookups are case-insensitive; multiple values
 * are joined with ', '.
 */
class BaseHeaders implements Headers {
  [MAP]: { [k: string]: string[] } = {};

  constructor(init?: HeadersInit) {
    if (init instanceof BaseHeaders) {
      // Copy raw name/value lists, preserving duplicates.
      const rawHeaders = init.raw();
      for (const headerName of Object.keys(rawHeaders)) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value);
        }
      }
    } else if (isIterable(init)) {
      // Sequence of [name, value] pairs.
      const pairs = [];
      for (const pair of init) {
        if (!isIterable(pair)) {
          throw new TypeError('Each header pair must be iterable');
        }
        pairs.push(Array.from(pair));
      }
      for (const pair of pairs) {
        if (pair.length !== 2) {
          throw new TypeError('Each header pair must be a name/value tuple');
        }
        this.append(pair[0], pair[1]);
      }
    } else if (typeof init === 'object') {
      // Plain record of name -> value.
      for (const key of Object.keys(init)) {
        // @ts-ignore
        this.append(key, init[key]);
      }
    } else if (init) {
      throw new TypeError('Provided initializer must be an object');
    }
  }

  // Case-insensitive lookup; multiple values joined with ', '.
  get(name: string) {
    const _name = `${name}`;
    validateName(_name);
    const key = find(this[MAP], _name);
    if (key === undefined) {
      return null;
    }
    return this[MAP][key].join(', ');
  }

  // Re-snapshots the header list on every iteration so mutations made by
  // the callback are observed.
  forEach(
    callback: (value: string, name: string, parent: BaseHeaders) => void,
    thisArg: any = undefined
  ): void {
    let pairs = getHeaders(this);
    let i = 0;
    while (i < pairs.length) {
      const [name, value] = pairs[i];
      callback.call(thisArg, value, name, this);
      pairs = getHeaders(this);
      i++;
    }
  }

  // Replaces all existing values stored under `name`.
  set(name: string, value: string) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    this[MAP][key !== undefined ? key : name] = [value];
  }

  // Adds a value, keeping any existing ones for the same name.
  append(name: string, value: string) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      this[MAP][key].push(value);
    } else {
      this[MAP][name] = [value];
    }
  }

  has(name: string) {
    name = `${name}`;
    validateName(name);
    return find(this[MAP], name) !== undefined;
  }

  delete(name: string) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      delete this[MAP][key];
    }
  }

  // Non-standard: exposes the backing map (original casing, value lists).
  raw() {
    return this[MAP];
  }

  keys() {
    return createHeadersIterator(this, 'key');
  }

  values() {
    return createHeadersIterator(this, 'value');
  }

  entries() {
    return createHeadersIterator(this, 'key+value');
  }

  [Symbol.iterator]() {
    return createHeadersIterator(this, 'key+value');
  }
}
// Creates a HeadersIterator over `target`, yielding keys, values, or
// [key, value] pairs depending on `kind`.
function createHeadersIterator(
  target: BaseHeaders,
  kind: 'key' | 'value' | 'key+value'
) {
  const iterator = Object.create(HeadersIteratorPrototype);
  iterator[INTERNAL] = { target, kind, index: 0 };
  return iterator;
}
// Throws a TypeError when `name` is not a valid HTTP header name token.
function validateName(name: string) {
  const candidate = `${name}`;
  if (INVALID_TOKEN_REGEX.test(candidate)) {
    throw new TypeError(`${candidate} is not a legal HTTP header name`);
  }
}
// Throws a TypeError when `value` contains characters illegal in an HTTP
// header field value.
function validateValue(value: string) {
  const candidate = `${value}`;
  if (INVALID_HEADER_CHAR_REGEX.test(candidate)) {
    throw new TypeError(`${candidate} is not a legal HTTP header value`);
  }
}
// Case-insensitive lookup of the stored key matching `name`; returns the
// key in its original casing, or undefined when absent.
function find(
  map: { [k: string]: string[] },
  name: string
): string | undefined {
  const lowered = name.toLowerCase();
  for (const key in map) {
    if (key.toLowerCase() === lowered) {
      return key;
    }
  }
  return undefined;
}
// Brand the class so Object.prototype.toString reports "[object Headers]".
Object.defineProperty(BaseHeaders.prototype, Symbol.toStringTag, {
  value: 'Headers',
  writable: false,
  enumerable: false,
  configurable: true,
});
// Mirror the WHATWG Headers surface: spec methods are enumerable; the
// non-standard raw() helper is not.
Object.defineProperties(BaseHeaders.prototype, {
  append: { enumerable: true },
  delete: { enumerable: true },
  entries: { enumerable: true },
  forEach: { enumerable: true },
  get: { enumerable: true },
  has: { enumerable: true },
  keys: { enumerable: true },
  raw: { enumerable: false },
  set: { enumerable: true },
  values: { enumerable: true },
});
function getHeaders(
headers: BaseHeaders,
kind: 'key' | 'value' | 'key+value' = 'key+value'
) {
const fn =
kind === 'key'
? (key: string) => key.toLowerCase()
: kind === 'value'
? (key: string) => headers[MAP][key].join(', ')
: (key: string) => [key.toLowerCase(), headers[MAP][key].join(', ')];
return Object.keys(headers[MAP])
.sort()
.map(key => fn(key));
}
// Prototype for iterators returned by createHeadersIterator. It inherits
// from %IteratorPrototype% (reached via an array iterator below) so the
// objects behave like built-in iterators.
const HeadersIteratorPrototype = Object.setPrototypeOf(
  {
    next() {
      // Brand check: only objects created by createHeadersIterator
      // (whose prototype is exactly this object) may call next().
      if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
        throw new TypeError('Value of `this` is not a HeadersIterator');
      }
      const { target, kind, index } = this[INTERNAL];
      // Re-snapshot on every call, so mutations made to the Headers
      // during iteration are observed.
      const values = getHeaders(target, kind);
      const len = values.length;
      if (index >= len) {
        return {
          value: undefined,
          done: true,
        };
      }
      this[INTERNAL].index = index + 1;
      return {
        value: values[index],
        done: false,
      };
    },
  },
  // %ArrayIteratorPrototype%'s prototype is %IteratorPrototype%.
  Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))
);
// Brand the iterator prototype so Object.prototype.toString reports
// '[object HeadersIterator]'.
Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
  value: 'HeadersIterator',
  writable: false,
  enumerable: false,
  configurable: true,
});
export { BaseHeaders as Headers };

View File

@@ -1,124 +0,0 @@
import { Body, cloneBody, extractContentType, getInstanceBody } from './body';
import { Headers as BaseHeaders } from './headers';
import { notImplemented } from '../utils';
export const INTERNALS = Symbol('internal request');
// Minimal Fetch-spec Request implementation on top of the local Body class.
// Spec properties with no meaning in this environment throw via notImplemented.
class BaseRequest extends Body implements Request {
  // Internal state lives under a symbol so it does not collide with
  // spec-defined properties and stays hidden from enumeration.
  [INTERNALS]: {
    credentials: RequestCredentials;
    headers: Headers;
    method: string;
    redirect: RequestRedirect;
    url: URL;
  };
  // `input` is either a URL string or another request to copy from;
  // `init` fields take precedence over the copied request's fields.
  constructor(input: BaseRequest | string, init: RequestInit = {}) {
    const method = init.method?.toUpperCase() ?? 'GET';
    // GET/HEAD requests must not carry a body, whether passed directly
    // or inherited from the input request.
    if (
      (method === 'GET' || method === 'HEAD') &&
      (init.body || (input instanceof BaseRequest && getInstanceBody(input)))
    ) {
      throw new TypeError('Request with GET/HEAD method cannot have body');
    }
    // init.body wins; otherwise clone the source request's body so both
    // requests can be consumed independently.
    let inputBody: BodyInit | null = null;
    if (init.body) {
      inputBody = init.body;
    } else if (input instanceof BaseRequest && getInstanceBody(input)) {
      inputBody = cloneBody(input);
    }
    super(inputBody);
    const headers = new BaseHeaders(
      init.headers || getProp(input, 'headers') || {}
    );
    // Derive a Content-Type from the body when the caller did not set one.
    if (inputBody !== null) {
      const contentType = extractContentType(this);
      if (contentType !== null && !headers.has('Content-Type')) {
        headers.append('Content-Type', contentType);
      }
    }
    this[INTERNALS] = {
      credentials:
        init.credentials || getProp(input, 'credentials') || 'same-origin',
      headers,
      method,
      redirect: init.redirect || getProp(input, 'redirect') || 'follow',
      url: new URL(typeof input === 'string' ? input : input.url),
    };
  }
  get url() {
    return this[INTERNALS].url.toString();
  }
  get credentials() {
    return this[INTERNALS].credentials;
  }
  get method() {
    return this[INTERNALS].method;
  }
  get headers() {
    return this[INTERNALS].headers;
  }
  get redirect() {
    return this[INTERNALS].redirect;
  }
  // Copy constructor doubles as clone(); the body is cloned in the
  // constructor via cloneBody.
  // @ts-ignore
  public clone() {
    return new BaseRequest(this);
  }
  // The spec properties below are not supported in this environment
  // and throw on access.
  get cache() {
    return notImplemented('Request', 'cache');
  }
  get integrity() {
    return notImplemented('Request', 'integrity');
  }
  get keepalive() {
    return notImplemented('Request', 'keepalive');
  }
  get mode() {
    return notImplemented('Request', 'mode');
  }
  get destination() {
    return notImplemented('Request', 'destination');
  }
  get referrer() {
    return notImplemented('Request', 'referrer');
  }
  get referrerPolicy() {
    return notImplemented('Request', 'referrerPolicy');
  }
  get signal() {
    return notImplemented('Request', 'signal');
  }
  get [Symbol.toStringTag]() {
    return 'Request';
  }
}
export { BaseRequest as Request };
// Reads a property from `input` when it is a BaseRequest; string inputs
// (bare URLs) have no request properties, so undefined is returned.
function getProp<K extends keyof BaseRequest>(
  input: BaseRequest | string,
  key: K
): BaseRequest[K] | undefined {
  if (input instanceof BaseRequest) {
    return input[key];
  }
  return undefined;
}

View File

@@ -1,113 +0,0 @@
import { Body, BodyInit, cloneBody, extractContentType } from './body';
// Symbol-keyed slot for internal response state (hidden from enumeration).
const INTERNALS = Symbol('internal response');
// Status codes accepted by Response.redirect().
const REDIRECTS = new Set([301, 302, 303, 307, 308]);
// Minimal Fetch-spec Response implementation on top of the local Body class.
class BaseResponse extends Body implements Response {
  [INTERNALS]: {
    headers: Headers;
    status: number;
    statusText: string;
    type: 'default' | 'error';
    url?: URL;
  };
  constructor(body?: BodyInit | null, init?: ResponseInit) {
    super(body);
    this[INTERNALS] = {
      headers: new Headers(init?.headers),
      status: init?.status || 200,
      statusText: init?.statusText || '',
      type: 'default',
      url: init?.url ? new URL(init.url) : undefined,
    };
    if (this[INTERNALS].status < 200 || this[INTERNALS].status > 599) {
      throw new RangeError(
        `Responses may only be constructed with status codes in the range 200 to 599, inclusive.`
      );
    }
    // Only probe for a Content-Type when a body is actually present
    // (`body` may be null OR undefined) and none was set explicitly.
    if (body != null && !this[INTERNALS].headers.has('Content-Type')) {
      const contentType = extractContentType(this);
      if (contentType) {
        this[INTERNALS].headers.append('Content-Type', contentType);
      }
    }
  }
  // Builds a redirect response with a Location header; only the redirect
  // status codes in REDIRECTS are permitted.
  static redirect(url: string, status = 302) {
    if (!REDIRECTS.has(status)) {
      throw new RangeError(
        'Failed to execute "redirect" on "response": Invalid status code'
      );
    }
    // Construct via BaseResponse (previously this used the global
    // `Response`, which bypasses this implementation and may not exist
    // at runtime).
    return new BaseResponse(null, {
      headers: { Location: url },
      status,
    });
  }
  // Builds a network-error response: type 'error', status 0 (Fetch spec).
  static error() {
    // The constructor rejects out-of-range statuses and coerces a falsy
    // status to 200, so build a default response and patch the internals
    // afterwards (previously `{ status: 0 }` silently became 200).
    const response = new BaseResponse(null);
    response[INTERNALS].status = 0;
    response[INTERNALS].statusText = '';
    response[INTERNALS].type = 'error';
    return response;
  }
  get url() {
    return this[INTERNALS].url?.toString() || '';
  }
  // ok is true for 2xx statuses only.
  get ok() {
    return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;
  }
  get status() {
    return this[INTERNALS].status;
  }
  get statusText() {
    return this[INTERNALS].statusText;
  }
  get headers() {
    return this[INTERNALS].headers;
  }
  // Approximation: a 3xx status plus a Location header counts as redirected.
  get redirected() {
    return (
      this[INTERNALS].status > 299 &&
      this[INTERNALS].status < 400 &&
      this[INTERNALS].headers.has('Location')
    );
  }
  get type() {
    return this[INTERNALS].type;
  }
  // Clones the body (via cloneBody) and copies status/headers/url.
  // @ts-ignore
  clone() {
    return new BaseResponse(cloneBody(this), {
      headers: this.headers,
      status: this.status,
      statusText: this.statusText,
      url: this.url,
    });
  }
  get [Symbol.toStringTag]() {
    return 'Response';
  }
}
// Init options accepted by the Response constructor. `url` is a non-spec
// extension used to populate the response's url property.
export interface ResponseInit {
  headers?: HeadersInit;
  status?: number;
  statusText?: string;
  url?: string;
}
export { BaseResponse as Response };

View File

@@ -1,26 +0,0 @@
import { DeprecationError } from '../error';
import { FetchEvent } from '../spec-compliant/fetch-event';
import { NextRequest } from './request';
// FetchEvent specialization for Next.js middleware. Direct access to
// `request`/`respondWith` on the event is deprecated and throws a
// DeprecationError that names the originating page.
export class NextFetchEvent extends FetchEvent {
  sourcePage: string;
  constructor(params: { request: NextRequest; page: string }) {
    const { request, page } = params;
    //@ts-ignore
    super(request);
    this.sourcePage = page;
  }
  // @ts-ignore
  get request() {
    throw new DeprecationError({ page: this.sourcePage });
  }
  respondWith() {
    throw new DeprecationError({ page: this.sourcePage });
  }
}

View File

@@ -1,98 +0,0 @@
import type { IResult } from 'ua-parser-js';
import cookie from 'cookie';
import parseua from 'ua-parser-js';
import { Request, RequestInit as NodeFetchRequestInit } from 'node-fetch';
export const INTERNALS = Symbol('internal request');
// Request specialization for Next.js middleware, adding cookies, geo/ip
// metadata, page info, and parsed user-agent access.
export class NextRequest extends Request {
  [INTERNALS]: {
    cookieParser(): { [key: string]: string };
    geo: { city?: string; country?: string; region?: string };
    ip?: string;
    page?: { name?: string; params?: { [key: string]: string } };
    ua?: IResult | null;
    url: URL;
  };
  constructor(input: Request | string, init: RequestInit = {}) {
    //@ts-ignore
    super(input, init);
    // Parses the `cookie` request header on each access.
    const parseCookieHeader = () => {
      const header = this.headers.get('cookie');
      return header ? cookie.parse(header) : {};
    };
    this[INTERNALS] = {
      cookieParser: parseCookieHeader,
      geo: init.geo || {},
      ip: init.ip,
      page: init.page,
      url: new URL(typeof input === 'string' ? input : input.url),
    };
  }
  public get cookies() {
    return this[INTERNALS].cookieParser();
  }
  public get geo() {
    return this[INTERNALS].geo;
  }
  public get ip() {
    return this[INTERNALS].ip;
  }
  public get preflight() {
    return this.headers.get('x-middleware-preflight');
  }
  public get nextUrl() {
    return this[INTERNALS].url;
  }
  public get page() {
    const page = this[INTERNALS].page;
    return {
      name: page?.name,
      params: page?.params,
    };
  }
  // Parsed user-agent, memoized per request. `null` in the cache marks
  // "no user-agent header"; callers always see undefined in that case.
  public get ua() {
    const cached = this[INTERNALS].ua;
    if (cached !== undefined) {
      return cached || undefined;
    }
    const uaString = this.headers.get('user-agent');
    if (!uaString) {
      this[INTERNALS].ua = null;
      return undefined;
    }
    const parsed = { ...parseua(uaString) };
    this[INTERNALS].ua = parsed;
    return parsed;
  }
  //@ts-ignore
  public get url() {
    return this[INTERNALS].url.toString();
  }
}
// Extends node-fetch's RequestInit with Next.js middleware metadata:
// geolocation, client ip, and the matched page/params.
export interface RequestInit extends NodeFetchRequestInit {
  geo?: {
    city?: string;
    country?: string;
    region?: string;
  };
  ip?: string;
  page?: {
    name?: string;
    params?: { [key: string]: string };
  };
}

View File

@@ -1,97 +0,0 @@
import type { CookieSerializeOptions } from 'cookie';
import cookie from 'cookie';
import { Response, ResponseInit as NodeFetchResponseInit } from 'node-fetch';
// Symbol-keyed slot for internal response state (hidden from enumeration).
const INTERNALS = Symbol('internal response');
// Status codes accepted by SpecResponse.redirect().
const REDIRECTS = new Set([301, 302, 303, 307, 308]);
// Response specialization for Next.js middleware, adding cookie helpers
// and the redirect/rewrite/next static constructors used by the runtime.
export class SpecResponse extends Response {
  [INTERNALS]: {
    cookieParser(): { [key: string]: string };
    url?: URL;
  };
  constructor(body?: BodyInit | null, init: ResponseInit = {}) {
    // TODO - why is this failing?
    // @ts-ignore
    super(body, init);
    const cookieParser = () => {
      const value = this.headers.get('cookie');
      return value ? cookie.parse(value) : {};
    };
    this[INTERNALS] = {
      cookieParser,
      url: init.url ? new URL(init.url) : undefined,
    };
  }
  // Parsed contents of the `cookie` header.
  public get cookies() {
    return this[INTERNALS].cookieParser();
  }
  // Appends a Set-Cookie header. Object values are JSON-serialized with a
  // 'j:' prefix (Express convention). Returns `this` for chaining.
  public cookie(
    name: string,
    value: { [key: string]: any } | string,
    opts: CookieSerializeOptions = {}
  ) {
    // Work on a copy: the original mutated the caller's `opts` (writing
    // `expires` and dividing `maxAge`), corrupting reused option objects.
    const options = { ...opts };
    const val =
      typeof value === 'object' ? 'j:' + JSON.stringify(value) : String(value);
    if (options.maxAge) {
      // `maxAge` is taken in milliseconds here, while cookie.serialize
      // expects seconds — hence the conversion.
      options.expires = new Date(Date.now() + options.maxAge);
      options.maxAge /= 1000;
    }
    if (options.path == null) {
      options.path = '/';
    }
    this.headers.append('Set-Cookie', cookie.serialize(name, val, options));
    return this;
  }
  // Expires a cookie immediately (epoch date) on path '/', unless overridden.
  public clearCookie(name: string, opts: CookieSerializeOptions = {}) {
    return this.cookie(name, '', { expires: new Date(1), path: '/', ...opts });
  }
  // Builds a redirect response; only the statuses in REDIRECTS are allowed.
  static redirect(url: string | URL, status = 302) {
    if (!REDIRECTS.has(status)) {
      throw new RangeError(
        'Failed to execute "redirect" on "response": Invalid status code'
      );
    }
    return new SpecResponse(null, {
      headers: { Location: typeof url === 'string' ? url : url.toString() },
      status,
    });
  }
  // Signals the middleware runtime to rewrite the request to `destination`.
  static rewrite(destination: string | URL) {
    return new SpecResponse(null, {
      headers: {
        'x-middleware-rewrite':
          typeof destination === 'string'
            ? destination
            : destination.toString(),
      },
    });
  }
  // Signals the middleware runtime to continue the request chain.
  static next() {
    return new SpecResponse(null, {
      headers: {
        'x-middleware-next': '1',
      },
    });
  }
}
// Extends node-fetch's ResponseInit with a `url` used to record the
// response URL internally.
interface ResponseInit extends NodeFetchResponseInit {
  url?: string;
}

View File

@@ -1,26 +0,0 @@
import { Response } from 'node-fetch';
// Node.js-style header map; multi-valued headers appear as string arrays.
export interface NodeHeaders {
  [header: string]: string | string[] | undefined;
}
// Serialized request payload handed to the middleware runtime:
// headers, method, and url are required; geo/ip/page are optional metadata.
export interface RequestData {
  geo?: {
    city?: string;
    country?: string;
    region?: string;
  };
  headers: NodeHeaders;
  ip?: string;
  method: string;
  page?: {
    name?: string;
    params?: { [key: string]: string };
  };
  url: string;
}
// Result of dispatching a fetch event: the response plus a promise the
// runtime awaits before tearing the execution context down.
export interface FetchEventResult {
  response: Response;
  waitUntil: Promise<any>;
}

Some files were not shown because too many files have changed in this diff Show More