Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-12 04:22:14 +00:00)

Compare commits: vercel-plu… → @vercel/py… (12 commits)

Commits:

- 04029013a6
- c65e7fa883
- 27b68be93f
- 99fa729966
- 2bb3da80e0
- b852f34a27
- ce8e6e3806
- 983946650e
- 59e4572e76
- 5c297122cb
- 28f3bf9ef6
- a936e92b8b

@@ -14,4 +14,6 @@ packages/client/lib
packages/node/src/bridge.ts
packages/node-bridge/bridge.js
packages/node-bridge/launcher.js
packages/node-bridge/helpers.js
packages/node-bridge/source-map-support.js
packages/middleware/src/entries.js

.gitignore (vendored, 1)

@@ -27,5 +27,4 @@ test/lib/deployment/failed-page.txt
 /public
 __pycache__
 .vercel
-.output
 .turbo

examples/angular/.gitignore (vendored, 1)

@@ -41,4 +41,3 @@ testem.log
 .DS_Store
 Thumbs.db
 .vercel
-.output

examples/remix/.gitignore (vendored, 1)

@@ -2,7 +2,6 @@ node_modules

 .cache
 .vercel
-.output

 public/build
 api/_build

examples/sanity/.env.template (Normal file, 3)

@@ -0,0 +1,3 @@
# Run `vercel env pull` to generate a .env file from your Vercel project
SANITY_STUDIO_API_PROJECT_ID=
SANITY_STUDIO_API_DATASET=

examples/sanity/.npmignore (Normal file, 12)

@@ -0,0 +1,12 @@
# Logs
/logs
*.log

# Coverage directory used by tools like istanbul
/coverage

# Dependency directories
node_modules

# Compiled sanity studio
/dist

examples/sanity/README.md (Normal file, 29)

@@ -0,0 +1,29 @@
# Sanity Blogging Content Studio

Congratulations, you have now installed Sanity Studio, an open source real-time content editing environment connected to the Sanity backend.

Now you can do the following things:

- [Read “getting started” in the docs](https://www.sanity.io/docs/introduction/getting-started?utm_source=readme)
- Check out the example frontend: [React/Next.js](https://github.com/sanity-io/tutorial-sanity-blog-react-next)
- [Read the blog post about this template](https://www.sanity.io/blog/build-your-own-blog-with-sanity-and-next-js?utm_source=readme)
- [Join the community Slack](https://slack.sanity.io/?utm_source=readme)
- [Extend and build plugins](https://www.sanity.io/docs/content-studio/extending?utm_source=readme)

## Develop locally

Install dependencies:

```sh
npx @sanity/cli install
```

Pull down environment variables from your Vercel project (requires the [Vercel CLI](https://vercel.com/cli)):

```sh
vercel env pull
```

You can also run `npx @sanity/init` in this repo and agree to reconfigure it. You'll then be able to select from existing projects. The CLI will update `sanity.json` with the project ID and dataset name.

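A minimal end-to-end sketch of the local workflow this README describes, assuming the directory is already linked to a Vercel project that defines the two `SANITY_STUDIO_API_*` variables:

```sh
vercel env pull   # writes .env with SANITY_STUDIO_API_PROJECT_ID / SANITY_STUDIO_API_DATASET
npm run start     # runs `sanity start` (see the `scripts` in package.json below)
```
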
examples/sanity/config/.checksums (Normal file, 7)

@@ -0,0 +1,7 @@
{
  "#": "Used by Sanity to keep track of configuration file checksums, do not delete or modify!",
  "@sanity/default-layout": "bb034f391ba508a6ca8cd971967cbedeb131c4d19b17b28a0895f32db5d568ea",
  "@sanity/default-login": "6fb6d3800aa71346e1b84d95bbcaa287879456f2922372bb0294e30b968cd37f",
  "@sanity/form-builder": "b38478227ba5e22c91981da4b53436df22e48ff25238a55a973ed620be5068aa",
  "@sanity/data-aspects": "d199e2c199b3e26cd28b68dc84d7fc01c9186bf5089580f2e2446994d36b3cb6"
}

examples/sanity/config/@sanity/data-aspects.json (Normal file, 3)

@@ -0,0 +1,3 @@
{
  "listOptions": {}
}

examples/sanity/config/@sanity/default-layout.json (Normal file, 6)

@@ -0,0 +1,6 @@
{
  "toolSwitcher": {
    "order": [],
    "hidden": []
  }
}

examples/sanity/config/@sanity/default-login.json (Normal file, 7)

@@ -0,0 +1,7 @@
{
  "providers": {
    "mode": "append",
    "redirectOnSingle": false,
    "entries": []
  }
}

examples/sanity/config/@sanity/form-builder.json (Normal file, 5)

@@ -0,0 +1,5 @@
{
  "images": {
    "directUploads": true
  }
}

examples/sanity/package.json (Normal file, 30)

@@ -0,0 +1,30 @@
{
  "name": "verceltemplateblogstudio",
  "private": true,
  "version": "1.0.0",
  "description": "This is the public list of examples for **Vercel**",
  "main": "package.json",
  "author": "Knut Melvær <knut@sanity.io>",
  "license": "UNLICENSED",
  "scripts": {
    "start": "sanity start",
    "build": "sanity build"
  },
  "keywords": [
    "sanity"
  ],
  "dependencies": {
    "@sanity/core": "^2.26",
    "@sanity/default-layout": "^2.26",
    "@sanity/default-login": "^2.26",
    "@sanity/desk-tool": "^2.26",
    "@sanity/vision": "^2.26",
    "prop-types": "^15.7",
    "react": "^17.0",
    "react-dom": "^17.0",
    "styled-components": "^5.2"
  },
  "devDependencies": {
    "@sanity/cli": "^2.26"
  }
}

examples/sanity/plugins/.gitkeep (Normal file, 1)

@@ -0,0 +1 @@
User-specific packages can be placed here

examples/sanity/sanity.json (Normal file, 29)

@@ -0,0 +1,29 @@
{
  "root": true,
  "project": {
    "name": "vercel-template-blog-studio"
  },
  "api": {
    "projectId": "YOUR_PROJECT_ID",
    "dataset": "YOUR_DATASET_NAME"
  },
  "plugins": [
    "@sanity/base",
    "@sanity/default-layout",
    "@sanity/default-login",
    "@sanity/desk-tool"
  ],
  "env": {
    "development": {
      "plugins": [
        "@sanity/vision"
      ]
    }
  },
  "parts": [
    {
      "name": "part:@sanity/base/schema",
      "path": "./schemas/schema"
    }
  ]
}

examples/sanity/schemas/author.js (Normal file, 48)

@@ -0,0 +1,48 @@
export default {
  name: 'author',
  title: 'Author',
  type: 'document',
  fields: [
    {
      name: 'name',
      title: 'Name',
      type: 'string',
    },
    {
      name: 'slug',
      title: 'Slug',
      type: 'slug',
      options: {
        source: 'name',
        maxLength: 96,
      },
    },
    {
      name: 'image',
      title: 'Image',
      type: 'image',
      options: {
        hotspot: true,
      },
    },
    {
      name: 'bio',
      title: 'Bio',
      type: 'array',
      of: [
        {
          title: 'Block',
          type: 'block',
          styles: [{title: 'Normal', value: 'normal'}],
          lists: [],
        },
      ],
    },
  ],
  preview: {
    select: {
      title: 'name',
      media: 'image',
    },
  },
}

examples/sanity/schemas/blockContent.js (Normal file, 65)

@@ -0,0 +1,65 @@
/**
 * This is the schema definition for the rich text fields used for
 * this blog studio. When you import it in schemas.js it can be
 * reused in other parts of the studio with:
 *  {
 *    name: 'someName',
 *    title: 'Some title',
 *    type: 'blockContent'
 *  }
 */
export default {
  title: 'Block Content',
  name: 'blockContent',
  type: 'array',
  of: [
    {
      title: 'Block',
      type: 'block',
      // Styles let you set what your user can mark up blocks with. These
      // correspond with HTML tags, but you can set any title or value
      // you want and decide how you want to deal with it where you want to
      // use your content.
      styles: [
        {title: 'Normal', value: 'normal'},
        {title: 'H1', value: 'h1'},
        {title: 'H2', value: 'h2'},
        {title: 'H3', value: 'h3'},
        {title: 'H4', value: 'h4'},
        {title: 'Quote', value: 'blockquote'},
      ],
      lists: [{title: 'Bullet', value: 'bullet'}],
      // Marks let you mark up inline text in the block editor.
      marks: {
        // Decorators usually describe a single property – e.g. a typographic
        // preference or highlighting by editors.
        decorators: [
          {title: 'Strong', value: 'strong'},
          {title: 'Emphasis', value: 'em'},
        ],
        // Annotations can be any object structure – e.g. a link or a footnote.
        annotations: [
          {
            title: 'URL',
            name: 'link',
            type: 'object',
            fields: [
              {
                title: 'URL',
                name: 'href',
                type: 'url',
              },
            ],
          },
        ],
      },
    },
    // You can add additional types here. Note that you can't use
    // primitive types such as 'string' and 'number' in the same array
    // as a block type.
    {
      type: 'image',
      options: {hotspot: true},
    },
  ],
}

examples/sanity/schemas/category.js (Normal file, 17)

@@ -0,0 +1,17 @@
export default {
  name: 'category',
  title: 'Category',
  type: 'document',
  fields: [
    {
      name: 'title',
      title: 'Title',
      type: 'string',
    },
    {
      name: 'description',
      title: 'Description',
      type: 'text',
    },
  ],
}

examples/sanity/schemas/post.js (Normal file, 65)

@@ -0,0 +1,65 @@
export default {
  name: 'post',
  title: 'Post',
  type: 'document',
  fields: [
    {
      name: 'title',
      title: 'Title',
      type: 'string',
    },
    {
      name: 'slug',
      title: 'Slug',
      type: 'slug',
      options: {
        source: 'title',
        maxLength: 96,
      },
    },
    {
      name: 'author',
      title: 'Author',
      type: 'reference',
      to: {type: 'author'},
    },
    {
      name: 'mainImage',
      title: 'Main image',
      type: 'image',
      options: {
        hotspot: true,
      },
    },
    {
      name: 'categories',
      title: 'Categories',
      type: 'array',
      of: [{type: 'reference', to: {type: 'category'}}],
    },
    {
      name: 'publishedAt',
      title: 'Published at',
      type: 'datetime',
    },
    {
      name: 'body',
      title: 'Body',
      type: 'blockContent',
    },
  ],

  preview: {
    select: {
      title: 'title',
      author: 'author.name',
      media: 'mainImage',
    },
    prepare(selection) {
      const {author} = selection
      return Object.assign({}, selection, {
        subtitle: author && `by ${author}`,
      })
    },
  },
}

examples/sanity/schemas/schema.js (Normal file, 29)

@@ -0,0 +1,29 @@
// First, we must import the schema creator
import createSchema from 'part:@sanity/base/schema-creator'

// Then import schema types from any plugins that might expose them
import schemaTypes from 'all:part:@sanity/base/schema-type'

// We import object and document schemas
import blockContent from './blockContent'
import category from './category'
import post from './post'
import author from './author'

// Then we give our schema to the builder and provide the result to Sanity
export default createSchema({
  // We name our schema
  name: 'default',
  // Then proceed to concatenate our document type
  // to the ones provided by any plugins that are installed
  types: schemaTypes.concat([
    // The following are document types which will appear
    // in the studio.
    post,
    author,
    category,
    // When added to this list, object types can be used as
    // { type: 'typename' } in other document schemas
    blockContent,
  ]),
})

examples/sanity/static/.gitkeep (Normal file, 1)

@@ -0,0 +1 @@
Files placed here will be served by the Sanity server under the `/static`-prefix

examples/sanity/static/favicon.ico (Normal file, BIN)

Binary file not shown. (After: 1.1 KiB)

examples/sanity/tsconfig.json (Normal file, 6)

@@ -0,0 +1,6 @@
{
  // Note: This config is only used to help editors like VS Code understand/resolve
  // parts, the actual transpilation is done by babel. Any compiler configuration in
  // here will be ignored.
  "include": ["./node_modules/@sanity/base/types/**/*.ts", "./**/*.ts", "./**/*.tsx"]
}

examples/sanity/yarn.lock (Normal file, 9911)

File diff suppressed because it is too large.

examples/solidstart/.gitignore (vendored, 1)

@@ -2,7 +2,6 @@ dist
 worker
 .solid
 .vercel
-.output

 # dependencies
 /node_modules

examples/sveltekit/.gitignore (vendored, 1)

@@ -7,4 +7,3 @@ node_modules
 .env.*
 !.env.example
 .vercel
-.output

@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.13.1-canary.1",
+  "version": "2.14.1-canary.0",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.ts",
@@ -30,7 +30,7 @@
   "@types/node-fetch": "^2.1.6",
   "@types/semver": "6.0.0",
   "@types/yazl": "^2.4.1",
-  "@vercel/frameworks": "0.5.1-canary.21",
+  "@vercel/frameworks": "0.6.1-canary.0",
   "@vercel/ncc": "0.24.0",
   "aggregate-error": "3.0.1",
   "async-retry": "1.2.3",

@@ -1,411 +0,0 @@
import fs from 'fs-extra';
import { join, parse, relative, dirname, basename, extname } from 'path';
import glob from './fs/glob';
import { normalizePath } from './fs/normalize-path';
import { Lambda } from './lambda';
import type { BuildOptions } from './types';
import { debug, getIgnoreFilter } from '.';

// `.output` was already created by the Build Command, so we have
// to ensure its contents don't get bundled into the Lambda. Similarly,
// we don't want to bundle anything from `.vercel` either. Lastly,
// Builders/Runtimes didn't have `vercel.json` or `now.json`.
const ignoredPaths = ['.output', '.vercel', 'vercel.json', 'now.json'];

const shouldIgnorePath = (
  file: string,
  ignoreFilter: any,
  ignoreFile: boolean
) => {
  const isNative = ignoredPaths.some(item => {
    return file.startsWith(item);
  });

  if (!ignoreFile) {
    return isNative;
  }

  return isNative || ignoreFilter(file);
};

const getSourceFiles = async (workPath: string, ignoreFilter: any) => {
  const list = await glob('**', {
    cwd: workPath,
  });

  // We're not passing this as an `ignore` filter to the `glob` function above,
  // so that we can re-use exactly the same `getIgnoreFilter` method that the
  // Build Step uses (literally the same code). Note that this exclusion only applies
  // when deploying. Locally, another exclusion is needed, which is handled
  // further below in the `convertRuntimeToPlugin` function.
  for (const file in list) {
    if (shouldIgnorePath(file, ignoreFilter, true)) {
      delete list[file];
    }
  }

  return list;
};

/**
 * Convert legacy Runtime to a Plugin.
 * @param buildRuntime - a legacy build() function from a Runtime
 * @param packageName - the name of the package, for example `vercel-plugin-python`
 * @param ext - the file extension, for example `.py`
 */
export function _experimental_convertRuntimeToPlugin(
  buildRuntime: (options: BuildOptions) => Promise<{ output: Lambda }>,
  packageName: string,
  ext: string
) {
  // This `build()` signature should match `plugin.build()` signature in `vercel build`.
  return async function build({ workPath }: { workPath: string }) {
    // We also don't want to provide any files to Runtimes that were ignored
    // through `.vercelignore` or `.nowignore`, because the Build Step does the same.
    const ignoreFilter = await getIgnoreFilter(workPath);

    // Retrieve the files that are currently available on the File System,
    // before the Legacy Runtime has even started to build.
    const sourceFilesPreBuild = await getSourceFiles(workPath, ignoreFilter);

    // Instead of doing another `glob` to get all the matching source files,
    // we'll filter the list of existing files down to only the ones
    // that are matching the entrypoint pattern, so we're first creating
    // a clean new list to begin.
    const entrypoints = Object.assign({}, sourceFilesPreBuild);

    const entrypointMatch = new RegExp(`^api/.*${ext}$`);

    // Up next, we'll strip out the files from the list of entrypoints
    // that aren't actually considered entrypoints.
    for (const file in entrypoints) {
      if (!entrypointMatch.test(file)) {
        delete entrypoints[file];
      }
    }

    const pages: { [key: string]: any } = {};
    const pluginName = packageName.replace('vercel-plugin-', '');
    const outputPath = join(workPath, '.output');

    const traceDir = join(
      outputPath,
      `inputs`,
      // Legacy Runtimes can only provide API Routes, so that's
      // why we can use this prefix for all of them. Here, we have to
      // make sure to not use a cryptic hash name, because people
      // need to be able to easily inspect the output.
      `api-routes-${pluginName}`
    );

    await fs.ensureDir(traceDir);

    const entryRoot = join(outputPath, 'server', 'pages');

    for (const entrypoint of Object.keys(entrypoints)) {
      const { output } = await buildRuntime({
        files: sourceFilesPreBuild,
        entrypoint,
        workPath,
        config: {
          zeroConfig: true,
        },
        meta: {
          avoidTopLevelInstall: true,
          skipDownload: true,
        },
      });

      const lambdaFiles = output.files;

      // When deploying, the `files` that are passed to the Legacy Runtimes already
      // have certain files that are ignored stripped, but locally, that list of
      // files isn't used by the Legacy Runtimes, so we need to apply the filters
      // to the outputs that they are returning instead.
      for (const file in lambdaFiles) {
        if (shouldIgnorePath(file, ignoreFilter, false)) {
          delete lambdaFiles[file];
        }
      }

      let handlerFileBase = output.handler;
      let handlerFile = lambdaFiles[handlerFileBase];
      let handlerHasImport = false;

      const { handler } = output;
      const handlerMethod = handler.split('.').pop();
      const handlerFileName = handler.replace(`.${handlerMethod}`, '');

      // For compiled languages, the launcher file for the Lambda generated
      // by the Legacy Runtime matches the `handler` defined for it, but for
      // interpreted languages, the `handler` consists of the launcher file name
      // without an extension, plus the name of the method inside of that file
      // that should be invoked, so we have to construct the file path explicitly.
      if (!handlerFile) {
        handlerFileBase = handlerFileName + ext;
        handlerFile = lambdaFiles[handlerFileBase];
        handlerHasImport = true;
      }

      if (!handlerFile || !handlerFile.fsPath) {
        throw new Error(
          `Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains a \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`
        );
      }

      const handlerExtName = extname(handlerFile.fsPath);

      const entryBase = basename(entrypoint).replace(ext, handlerExtName);
      const entryPath = join(dirname(entrypoint), entryBase);
      const entry = join(entryRoot, entryPath);

      // Create the parent directory of the API Route that will be created
      // for the current entrypoint inside of `.output/server/pages/api`.
      await fs.ensureDir(dirname(entry));

      // For compiled languages, the launcher file will be binary and therefore
      // won't try to import a user-provided request handler (instead, it will
      // contain it). But for interpreted languages, the launcher might try to
      // load a user-provided request handler from the source file instead of bundling
      // it, so we have to adjust the import statement inside the launcher to point
      // to the respective source file. Previously, Legacy Runtimes simply expected
      // the user-provided request-handler to be copied right next to the launcher,
      // but with the new File System API, files won't be moved around unnecessarily.
      if (handlerHasImport) {
        const { fsPath } = handlerFile;
        const encoding = 'utf-8';

        // This is the true directory of the user-provided request handler in the
        // source files, so that's what we will use as an import path in the launcher.
        const locationPrefix = relative(entry, outputPath);

        let handlerContent = await fs.readFile(fsPath, encoding);

        const importPaths = [
          // This is the full entrypoint path, like `./api/test.py`. In our tests
          // Python didn't support importing from a parent directory without using different
          // code in the launcher that registers it as a location for modules and then changing
          // the importing syntax, but continuing to import it like before seems to work. If
          // other languages need this, we should consider excluding Python explicitly.
          // `./${entrypoint}`,

          // This is the entrypoint path without extension, like `api/test`
          entrypoint.slice(0, -ext.length),
        ];

        // Generate a list of regular expressions that we can use for
        // finding matches, but only allow matches if the import path is
        // wrapped inside single (') or double quotes (").
        const patterns = importPaths.map(path => {
          // eslint-disable-next-line no-useless-escape
          return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
        });

        let replacedMatch = null;

        for (const pattern of patterns) {
          const newContent = handlerContent.replace(
            pattern,
            (_, p1, p2, p3) => {
              return `${p1}${join(locationPrefix, p2)}${p3}`;
            }
          );

          if (newContent !== handlerContent) {
            debug(
              `Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`
            );

            handlerContent = newContent;
            replacedMatch = true;
          }
        }

        if (!replacedMatch) {
          throw new Error(
            `No replaceable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`
          );
        }

        await fs.writeFile(entry, handlerContent, encoding);
      } else {
        await fs.copy(handlerFile.fsPath, entry);
      }

      // Legacy Runtimes based on interpreted languages will create a new launcher file
      // for every entrypoint, but they will create each one inside `workPath`, which means that
      // the launcher for one entrypoint will overwrite the launcher provided for the previous
      // entrypoint. That's why, above, we copy the file contents into the new destination (and
      // optionally transform them along the way), instead of linking. We then also want to remove
      // the copy origin right here, so that the `workPath` doesn't contain a useless launcher file
      // once the build has finished running.
      await fs.remove(handlerFile.fsPath);
      debug(`Removed temporary file "${handlerFile.fsPath}"`);

      const nft = `${entry}.nft.json`;

      const json = JSON.stringify({
        version: 2,
        files: Object.keys(lambdaFiles)
          .map(file => {
            const { fsPath } = lambdaFiles[file];

            if (!fsPath) {
              throw new Error(
                `File "${file}" is missing valid \`fsPath\` property`
              );
            }

            // The handler was already moved into position above.
            if (file === handlerFileBase) {
              return;
            }

            return normalizePath(relative(dirname(nft), fsPath));
          })
          .filter(Boolean),
      });

      await fs.writeFile(nft, json);

      // Add an entry that will later on be added to the `functions-manifest.json`
      // file that is placed inside of the `.output` directory.
      pages[normalizePath(entryPath)] = {
        // Because the underlying file used as a handler was placed
        // inside `.output/server/pages/api`, it no longer has the name it originally
        // had and is now named after the API Route that it's responsible for,
        // so we have to adjust the name of the Lambda handler accordingly.
        handler: handler.replace(handlerFileName, parse(entry).name),
        runtime: output.runtime,
        memory: output.memory,
        maxDuration: output.maxDuration,
        environment: output.environment,
        allowQuery: output.allowQuery,
      };
    }

    // Add any Serverless Functions that were exposed by the Legacy Runtime
    // to the `functions-manifest.json` file provided in `.output`.
    await _experimental_updateFunctionsManifest({ workPath, pages });
  };
}

async function readJson(filePath: string): Promise<{ [key: string]: any }> {
  try {
    const str = await fs.readFile(filePath, 'utf8');
    return JSON.parse(str);
  } catch (err) {
    if (err.code === 'ENOENT') {
      return {};
    }
    throw err;
  }
}

/**
 * If `.output/functions-manifest.json` exists, append to the pages
 * property. Otherwise write a new file.
 */
export async function _experimental_updateFunctionsManifest({
  workPath,
  pages,
}: {
  workPath: string;
  pages: { [key: string]: any };
}) {
  const functionsManifestPath = join(
    workPath,
    '.output',
    'functions-manifest.json'
  );
  const functionsManifest = await readJson(functionsManifestPath);

  if (!functionsManifest.version) functionsManifest.version = 2;
  if (!functionsManifest.pages) functionsManifest.pages = {};

  for (const [pageKey, pageConfig] of Object.entries(pages)) {
    functionsManifest.pages[pageKey] = { ...pageConfig };
  }

  await fs.writeFile(functionsManifestPath, JSON.stringify(functionsManifest));
}

/**
 * Append routes to the `routes-manifest.json` file.
 * If the file does not exist, it will be created.
 */
export async function _experimental_updateRoutesManifest({
  workPath,
  redirects,
  rewrites,
  headers,
  dynamicRoutes,
  staticRoutes,
}: {
  workPath: string;
  redirects?: {
    source: string;
    destination: string;
    statusCode: number;
    regex: string;
  }[];
  rewrites?: {
    source: string;
    destination: string;
    regex: string;
  }[];
  headers?: {
    source: string;
    headers: {
      key: string;
      value: string;
    }[];
    regex: string;
  }[];
  dynamicRoutes?: {
    page: string;
    regex: string;
    namedRegex?: string;
    routeKeys?: { [named: string]: string };
  }[];
  staticRoutes?: {
    page: string;
    regex: string;
    namedRegex?: string;
    routeKeys?: { [named: string]: string };
  }[];
}) {
  const routesManifestPath = join(workPath, '.output', 'routes-manifest.json');

  const routesManifest = await readJson(routesManifestPath);

  if (!routesManifest.version) routesManifest.version = 3;
  if (routesManifest.pages404 === undefined) routesManifest.pages404 = true;

  if (redirects) {
    if (!routesManifest.redirects) routesManifest.redirects = [];
    routesManifest.redirects.push(...redirects);
  }

  if (rewrites) {
    if (!routesManifest.rewrites) routesManifest.rewrites = [];
    routesManifest.rewrites.push(...rewrites);
  }

  if (headers) {
    if (!routesManifest.headers) routesManifest.headers = [];
    routesManifest.headers.push(...headers);
  }

  if (dynamicRoutes) {
    if (!routesManifest.dynamicRoutes) routesManifest.dynamicRoutes = [];
    routesManifest.dynamicRoutes.push(...dynamicRoutes);
  }

  if (staticRoutes) {
    if (!routesManifest.staticRoutes) routesManifest.staticRoutes = [];
    routesManifest.staticRoutes.push(...staticRoutes);
  }

  await fs.writeFile(routesManifestPath, JSON.stringify(routesManifest));
}

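Since the helper above is deleted in this diff, a hedged sketch of how a Legacy Runtime package wrapped it before this change may help. The `./build` module and its `build` export are illustrative assumptions; the package name and `.py` extension come from the jsdoc and the test file in this diff:

```ts
import { _experimental_convertRuntimeToPlugin } from '@vercel/build-utils';
// Hypothetical: the package's legacy build() with the
// `(options: BuildOptions) => Promise<{ output: Lambda }>` signature.
import { build as legacyBuild } from './build';

// Yields a `build({ workPath })` plugin function that writes launchers and
// `.nft.json` files under `.output/server/pages` and appends each route to
// `.output/functions-manifest.json`.
export const build = _experimental_convertRuntimeToPlugin(
  legacyBuild,
  'vercel-plugin-python',
  '.py'
);
```
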
@@ -1,4 +1,4 @@
-import { getPlatformEnv } from './';
+import { getPlatformEnv } from './get-platform-env';

 export default function debug(message: string, ...additional: any[]) {
   if (getPlatformEnv('BUILDER_DEBUG')) {

packages/build-utils/src/get-platform-env.ts (Normal file, 23)

@@ -0,0 +1,23 @@
import { NowBuildError } from './errors';

/**
 * Helper function to support both `VERCEL_` and legacy `NOW_` env vars.
 * Throws an error if *both* env vars are defined.
 */
export const getPlatformEnv = (name: string): string | undefined => {
  const vName = `VERCEL_${name}`;
  const nName = `NOW_${name}`;
  const v = process.env[vName];
  const n = process.env[nName];
  if (typeof v === 'string') {
    if (typeof n === 'string') {
      throw new NowBuildError({
        code: 'CONFLICTING_ENV_VAR_NAMES',
        message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`,
        link: 'https://vercel.link/combining-old-and-new-config',
      });
    }
    return v;
  }
  return n;
};

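A small usage sketch of the new helper (illustrative env values; the re-export from the package root is added in the `index.ts` hunk below):

```ts
import { getPlatformEnv } from '@vercel/build-utils';

process.env.NOW_BUILDER_DEBUG = '1';
console.log(getPlatformEnv('BUILDER_DEBUG')); // => '1' (falls back to the legacy NOW_ name)

process.env.VERCEL_BUILDER_DEBUG = '1';
// With both names defined, the helper throws a NowBuildError
// with code 'CONFLICTING_ENV_VAR_NAMES':
getPlatformEnv('BUILDER_DEBUG');
```
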
@@ -2,6 +2,7 @@ import FileBlob from './file-blob';
 import FileFsRef from './file-fs-ref';
 import FileRef from './file-ref';
 import { Lambda, createLambda, getLambdaOptionsFromFunction } from './lambda';
+import { NodejsLambda } from './nodejs-lambda';
 import { Prerender } from './prerender';
 import download, { DownloadedFiles, isSymbolicLink } from './fs/download';
 import getWriteableDirectory from './fs/get-writable-directory';
@@ -31,17 +32,18 @@ import {
   getLatestNodeVersion,
   getDiscontinuedNodeVersions,
 } from './fs/node-version';
 import { NowBuildError } from './errors';
 import streamToBuffer from './fs/stream-to-buffer';
 import shouldServe from './should-serve';
 import debug from './debug';
 import getIgnoreFilter from './get-ignore-filter';
+import { getPlatformEnv } from './get-platform-env';

 export {
   FileBlob,
   FileFsRef,
   FileRef,
   Lambda,
+  NodejsLambda,
   createLambda,
   Prerender,
   download,
@@ -69,6 +71,7 @@ export {
   getLatestNodeVersion,
   getDiscontinuedNodeVersions,
   getSpawnOptions,
+  getPlatformEnv,
   streamToBuffer,
   shouldServe,
   debug,
@@ -89,11 +92,6 @@ export { detectFramework } from './detect-framework';
 export { DetectorFilesystem } from './detectors/filesystem';
 export { readConfigFile } from './fs/read-config-file';
 export { normalizePath } from './fs/normalize-path';
-export {
-  _experimental_convertRuntimeToPlugin,
-  _experimental_updateFunctionsManifest,
-  _experimental_updateRoutesManifest,
-} from './convert-runtime-to-plugin';

 export * from './schemas';
 export * from './types';
@@ -117,25 +115,3 @@ export const isOfficialRuntime = (desired: string, name?: string): boolean => {
 export const isStaticRuntime = (name?: string): boolean => {
   return isOfficialRuntime('static', name);
 };
-
-/**
- * Helper function to support both `VERCEL_` and legacy `NOW_` env vars.
- * Throws an error if *both* env vars are defined.
- */
-export const getPlatformEnv = (name: string): string | undefined => {
-  const vName = `VERCEL_${name}`;
-  const nName = `NOW_${name}`;
-  const v = process.env[vName];
-  const n = process.env[nName];
-  if (typeof v === 'string') {
-    if (typeof n === 'string') {
-      throw new NowBuildError({
-        code: 'CONFLICTING_ENV_VAR_NAMES',
-        message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`,
-        link: 'https://vercel.link/combining-old-and-new-config',
-      });
-    }
-    return v;
-  }
-  return n;
-};

@@ -12,7 +12,7 @@ interface Environment {
   [key: string]: string;
 }

-interface LambdaOptions {
+export interface LambdaOptions {
   files: Files;
   handler: string;
   runtime: string;
@@ -21,6 +21,10 @@ interface LambdaOptions {
   environment?: Environment;
   allowQuery?: string[];
   regions?: string[];
+  /**
+   * @deprecated Use `files` property instead.
+   */
+  zipBuffer?: Buffer;
 }

 interface GetLambdaOptionsFromFunctionOptions {
@@ -29,19 +33,19 @@ interface GetLambdaOptionsFromFunctionOptions {
 }

 export class Lambda {
-  public type: 'Lambda';
-  public files: Files;
-  public handler: string;
-  public runtime: string;
-  public memory?: number;
-  public maxDuration?: number;
-  public environment: Environment;
-  public allowQuery?: string[];
-  public regions?: string[];
+  type: 'Lambda';
+  files: Files;
+  handler: string;
+  runtime: string;
+  memory?: number;
+  maxDuration?: number;
+  environment: Environment;
+  allowQuery?: string[];
+  regions?: string[];
   /**
    * @deprecated Use `await lambda.createZip()` instead.
    */
-  public zipBuffer?: Buffer;
+  zipBuffer?: Buffer;

   constructor({
     files,
@@ -52,8 +56,11 @@ export class Lambda {
     environment = {},
     allowQuery,
     regions,
+    zipBuffer,
   }: LambdaOptions) {
-    assert(typeof files === 'object', '"files" must be an object');
+    if (!zipBuffer) {
+      assert(typeof files === 'object', '"files" must be an object');
+    }
     assert(typeof handler === 'string', '"handler" is not a string');
     assert(typeof runtime === 'string', '"runtime" is not a string');
     assert(typeof environment === 'object', '"environment" is not an object');
@@ -90,6 +97,7 @@ export class Lambda {
     this.environment = environment;
     this.allowQuery = allowQuery;
     this.regions = regions;
+    this.zipBuffer = zipBuffer;
   }

   async createZip(): Promise<Buffer> {

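A sketch of the resulting construction path, assuming the post-change `Lambda` class above (the runtime string and file contents are illustrative):

```ts
import { Lambda, FileBlob } from '@vercel/build-utils';

async function example(): Promise<Buffer> {
  const lambda = new Lambda({
    files: {
      'index.js': new FileBlob({ data: 'exports.handler = () => {};' }),
    },
    handler: 'index.handler',
    runtime: 'nodejs14.x',
  });

  // `zipBuffer` is deprecated; the archive is now produced on demand:
  return lambda.createZip();
}
```
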
packages/build-utils/src/nodejs-lambda.ts (Normal file, 27)

@@ -0,0 +1,27 @@
import { Lambda, LambdaOptions } from './lambda';

interface NodejsLambdaOptions extends LambdaOptions {
  shouldAddHelpers: boolean;
  shouldAddSourcemapSupport: boolean;
  awsLambdaHandler?: string;
}

export class NodejsLambda extends Lambda {
  launcherType: 'Nodejs';
  shouldAddHelpers: boolean;
  shouldAddSourcemapSupport: boolean;
  awsLambdaHandler?: string;

  constructor({
    shouldAddHelpers,
    shouldAddSourcemapSupport,
    awsLambdaHandler,
    ...opts
  }: NodejsLambdaOptions) {
    super(opts);
    this.launcherType = 'Nodejs';
    this.shouldAddHelpers = shouldAddHelpers;
    this.shouldAddSourcemapSupport = shouldAddSourcemapSupport;
    this.awsLambdaHandler = awsLambdaHandler;
  }
}

@@ -1,5 +1,6 @@
 import FileRef from './file-ref';
 import FileFsRef from './file-fs-ref';
+import { Lambda } from './lambda';

 export interface Env {
   [name: string]: string | undefined;
@@ -368,3 +369,49 @@ export interface ProjectSettings {
   directoryListing?: boolean;
   gitForkProtection?: boolean;
 }
+
+export interface BuilderV2 {
+  version: 2;
+  build: BuildV2;
+  prepareCache?: PrepareCache;
+}
+
+export interface BuilderV3 {
+  version: 3;
+  build: BuildV3;
+  prepareCache?: PrepareCache;
+  startDevServer?: StartDevServer;
+}
+
+type ImageFormat = 'image/avif' | 'image/webp';
+
+export interface Images {
+  domains: string[];
+  sizes: number[];
+  minimumCacheTTL?: number;
+  formats?: ImageFormat[];
+}
+
+export interface BuildResultV2 {
+  // TODO: use proper `Route` type from `routing-utils` (perhaps move types to a common package)
+  routes: any[];
+  images?: Images;
+  output: {
+    [key: string]: File | Lambda;
+  };
+  wildcard?: Array<{
+    domain: string;
+    value: string;
+  }>;
+}
+
+export interface BuildResultV3 {
+  output: Lambda;
+}
+
+export type BuildV2 = (options: BuildOptions) => Promise<BuildResultV2>;
+export type BuildV3 = (options: BuildOptions) => Promise<BuildResultV3>;
+export type PrepareCache = (options: PrepareCacheOptions) => Promise<Files>;
+export type StartDevServer = (
+  options: StartDevServerOptions
+) => Promise<StartDevServerResult>;

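A hedged sketch of a Builder conforming to the new `BuilderV3` shape (the trivial passthrough `build` is illustrative, not from this diff):

```ts
import { Lambda } from '@vercel/build-utils';
import type { BuilderV3, BuildV3 } from '@vercel/build-utils';

// BuildOptions provides `files` and `entrypoint`, among other fields.
const build: BuildV3 = async ({ files, entrypoint }) => {
  const output = new Lambda({
    files: { [entrypoint]: files[entrypoint] },
    handler: entrypoint,
    runtime: 'nodejs14.x',
  });
  return { output };
};

export const builder: BuilderV3 = {
  version: 3,
  build,
};
```
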
@@ -1,160 +0,0 @@
import { join } from 'path';
import fs from 'fs-extra';
import { BuildOptions, createLambda, FileFsRef } from '../src';
import { _experimental_convertRuntimeToPlugin } from '../src/convert-runtime-to-plugin';

async function fsToJson(dir: string, output: Record<string, any> = {}) {
  const files = await fs.readdir(dir);
  for (const file of files) {
    const fsPath = join(dir, file);
    const stat = await fs.stat(fsPath);
    if (stat.isDirectory()) {
      output[file] = {};
      await fsToJson(fsPath, output[file]);
    } else {
      output[file] = await fs.readFile(fsPath, 'utf8');
    }
  }
  return output;
}

const invalidFuncWorkpath = join(
  __dirname,
  'convert-runtime',
  'invalid-functions'
);
const pythonApiWorkpath = join(__dirname, 'convert-runtime', 'python-api');

describe('convert-runtime-to-plugin', () => {
  afterEach(async () => {
    await fs.remove(join(invalidFuncWorkpath, '.output'));
    await fs.remove(join(pythonApiWorkpath, '.output'));
  });

  it('should create correct filesystem for python', async () => {
    const ext = '.py';
    const workPath = pythonApiWorkpath;
    const handlerName = 'vc__handler__python';
    const handlerFileName = handlerName + ext;

    const lambdaOptions = {
      handler: `${handlerName}.vc_handler`,
      runtime: 'python3.9',
      memory: 512,
      maxDuration: 5,
      environment: {},
    };

    const buildRuntime = async (opts: BuildOptions) => {
      const handlerPath = join(workPath, handlerFileName);

      // This is the usual time at which a Legacy Runtime writes its Lambda launcher.
      await fs.writeFile(handlerPath, '# handler');

      opts.files[handlerFileName] = new FileFsRef({
        fsPath: handlerPath,
      });

      const lambda = await createLambda({
        files: opts.files,
        ...lambdaOptions,
      });
      return { output: lambda };
    };

    const packageName = 'vercel-plugin-python';
    const build = await _experimental_convertRuntimeToPlugin(
      buildRuntime,
      packageName,
      ext
    );

    await build({ workPath });

    const output = await fsToJson(join(workPath, '.output'));

    expect(output).toMatchObject({
      'functions-manifest.json': expect.stringContaining('{'),
      server: {
        pages: {
          api: {
            'index.py': expect.stringContaining('handler'),
            'index.py.nft.json': expect.stringContaining('{'),
            users: {
              'get.py': expect.stringContaining('handler'),
              'get.py.nft.json': expect.stringContaining('{'),
              'post.py': expect.stringContaining('handler'),
              'post.py.nft.json': expect.stringContaining('{'),
            },
          },
        },
      },
    });

    const funcManifest = JSON.parse(output['functions-manifest.json']);
    expect(funcManifest).toMatchObject({
      version: 2,
      pages: {
        'api/index.py': { ...lambdaOptions, handler: 'index.vc_handler' },
        'api/users/get.py': { ...lambdaOptions, handler: 'get.vc_handler' },
        'api/users/post.py': {
          ...lambdaOptions,
          handler: 'post.vc_handler',
          memory: 512,
        },
      },
    });

    const indexJson = JSON.parse(output.server.pages.api['index.py.nft.json']);
    expect(indexJson).toMatchObject({
      version: 2,
      files: [
        '../../../../api/db/[id].py',
        '../../../../api/index.py',
        '../../../../api/project/[aid]/[bid]/index.py',
        '../../../../api/users/get.py',
        '../../../../api/users/post.py',
        '../../../../file.txt',
        '../../../../util/date.py',
        '../../../../util/math.py',
      ],
    });

    const getJson = JSON.parse(
      output.server.pages.api.users['get.py.nft.json']
    );
    expect(getJson).toMatchObject({
      version: 2,
      files: [
        '../../../../../api/db/[id].py',
        '../../../../../api/index.py',
        '../../../../../api/project/[aid]/[bid]/index.py',
        '../../../../../api/users/get.py',
        '../../../../../api/users/post.py',
        '../../../../../file.txt',
        '../../../../../util/date.py',
        '../../../../../util/math.py',
      ],
    });

    const postJson = JSON.parse(
      output.server.pages.api.users['post.py.nft.json']
    );
    expect(postJson).toMatchObject({
      version: 2,
      files: [
        '../../../../../api/db/[id].py',
        '../../../../../api/index.py',
        '../../../../../api/project/[aid]/[bid]/index.py',
        '../../../../../api/users/get.py',
        '../../../../../api/users/post.py',
        '../../../../../file.txt',
        '../../../../../util/date.py',
        '../../../../../util/math.py',
      ],
    });

    expect(output.server.pages['file.txt']).toBeUndefined();
    expect(output.server.pages.api['file.txt']).toBeUndefined();
  });
});

packages/build-utils/test/unit.nodejs-lambda.test.ts (vendored, Normal file, 21)

@@ -0,0 +1,21 @@
import { NodejsLambda, FileBlob } from '../src';

describe('Test `NodejsLambda`', () => {
  it('should create an instance', () => {
    const helloSrc = 'module.exports = (req, res) => res.end("hi");';
    const lambda = new NodejsLambda({
      files: {
        'api/hello.js': new FileBlob({ data: helloSrc }),
      },
      handler: 'api/hello.js',
      runtime: 'node14.x',
      shouldAddHelpers: true,
      shouldAddSourcemapSupport: false,
    });
    expect(lambda.handler).toEqual('api/hello.js');
    expect(lambda.runtime).toEqual('node14.x');
    expect(lambda.shouldAddHelpers).toEqual(true);
    expect(lambda.shouldAddSourcemapSupport).toEqual(false);
    expect(lambda.awsLambdaHandler).toBeUndefined();
  });
});

@@ -1,6 +1,6 @@
 {
   "name": "vercel",
-  "version": "23.1.3-canary.74",
+  "version": "24.0.1-canary.0",
   "preferGlobal": true,
   "license": "Apache-2.0",
   "description": "The command-line interface for Vercel",
@@ -43,14 +43,12 @@
     "node": ">= 12"
   },
   "dependencies": {
-    "@vercel/build-utils": "2.13.1-canary.1",
-    "@vercel/go": "1.2.4-canary.6",
-    "@vercel/node": "1.12.2-canary.9",
-    "@vercel/python": "2.1.2-canary.4",
-    "@vercel/ruby": "1.2.10-canary.2",
-    "update-notifier": "4.1.0",
-    "vercel-plugin-middleware": "0.0.0-canary.26",
-    "vercel-plugin-node": "1.12.2-canary.41"
+    "@vercel/build-utils": "2.14.1-canary.0",
+    "@vercel/go": "1.3.1-canary.0",
+    "@vercel/node": "1.13.1-canary.0",
+    "@vercel/python": "2.2.1-canary.0",
+    "@vercel/ruby": "1.3.1-canary.0",
+    "update-notifier": "4.1.0"
   },
   "devDependencies": {
     "@next/env": "11.1.2",
@@ -90,11 +88,11 @@
     "@types/update-notifier": "5.1.0",
     "@types/which": "1.3.2",
     "@types/write-json-file": "2.2.1",
-    "@vercel/client": "10.2.3-canary.52",
+    "@vercel/client": "10.3.1-canary.0",
     "@vercel/fetch-retry": "5.0.3",
-    "@vercel/frameworks": "0.5.1-canary.21",
+    "@vercel/frameworks": "0.6.1-canary.0",
     "@vercel/ncc": "0.24.0",
-    "@vercel/nft": "0.17.0",
+    "@vercel/nft": "0.17.5",
     "@zeit/fun": "0.11.2",
     "@zeit/source-map-support": "0.6.2",
     "ajv": "6.12.2",

@@ -56,10 +56,6 @@ const help = () => {

     ${chalk.cyan(`$ ${pkgName} bisect --bad example-310pce9i0.vercel.app`)}

-    ${chalk.gray('–')} Bisect specifying a deployment that was working 3 days ago
-
-    ${chalk.cyan(`$ ${pkgName} bisect --good 3d`)}
-
     ${chalk.gray('–')} Automated bisect with a run script

     ${chalk.cyan(`$ ${pkgName} bisect --run ./test.sh`)}
@@ -201,7 +197,11 @@ export default async function main(client: Client): Promise<number> {

   if (badDeployment.target !== goodDeployment.target) {
     output.error(
-      `Bad deployment target "${badDeployment.target || 'preview'}" does not match good deployment target "${goodDeployment.target || 'preview'}"`
+      `Bad deployment target "${
+        badDeployment.target || 'preview'
+      }" does not match good deployment target "${
+        goodDeployment.target || 'preview'
+      }"`
     );
     return 1;
   }

@@ -1,911 +0,0 @@
|
||||
import { loadEnvConfig, processEnv } from '@next/env';
|
||||
import {
|
||||
execCommand,
|
||||
getScriptName,
|
||||
GlobOptions,
|
||||
scanParentDirs,
|
||||
spawnAsync,
|
||||
glob as buildUtilsGlob,
|
||||
detectFileSystemAPI,
|
||||
detectBuilders,
|
||||
PackageJson,
|
||||
} from '@vercel/build-utils';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import Sema from 'async-sema';
|
||||
import chalk from 'chalk';
|
||||
import { SpawnOptions } from 'child_process';
|
||||
import { assert } from 'console';
|
||||
import fs from 'fs-extra';
|
||||
import ogGlob from 'glob';
|
||||
import { dirname, isAbsolute, join, parse, relative } from 'path';
|
||||
import pluralize from 'pluralize';
|
||||
import Client from '../util/client';
|
||||
import { VercelConfig } from '../util/dev/types';
|
||||
import { emoji, prependEmoji } from '../util/emoji';
|
||||
import { CantParseJSONFile } from '../util/errors-ts';
|
||||
import getArgs from '../util/get-args';
|
||||
import handleError from '../util/handle-error';
|
||||
import confirm from '../util/input/confirm';
|
||||
import { isSettingValue } from '../util/is-setting-value';
|
||||
import cmd from '../util/output/cmd';
|
||||
import logo from '../util/output/logo';
|
||||
import param from '../util/output/param';
|
||||
import stamp from '../util/output/stamp';
|
||||
import { getCommandName, getPkgName } from '../util/pkg-name';
|
||||
import { loadCliPlugins } from '../util/plugins';
|
||||
import { findFramework } from '../util/projects/find-framework';
|
||||
import { VERCEL_DIR } from '../util/projects/link';
|
||||
import { readProjectSettings } from '../util/projects/project-settings';
|
||||
import readJSONFile from '../util/read-json-file';
|
||||
import pull from './pull';
|
||||
|
||||
const sema = new Sema(16, {
|
||||
capacity: 100,
|
||||
});
|
||||
|
||||
const help = () => {
|
||||
return console.log(`
|
||||
${chalk.bold(`${logo} ${getPkgName()} build`)}
|
||||
|
||||
${chalk.dim('Options:')}
|
||||
|
||||
-h, --help Output usage information
|
||||
-A ${chalk.bold.underline('FILE')}, --local-config=${chalk.bold.underline(
|
||||
'FILE'
|
||||
)} Path to the local ${'`vercel.json`'} file
|
||||
-Q ${chalk.bold.underline('DIR')}, --global-config=${chalk.bold.underline(
|
||||
'DIR'
|
||||
)} Path to the global ${'`.vercel`'} directory
|
||||
--cwd [path] The current working directory
|
||||
-d, --debug Debug mode [off]
|
||||
-y, --yes Skip the confirmation prompt
|
||||
|
||||
${chalk.dim('Examples:')}
|
||||
|
||||
${chalk.gray('–')} Build the project
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} build`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} build --cwd ./path-to-project`)}
|
||||
`);
|
||||
};
|
||||
|
||||
const OUTPUT_DIR = '.output';
|
||||
|
||||
export default async function main(client: Client) {
|
||||
if (process.env.__VERCEL_BUILD_RUNNING) {
|
||||
client.output.error(
|
||||
`${cmd(
|
||||
`${getPkgName()} build`
|
||||
)} must not recursively invoke itself. Check the Build Command in the Project Settings or the ${cmd(
|
||||
'build'
|
||||
)} script in ${cmd('package.json')}`
|
||||
);
|
||||
client.output.error(
|
||||
`Learn More: https://vercel.link/recursive-invocation-of-commands`
|
||||
);
|
||||
return 1;
|
||||
} else {
|
||||
process.env.__VERCEL_BUILD_RUNNING = '1';
|
||||
}
|
||||
|
||||
let argv;
|
||||
const buildStamp = stamp();
|
||||
try {
|
||||
argv = getArgs(client.argv.slice(2), {
|
||||
'--debug': Boolean,
|
||||
'--cwd': String,
|
||||
});
|
||||
} catch (err) {
|
||||
handleError(err);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (argv['--help']) {
|
||||
help();
|
||||
return 2;
|
||||
}
|
||||
|
||||
let cwd = argv['--cwd'] || process.cwd();
|
||||
|
||||
let project = await readProjectSettings(join(cwd, VERCEL_DIR));
|
||||
// If there are no project settings, only then do we pull them down
|
||||
while (!project?.settings) {
|
||||
const confirmed = await confirm(
|
||||
`No Project Settings found locally. Run ${getCommandName(
|
||||
'pull'
|
||||
)} for retrieving them?`,
|
||||
true
|
||||
);
|
||||
if (!confirmed) {
|
||||
client.output.print(`Aborted. No Project Settings retrieved.\n`);
|
||||
return 0;
|
||||
}
|
||||
const result = await pull(client);
|
||||
if (result !== 0) {
|
||||
return result;
|
||||
}
|
||||
project = await readProjectSettings(join(cwd, VERCEL_DIR));
|
||||
}
|
||||
|
||||
// If `rootDirectory` exists, then `baseDir` will be the repo's root directory.
|
||||
const baseDir = cwd;
|
||||
|
||||
cwd = project.settings.rootDirectory
|
||||
? join(cwd, project.settings.rootDirectory)
|
||||
: cwd;
|
||||
|
||||
// Load the environment
|
||||
const { combinedEnv, loadedEnvFiles } = loadEnvConfig(cwd, false, {
|
||||
info: () => ({}), // we don't want to log this yet.
|
||||
error: (...args: any[]) => client.output.error(args.join(' ')),
|
||||
});
|
||||
|
||||
// Set process.env with loaded environment variables
|
||||
processEnv(loadedEnvFiles);
|
||||
|
||||
const spawnOpts: {
|
||||
env: Record<string, string | undefined>;
|
||||
} = {
|
||||
env: { ...combinedEnv, VERCEL: '1' },
|
||||
};
|
||||
|
||||
process.chdir(cwd);
|
||||
|
||||
const pkg = await readJSONFile<PackageJson>('./package.json');
|
||||
if (pkg instanceof CantParseJSONFile) {
|
||||
throw pkg;
|
||||
}
|
||||
const vercelConfig = await readJSONFile<VercelConfig>('./vercel.json');
|
||||
if (vercelConfig instanceof CantParseJSONFile) {
|
||||
throw vercelConfig;
|
||||
}
|
||||
|
||||
if (!process.env.NOW_BUILDER) {
|
||||
// This validation is only necessary when
|
||||
// a user runs `vercel build` locally.
|
||||
const globFiles = await buildUtilsGlob('**', { cwd });
|
||||
const zeroConfig = await detectBuilders(Object.keys(globFiles), pkg);
|
||||
const { reason } = await detectFileSystemAPI({
|
||||
files: globFiles,
|
||||
projectSettings: project.settings,
|
||||
builders: zeroConfig.builders || [],
|
||||
pkg,
|
||||
vercelConfig,
|
||||
tag: '',
|
||||
enableFlag: true,
|
||||
});
|
||||
|
||||
if (reason) {
|
||||
client.output.error(`${cmd(`${getPkgName()} build`)} failed: ${reason}`);
|
||||
return 1;
|
||||
}
|
||||
}
|
||||
|
||||
const framework = findFramework(project.settings.framework);
|
||||
// If this is undefined, we bail. If it is null, then findFramework should return "Other",
|
||||
// so this should really never happen, but just in case....
|
||||
if (framework === undefined) {
|
||||
client.output.error(
|
||||
`Framework detection failed or is malformed. Please run ${getCommandName(
|
||||
'pull'
|
||||
)} again.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const buildState = { ...project.settings };
|
||||
const formatSetting = (
|
||||
name: string,
|
||||
override: string | null | undefined,
|
||||
defaults: typeof framework.settings.outputDirectory
|
||||
) =>
|
||||
` - ${chalk.bold(`${name}:`)} ${`${
|
||||
override
|
||||
? override + ` (override)`
|
||||
: 'placeholder' in defaults
|
||||
? chalk.italic(`${defaults.placeholder}`)
|
||||
: defaults.value
|
||||
}`}`;
|
||||
console.log(`Retrieved Project Settings:`);
|
||||
console.log(
|
||||
chalk.dim(` - ${chalk.bold(`Framework Preset:`)} ${framework.name}`)
|
||||
);
|
||||
console.log(
|
||||
chalk.dim(
|
||||
formatSetting(
|
||||
'Build Command',
|
||||
project.settings.buildCommand,
|
||||
framework.settings.buildCommand
|
||||
)
|
||||
)
|
||||
);
|
||||
console.log(
|
||||
chalk.dim(
|
||||
formatSetting(
|
||||
'Output Directory',
|
||||
project.settings.outputDirectory,
|
||||
framework.settings.outputDirectory
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
buildState.outputDirectory =
|
||||
project.settings.outputDirectory ||
|
||||
(isSettingValue(framework.settings.outputDirectory)
|
||||
? framework.settings.outputDirectory.value
|
||||
: null);
|
||||
buildState.rootDirectory = project.settings.rootDirectory;
|
||||
|
||||
if (loadedEnvFiles.length > 0) {
|
||||
console.log(
|
||||
`Loaded Environment Variables from ${loadedEnvFiles.length} ${pluralize(
|
||||
'file',
|
||||
loadedEnvFiles.length
|
||||
)}:`
|
||||
);
|
||||
for (let envFile of loadedEnvFiles) {
|
||||
console.log(chalk.dim(` - ${envFile.path}`));
|
||||
}
|
||||
}
|
||||
|
||||
  // Load plugins
  const debug = argv['--debug'];
  let plugins;
  try {
    plugins = await loadCliPlugins(cwd, client.output);
  } catch (error) {
    client.output.error('Failed to load CLI Plugins');
    handleError(error, { debug });
    return 1;
  }

  const origLog = console.log;
  const origErr = console.error;
  const prefixedLog = (
    prefix: string,
    args: any[],
    logger: (...args: any[]) => void
  ) => {
    if (typeof args[0] === 'string') {
      args[0] = `${prefix} ${args[0]}`;
    } else {
      args.unshift(prefix);
    }
    return logger(...args);
  };

  if (plugins?.pluginCount && plugins?.pluginCount > 0) {
    console.log(
      `Loaded ${plugins.pluginCount} CLI ${pluralize(
        'Plugin',
        plugins.pluginCount
      )}`
    );
    // preBuild Plugins
    if (plugins.preBuildPlugins.length > 0) {
      console.log(
        `Running ${plugins.pluginCount} CLI ${pluralize(
          'Plugin',
          plugins.pluginCount
        )} before Build Command:`
      );
      for (let item of plugins.preBuildPlugins) {
        const { name, plugin, color } = item;
        if (typeof plugin.preBuild === 'function') {
          const pluginStamp = stamp();
          const fullName = name + '.preBuild';
          const prefix = chalk.gray(' > ') + color(fullName + ':');
          client.output.debug(`Running ${fullName}:`);
          try {
            console.log = (...args: any[]) =>
              prefixedLog(prefix, args, origLog);
            console.error = (...args: any[]) =>
              prefixedLog(prefix, args, origErr);
            await plugin.preBuild();
            client.output.debug(
              `Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
            );
          } catch (error) {
            client.output.error(`${prefix} failed`);
            handleError(error, { debug });
            return 1;
          } finally {
            console.log = origLog;
            console.error = origErr;
          }
        }
      }
    }
  }

  // Clean the output directory
  fs.removeSync(join(cwd, OUTPUT_DIR));

  if (framework && process.env.VERCEL_URL && 'envPrefix' in framework) {
    for (const key of Object.keys(process.env)) {
      if (key.startsWith('VERCEL_')) {
        const newKey = `${framework.envPrefix}${key}`;
        // Set `process.env` and `spawnOpts.env` to make sure the variables are
        // available to the `build` step and the CLI Plugins.
        process.env[newKey] = process.env[newKey] || process.env[key];
        spawnOpts.env[newKey] = process.env[newKey];
      }
    }
  }

  // Required for Next.js to produce the correct `.nft.json` files.
  spawnOpts.env.NEXT_PRIVATE_OUTPUT_TRACE_ROOT = baseDir;

  // Yarn v2 PnP mode may be activated, so force
  // "node-modules" linker style
  const env = {
    YARN_NODE_LINKER: 'node-modules',
    ...spawnOpts.env,
  };

  if (typeof buildState.buildCommand === 'string') {
    console.log(`Running Build Command: ${cmd(buildState.buildCommand)}`);
    await execCommand(buildState.buildCommand, {
      ...spawnOpts,
      env,
      cwd,
    });
  } else if (fs.existsSync(join(cwd, 'package.json'))) {
    await runPackageJsonScript(
      client,
      cwd,
      ['vercel-build', 'now-build', 'build'],
      spawnOpts
    );
  } else if (typeof framework.settings.buildCommand.value === 'string') {
    console.log(
      `Running Build Command: ${cmd(framework.settings.buildCommand.value)}`
    );
    await execCommand(framework.settings.buildCommand.value, {
      ...spawnOpts,
      env,
      cwd,
    });
  }

  if (!fs.existsSync(join(cwd, OUTPUT_DIR))) {
    let dotNextDir: string | null = null;

    // If a custom `outputDirectory` was set, we'll need to verify
    // if it's `.next` output, or just static output.
    const userOutputDirectory = project.settings.outputDirectory;

    if (typeof userOutputDirectory === 'string') {
      if (fs.existsSync(join(cwd, userOutputDirectory, 'BUILD_ID'))) {
        dotNextDir = join(cwd, userOutputDirectory);
        client.output.debug(
          `Consider ${param(userOutputDirectory)} as ${param('.next')} output.`
        );
      }
    } else if (fs.existsSync(join(cwd, '.next'))) {
      dotNextDir = join(cwd, '.next');
      client.output.debug(`Found ${param('.next')} directory.`);
    }

    // We cannot rely on the `framework` alone, as it might be a static export,
    // and the current build might use a different project that's not in the settings.
    const isNextOutput = Boolean(dotNextDir);
    const nextExport = await getNextExportStatus(dotNextDir);
    const outputDir =
      isNextOutput && !nextExport ? OUTPUT_DIR : join(OUTPUT_DIR, 'static');
    const getDistDir = framework.getFsOutputDir || framework.getOutputDirName;
    const distDir =
      (nextExport?.exportDetail.outDirectory
        ? relative(cwd, nextExport.exportDetail.outDirectory)
        : false) ||
      dotNextDir ||
      userOutputDirectory ||
      (await getDistDir(cwd));

    await fs.ensureDir(join(cwd, outputDir));

    const copyStamp = stamp();
    client.output.spinner(
      `Copying files from ${param(distDir)} to ${param(outputDir)}`
    );
    const files = await glob(join(relative(cwd, distDir), '**'), {
      ignore: [
        'node_modules/**',
        '.vercel/**',
        '.env',
        '.env.*',
        '.*ignore',
        '_middleware.ts',
        '_middleware.mts',
        '_middleware.cts',
        '_middleware.mjs',
        '_middleware.cjs',
        '_middleware.js',
        'api/**',
        '.git/**',
        '.next/cache/**',
      ],
      nodir: true,
      dot: true,
      cwd,
      absolute: true,
    });
    await Promise.all(
      files.map(f =>
        smartCopy(
          client,
          f,
          distDir === '.'
            ? join(cwd, outputDir, relative(cwd, f))
            : f.replace(distDir, outputDir)
        )
      )
    );
    client.output.stopSpinner();
    console.log(
      `Copied ${files.length.toLocaleString()} files from ${param(
        distDir
      )} to ${param(outputDir)} ${copyStamp()}`
    );

    const buildManifestPath = join(cwd, OUTPUT_DIR, 'build-manifest.json');
    const routesManifestPath = join(cwd, OUTPUT_DIR, 'routes-manifest.json');

    if (!fs.existsSync(buildManifestPath)) {
      client.output.debug(
        `Generating build manifest: ${param(buildManifestPath)}`
      );
      const buildManifest = {
        version: 1,
        cache: framework.cachePattern ? [framework.cachePattern] : [],
      };
      await fs.writeJSON(buildManifestPath, buildManifest, { spaces: 2 });
    }

    if (!fs.existsSync(routesManifestPath)) {
      client.output.debug(
        `Generating routes manifest: ${param(routesManifestPath)}`
      );
      const routesManifest = {
        version: 3,
        pages404: true,
        basePath: '',
        redirects: framework.defaultRedirects ?? [],
        headers: framework.defaultHeaders ?? [],
        dynamicRoutes: [],
        dataRoutes: [],
        rewrites: framework.defaultRewrites ?? [],
      };
      await fs.writeJSON(
        join(cwd, OUTPUT_DIR, 'routes-manifest.json'),
        routesManifest,
        { spaces: 2 }
      );
    }

    // Special Next.js processing.
    if (nextExport) {
      client.output.debug('Found `next export` output.');

      const htmlFiles = await buildUtilsGlob(
        '**/*.html',
        join(cwd, OUTPUT_DIR, 'static')
      );

      if (nextExport.exportDetail.success !== true) {
        client.output.error(
          `Export of Next.js app failed. Please check your build logs.`
        );
        process.exit(1);
      }

      await fs.mkdirp(join(cwd, OUTPUT_DIR, 'server', 'pages'));
      await fs.mkdirp(join(cwd, OUTPUT_DIR, 'static'));

      await Promise.all(
        Object.keys(htmlFiles).map(async fileName => {
          await sema.acquire();

          const input = join(cwd, OUTPUT_DIR, 'static', fileName);
          const target = join(cwd, OUTPUT_DIR, 'server', 'pages', fileName);

          await fs.mkdirp(dirname(target));

          await fs.promises.rename(input, target).finally(() => {
            sema.release();
          });
        })
      );

      for (const file of [
        'BUILD_ID',
        'images-manifest.json',
        'routes-manifest.json',
        'build-manifest.json',
      ]) {
        const input = join(nextExport.dotNextDir, file);

        if (fs.existsSync(input)) {
          // Do not use `smartCopy`, since we want to overwrite if they already exist.
          await fs.copyFile(input, join(OUTPUT_DIR, file));
        }
      }
    } else if (isNextOutput) {
      // The contents of `.output/static` should be placed inside of `.output/static/_next/static`
      const tempStatic = '___static';
      await fs.rename(
        join(cwd, OUTPUT_DIR, 'static'),
        join(cwd, OUTPUT_DIR, tempStatic)
      );
      await fs.mkdirp(join(cwd, OUTPUT_DIR, 'static', '_next', 'static'));
      await fs.rename(
        join(cwd, OUTPUT_DIR, tempStatic),
        join(cwd, OUTPUT_DIR, 'static', '_next', 'static')
      );

      // Next.js might reference files from the `static` directory in `middleware-manifest.json`.
      // Since we move all files from `static` to `static/_next/static`, we'll need to change
      // those references as well and update the manifest file.
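      // For example (illustrative file name, not from the source): a manifest
      // entry such as `static/chunks/middleware.js` is rewritten below to
      // `static/_next/static/chunks/middleware.js`.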
      const middlewareManifest = join(
        cwd,
        OUTPUT_DIR,
        'server',
        'middleware-manifest.json'
      );
      if (fs.existsSync(middlewareManifest)) {
        const manifest = await fs.readJSON(middlewareManifest);
        Object.keys(manifest.middleware).forEach(key => {
          const files = manifest.middleware[key].files.map((f: string) => {
            if (f.startsWith('static/')) {
              const next = f.replace(/^static\//gm, 'static/_next/static/');
              client.output.debug(
                `Replacing file in \`middleware-manifest.json\`: ${f} => ${next}`
              );
              return next;
            }

            return f;
          });

          manifest.middleware[key].files = files;
        });

        await fs.writeJSON(middlewareManifest, manifest);
      }

      // We want to pick up directories for user-provided static files into
      // `.output/static`. More specifically, the `static` directory contents
      // would then be mounted to `.output/static/static`, and the `public`
      // directory contents would be mounted to `.output/static`. Old Next.js
      // versions allow `static`, and newer ones allow both, but since almost
      // nobody actually uses both, we can check for the existence of both and
      // pick the first match that we find (first `public`, then `static`).
      // We can't read both at the same time because that would mean we'd read
      // `public` for old Next.js versions that don't support it, which might
      // be breaking (and we don't want to tie `vercel build` to specific
      // framework versions).
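      // Resulting layout (illustrative paths, not from the source):
      //   public/favicon.ico -> .output/static/favicon.ico
      //   static/logo.png    -> .output/static/static/logo.png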
      const nextSrcDirectory = dirname(distDir);

      const publicFiles = await glob('public/**', {
        nodir: true,
        dot: true,
        cwd: nextSrcDirectory,
        absolute: true,
      });
      if (publicFiles.length > 0) {
        await Promise.all(
          publicFiles.map(f =>
            smartCopy(
              client,
              f,
              join(
                OUTPUT_DIR,
                'static',
                relative(join(dirname(distDir), 'public'), f)
              )
            )
          )
        );
      } else {
        const staticFiles = await glob('static/**', {
          nodir: true,
          dot: true,
          cwd: nextSrcDirectory,
          absolute: true,
        });
        await Promise.all(
          staticFiles.map(f =>
            smartCopy(
              client,
              f,
              join(
                OUTPUT_DIR,
                'static',
                'static',
                relative(join(dirname(distDir), 'static'), f)
              )
            )
          )
        );
      }

      // Regardless of the Next.js version, we make sure that it is compatible
      // with the Filesystem API. We get there by moving all the files needed
      // into the output directory's `inputs` folder. If Next.js is > 12, we
      // can read the .nft.json files directly. If there aren't .nft.json
      // files, we trace and create them. We then resolve the files in each
      // nft file list and move them into the "inputs" directory. We rename
      // them with hashes to prevent collisions and then update the related
      // .nft files accordingly to point to the newly named input files.
      // Again, all of this is so that Next.js works with the Filesystem API
      // (and so .output contains all inputs needed to run Next.js) and
      // `vc --prebuilt`.
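      // For reference, the `.nft.json` files written below have the shape
      // (illustrative entries, not from the source):
      //   { "version": 2, "files": ["../../chunks/123.js", "../_app.js"] }
      // where each entry is a path relative to the traced file's directory.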
      const nftFiles = await glob(join(OUTPUT_DIR, '**', '*.nft.json'), {
        nodir: true,
        dot: true,
        ignore: ['cache/**'],
        cwd,
        absolute: true,
      });

      // If there are no .nft.json files, we know that Next.js < 12. We then
      // execute the tracing on our own.
      if (nftFiles.length === 0) {
        const serverFiles = await glob(
          join(OUTPUT_DIR, 'server', 'pages', '**', '*.js'),
          {
            nodir: true,
            dot: true,
            cwd,
            ignore: ['webpack-runtime.js'],
            absolute: true,
          }
        );
        for (let f of serverFiles) {
          const { ext, dir } = parse(f);
          const { fileList } = await nodeFileTrace([f], {
            ignore: [
              relative(cwd, f),
              'node_modules/next/dist/pages/**/*',
              'node_modules/next/dist/compiled/webpack/(bundle4|bundle5).js',
              'node_modules/react/**/*.development.js',
              'node_modules/react-dom/**/*.development.js',
              'node_modules/use-subscription/**/*.development.js',
              'node_modules/sharp/**/*',
            ],
          });
          fileList.delete(relative(cwd, f));

          const nftFileName = f.replace(ext, '.js.nft.json');
          client.output.debug(`Creating ${nftFileName}`);

          await fs.writeJSON(nftFileName, {
            version: 2,
            files: Array.from(fileList).map(fileListEntry =>
              relative(dir, fileListEntry)
            ),
          });
        }
      }

      const requiredServerFilesPath = join(
        OUTPUT_DIR,
        'required-server-files.json'
      );

      if (fs.existsSync(requiredServerFilesPath)) {
        client.output.debug(`Resolve ${param('required-server-files.json')}.`);

        const requiredServerFilesJson = await fs.readJSON(
          requiredServerFilesPath
        );

        await fs.writeJSON(requiredServerFilesPath, {
          ...requiredServerFilesJson,
          appDir: '.',
          files: requiredServerFilesJson.files.map((i: string) => {
            const originalPath = join(requiredServerFilesJson.appDir, i);
            const relPath = join(OUTPUT_DIR, relative(distDir, originalPath));

            return relPath;
          }),
        });
      }
    }
  }

  // Build Plugins
  if (plugins?.buildPlugins && plugins.buildPlugins.length > 0) {
    console.log(
      `Running ${plugins.pluginCount} CLI ${pluralize(
        'Plugin',
        plugins.pluginCount
      )} after Build Command:`
    );
    let vercelConfig: VercelConfig = {};
    try {
      vercelConfig = await fs.readJSON(join(cwd, 'vercel.json'));
    } catch (error) {
      if (error.code !== 'ENOENT') {
        throw new Error(`Failed to read vercel.json: ${error.message}`);
      }
    }
    for (let item of plugins.buildPlugins) {
      const { name, plugin, color } = item;
      if (typeof plugin.build === 'function') {
        const pluginStamp = stamp();
        const fullName = name + '.build';
        const prefix = chalk.gray(' > ') + color(fullName + ':');
        client.output.debug(`Running ${fullName}:`);
        try {
          console.log = (...args: any[]) => prefixedLog(prefix, args, origLog);
          console.error = (...args: any[]) =>
            prefixedLog(prefix, args, origErr);
          await plugin.build({
            vercelConfig,
            workPath: cwd,
          });
          client.output.debug(
            `Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
          );
        } catch (error) {
          client.output.error(`${prefix} failed`);
          handleError(error, { debug });
          return 1;
        } finally {
          console.log = origLog;
          console.error = origErr;
        }
      }
    }
  }

  console.log(
    `${prependEmoji(
      `Build Completed in ${chalk.bold(OUTPUT_DIR)} ${chalk.gray(
        buildStamp()
      )}`,
      emoji('success')
    )}`
  );

  return 0;
}

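// When invoked from the build command above, `scriptNames` is tried in order:
// 'vercel-build', then 'now-build', then 'build' (presumably the first one
// present in the package.json "scripts" wins — see `getScriptName`; this note
// is an editorial assumption, not from the source).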
export async function runPackageJsonScript(
  client: Client,
  destPath: string,
  scriptNames: string | Iterable<string>,
  spawnOpts?: SpawnOptions
) {
  assert(isAbsolute(destPath));

  const { packageJson, cliType, lockfileVersion } = await scanParentDirs(
    destPath,
    true
  );
  const scriptName = getScriptName(
    packageJson,
    typeof scriptNames === 'string' ? [scriptNames] : scriptNames
  );
  if (!scriptName) return false;

  client.output.debug('Running user script...');
  const runScriptTime = Date.now();

  const opts: any = { cwd: destPath, ...spawnOpts };
  const env = (opts.env = { ...process.env, ...opts.env });

  if (cliType === 'npm') {
    opts.prettyCommand = `npm run ${scriptName}`;

    if (typeof lockfileVersion === 'number' && lockfileVersion >= 2) {
      // Ensure that npm 7 is at the beginning of the `$PATH`
      env.PATH = `/node16/bin-npm7:${env.PATH}`;
    }
  } else {
    opts.prettyCommand = `yarn run ${scriptName}`;

    // Yarn v2 PnP mode may be activated, so force "node-modules" linker style
    if (!env.YARN_NODE_LINKER) {
      env.YARN_NODE_LINKER = 'node-modules';
    }
  }

  console.log(`Running Build Command: ${cmd(opts.prettyCommand)}\n`);
  await spawnAsync(cliType, ['run', scriptName], opts);
  console.log(); // give it some room
  client.output.debug(`Script complete [${Date.now() - runScriptTime}ms]`);
  return true;
}

async function linkOrCopy(existingPath: string, newPath: string) {
  try {
    if (
      newPath.endsWith('.nft.json') ||
      newPath.endsWith('middleware-manifest.json') ||
      newPath.endsWith('required-server-files.json')
    ) {
      await fs.copy(existingPath, newPath, {
        overwrite: true,
      });
    } else {
      await fs.createLink(existingPath, newPath);
    }
  } catch (err: any) {
    // eslint-disable-line
    // If a symlink to the same file already exists
    // then trying to copy it will make an empty file from it.
    if (err['code'] === 'EEXIST') return;
    // In some VERY rare cases (1 in a thousand), symlink creation fails on Windows.
    // In that case, we just fall back to copying.
    // This issue is reproducible with "pnpm add @material-ui/icons@4.9.1"
    await fs.copy(existingPath, newPath, {
      overwrite: true,
    });
  }
}

async function smartCopy(client: Client, from: string, to: string) {
  await sema.acquire();
  try {
    client.output.debug(`Copying from ${from} to ${to}`);
    await linkOrCopy(from, to);
  } finally {
    sema.release();
  }
}

async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
  return new Promise((resolve, reject) => {
    ogGlob(pattern, options, (err, files) => {
      err ? reject(err) : resolve(files);
    });
  });
}

/**
 * The `export-detail.json` / `export-marker.json` files will only exist when
 * `next export` was used.
 */
async function getNextExportStatus(dotNextDir: string | null) {
  if (!dotNextDir) {
    return null;
  }

  const exportDetail: {
    success: boolean;
    outDirectory: string;
  } | null = await fs
    .readJson(join(dotNextDir, 'export-detail.json'))
    .catch(error => {
      if (error.code === 'ENOENT') {
        return null;
      }

      throw error;
    });

  if (!exportDetail) {
    return null;
  }

  const exportMarker: {
    version: 1;
    exportTrailingSlash: boolean;
    hasExportPathMap: boolean;
  } | null = await fs
    .readJSON(join(dotNextDir, 'export-marker.json'))
    .catch(error => {
      if (error.code === 'ENOENT') {
        return null;
      }

      throw error;
    });

  return {
    dotNextDir,
    exportDetail,
    exportMarker: {
      trailingSlash: exportMarker?.hasExportPathMap
        ? exportMarker.exportTrailingSlash
        : false,
    },
  };
}
@@ -3,7 +3,6 @@ export default new Map([
  ['aliases', 'alias'],
  ['billing', 'billing'],
  ['bisect', 'bisect'],
  ['build', 'build'],
  ['cc', 'billing'],
  ['cert', 'certs'],
  ['certs', 'certs'],

@@ -161,7 +161,8 @@ const main = async () => {
  // * a subcommand (as in: `vercel ls`)
  const targetOrSubcommand = argv._[2];

  if (targetOrSubcommand === 'build') {
  const betaCommands: string[] = [];
  if (betaCommands.includes(targetOrSubcommand)) {
    console.log(
      `${chalk.grey(
        `${getTitleName()} CLI ${
@@ -292,14 +293,7 @@ const main = async () => {

  let authConfig = null;

  const subcommandsWithoutToken = [
    'login',
    'logout',
    'help',
    'init',
    'update',
    'build',
  ];
  const subcommandsWithoutToken = ['login', 'logout', 'help', 'init', 'update'];

  if (authConfigExists) {
    try {
@@ -406,33 +400,20 @@ const main = async () => {
      } else if (commands.has(singular)) {
        alternative = singular;
      }
      if (targetOrSubcommand === 'build') {
        output.note(
          `If you wish to deploy the ${fileType} ${param(
            targetOrSubcommand
          )}, run ${getCommandName('deploy build')}.` +
      console.error(
        error(
          `The supplied argument ${param(targetOrSubcommand)} is ambiguous.` +
            `\nIf you wish to deploy the ${fileType} ${param(
              targetOrSubcommand
            )}, first run "cd ${targetOrSubcommand}". ` +
            (alternative
              ? `\nIf you wish to use the subcommand ${param(
                  targetOrSubcommand
                )}, use ${param(alternative)} instead.`
              : '')
        );
      } else {
        console.error(
          error(
            `The supplied argument ${param(targetOrSubcommand)} is ambiguous.` +
              `\nIf you wish to deploy the ${fileType} ${param(
                targetOrSubcommand
              )}, first run "cd ${targetOrSubcommand}". ` +
              (alternative
                ? `\nIf you wish to use the subcommand ${param(
                    targetOrSubcommand
                  )}, use ${param(alternative)} instead.`
                : '')
          )
        );
        return 1;
      }
        )
      );
      return 1;
    }

    if (subcommandExists) {
@@ -630,9 +611,6 @@ const main = async () => {
    case 'bisect':
      func = await import('./commands/bisect');
      break;
    case 'build':
      func = await import('./commands/build');
      break;
    case 'certs':
      func = await import('./commands/certs');
      break;

@@ -40,7 +40,6 @@ import {
  detectApiExtensions,
  spawnCommand,
  isOfficialRuntime,
  detectFileSystemAPI,
} from '@vercel/build-utils';
import frameworkList from '@vercel/frameworks';

@@ -90,7 +89,6 @@ import {
} from './types';
import { ProjectEnvVariable, ProjectSettings } from '../../types';
import exposeSystemEnvs from './expose-system-envs';
import { loadCliPlugins } from '../plugins';

const frontendRuntimeSet = new Set(
  frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
@@ -600,32 +598,6 @@ export default class DevServer {
      );
    }

    const { reason, metadata } = await detectFileSystemAPI({
      files,
      builders: builders || [],
      projectSettings: projectSettings || this.projectSettings || {},
      vercelConfig,
      pkg,
      tag: '',
      enableFlag: true,
    });

    if (reason) {
      if (metadata.hasMiddleware) {
        this.output.error(
          `Detected middleware usage which requires the latest API. ${reason}`
        );
        await this.exit();
      } else if (metadata.plugins.length > 0) {
        this.output.error(
          `Detected CLI plugins which require the latest API. ${reason}`
        );
        await this.exit();
      } else {
        this.output.warn(`Unable to use latest API. ${reason}`);
      }
    }

    if (builders) {
      if (this.devCommand) {
        builders = builders.filter(filterFrontendBuilds);
@@ -1377,6 +1349,7 @@ export default class DevServer {
    return false;
  };

  /*
  runDevMiddleware = async (
    req: http.IncomingMessage,
    res: http.ServerResponse
@@ -1400,6 +1373,7 @@ export default class DevServer {
      };
    }
  };
  */

  /**
   * Serve project directory as a v2 deployment.
@@ -1468,6 +1442,7 @@ export default class DevServer {
    let prevUrl = req.url;
    let prevHeaders: HttpHeadersConfig = {};

    /*
    const middlewareResult = await this.runDevMiddleware(req, res);

    if (middlewareResult) {
@@ -1497,6 +1472,7 @@ export default class DevServer {
        prevUrl = url.format(origUrl);
      }
    }
    */

    for (const phase of phases) {
      statusCode = undefined;

@@ -1,76 +0,0 @@
import code from '../util/output/code';
import { getColorForPkgName } from '../util/output/color-name-cache';
import cliPkgJson from '../util/pkg';
import { scanParentDirs } from '@vercel/build-utils';
import { Output } from './output';

const VERCEL_PLUGIN_PREFIX = 'vercel-plugin-';

export async function loadCliPlugins(cwd: string, output: Output) {
  const { packageJson } = await scanParentDirs(cwd, true);

  let pluginCount = 0;
  const preBuildPlugins = [];
  const buildPlugins = [];
  const devServerPlugins = [];
  const devMiddlewarePlugins = [];
  const deps = new Set(
    [
      ...Object.keys(packageJson?.dependencies || {}),
      ...Object.keys(packageJson?.devDependencies || {}),
      ...Object.keys(cliPkgJson.dependencies),
    ].filter(dep => dep.startsWith(VERCEL_PLUGIN_PREFIX))
  );

  for (let dep of deps) {
    pluginCount++;
    const resolved = require.resolve(dep, {
      paths: [cwd, process.cwd(), __dirname],
    });
    let plugin;
    try {
      plugin = require(resolved);

      const color = getColorForPkgName(dep);
      if (typeof plugin.preBuild === 'function') {
        preBuildPlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
      if (typeof plugin.build === 'function') {
        buildPlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
      if (typeof plugin.startDevServer === 'function') {
        devServerPlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
      if (typeof plugin.runDevMiddleware === 'function') {
        devMiddlewarePlugins.push({
          plugin,
          name: dep,
          color,
        });
      }
    } catch (error) {
      output.error(`Failed to import ${code(dep)}`);
      throw error;
    }
  }

  return {
    pluginCount,
    preBuildPlugins,
    buildPlugins,
    devServerPlugins,
    devMiddlewarePlugins,
  };
}
@@ -23,7 +23,6 @@ const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);

export const VERCEL_DIR = '.vercel';
export const VERCEL_OUTPUT_DIR = '.output';
export const VERCEL_DIR_FALLBACK = '.now';
export const VERCEL_DIR_README = 'README.txt';
export const VERCEL_DIR_PROJECT = 'project.json';
@@ -256,13 +255,6 @@ export async function linkFolderToProject(
    contentModified = true;
  }

  if (!gitIgnore.split(EOL).includes(VERCEL_OUTPUT_DIR)) {
    gitIgnore += `${
      gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
    }${VERCEL_OUTPUT_DIR}${EOL}`;
    contentModified = true;
  }

  if (contentModified) {
    await writeFile(gitIgnorePath, gitIgnore);
    isGitIgnoreUpdated = true;

@@ -1,4 +1,3 @@
.next
yarn.lock
.vercel
.output
.vercel
@@ -367,7 +367,7 @@ module.exports = async function prepare(session, binaryPath) {
  },
  'project-link-gitignore': {
    'package.json': '{}',
    '.gitignore': '.output',
    '.gitignore': '',
  },
  'project-link-legacy': {
    'index.html': 'Hello',

68
packages/cli/test/integration.js
vendored
@@ -2271,62 +2271,6 @@ test('[vercel dev] fails when development command calls vercel dev recursively',
  );
});

test('[vercel build] fails when build command calls vercel build recursively', async t => {
  const dir = fixture('build-fail-on-recursion-command');
  const projectName = `build-fail-on-recursion-command-${
    Math.random().toString(36).split('.')[1]
  }`;

  const build = execa(binaryPath, ['build', ...defaultArgs], {
    cwd: dir,
    reject: false,
  });

  await waitForPrompt(build, chunk =>
    chunk.includes('No Project Settings found locally')
  );
  build.stdin.write('yes\n');

  await setupProject(build, projectName, {
    buildCommand: `${binaryPath} build`,
  });

  const { exitCode, stderr } = await build;

  t.is(exitCode, 1);
  t.true(
    stderr.includes('must not recursively invoke itself'),
    `Received instead: "${stderr}"`
  );
});

test('[vercel build] fails when build script calls vercel build recursively', async t => {
  const dir = fixture('build-fail-on-recursion-script');
  const projectName = `build-fail-on-recursion-script-${
    Math.random().toString(36).split('.')[1]
  }`;

  const build = execa(binaryPath, ['build', ...defaultArgs], {
    cwd: dir,
    reject: false,
  });

  await waitForPrompt(build, chunk =>
    chunk.includes('No Project Settings found locally')
  );
  build.stdin.write('yes\n');

  await setupProject(build, projectName);

  const { exitCode, stderr } = await build;

  t.is(exitCode, 1);
  t.true(
    stderr.includes('must not recursively invoke itself'),
    `Received instead: "${stderr}"`
  );
});

test('`vercel rm` removes a deployment', async t => {
  const directory = fixture('static-deployment');

@@ -2784,7 +2728,7 @@ test('should show prompts to set up project during first deploy', async t => {

  // Ensure .gitignore is created
  const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
  t.is(gitignore, '.vercel\n.output\n');
  t.is(gitignore, '.vercel\n');

  // Ensure .vercel/project.json and .vercel/README.txt are created
  t.is(
@@ -3353,7 +3297,7 @@ test('[vc link] should show prompts to set up project', async t => {

  // Ensure .gitignore is created
  const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
  t.is(gitignore, '.vercel\n.output\n');
  t.is(gitignore, '.vercel\n');

  // Ensure .vercel/project.json and .vercel/README.txt are created
  t.is(
@@ -3388,7 +3332,7 @@ test('[vc link --confirm] should not show prompts and autolink', async t => {

  // Ensure .gitignore is created
  const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
  t.is(gitignore, '.vercel\n.output\n');
  t.is(gitignore, '.vercel\n');

  // Ensure .vercel/project.json and .vercel/README.txt are created
  t.is(
@@ -3423,7 +3367,7 @@ test('[vc link] should not duplicate paths in .gitignore', async t => {

  // Ensure .gitignore is created
  const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
  t.is(gitignore, '.output\n.vercel\n');
  t.is(gitignore, '.vercel\n');
});

test('[vc dev] should show prompts to set up project', async t => {
@@ -3447,7 +3391,7 @@ test('[vc dev] should show prompts to set up project', async t => {

  // Ensure .gitignore is created
  const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
  t.is(gitignore, '.vercel\n.output\n');
  t.is(gitignore, '.vercel\n');

  // Ensure .vercel/project.json and .vercel/README.txt are created
  t.is(
@@ -3514,7 +3458,7 @@ test('[vc link] should show project prompts but not framework when `builds` defi

  // Ensure .gitignore is created
  const gitignore = await readFile(path.join(dir, '.gitignore'), 'utf8');
  t.is(gitignore, '.vercel\n.output\n');
  t.is(gitignore, '.vercel\n');

  // Ensure .vercel/project.json and .vercel/README.txt are created
  t.is(

@@ -336,6 +336,7 @@ describe('DevServer', () => {
    })
  );

  /*
  it(
    'should support edge middleware',
    testFixture('edge-middleware', async server => {
@@ -394,4 +395,5 @@ describe('DevServer', () => {
      expect(body).toStrictEqual('is strict mode? yes');
    })
  );
  */
});

@@ -1,6 +1,6 @@
{
  "name": "@vercel/client",
  "version": "10.2.3-canary.52",
  "version": "10.3.1-canary.0",
  "main": "dist/index.js",
  "typings": "dist/index.d.ts",
  "homepage": "https://vercel.com",
@@ -40,7 +40,7 @@
    ]
  },
  "dependencies": {
    "@vercel/build-utils": "2.13.1-canary.1",
    "@vercel/build-utils": "2.14.1-canary.0",
    "@zeit/fetch": "5.2.0",
    "async-retry": "1.2.3",
    "async-sema": "3.0.0",

6
packages/frameworks/logos/sanity.svg
Normal file
@@ -0,0 +1,6 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 128 128" height="128" width="128">
  <rect fill="#F03E2F" height="128" width="128"></rect>
  <path fill="white" d="M39.4229 33.1629C39.4229 44.1614 46.3362 50.7055 60.1767 54.1563L74.8429 57.4971C87.9417 60.453 95.9185 67.7945 95.9185 79.7554C96.0204 84.9662 94.296 90.053 91.0345 94.1634C91.0345 82.23 84.751 75.7822 69.595 71.9052L55.1947 68.6881C43.6633 66.1035 34.7628 60.068 34.7628 47.076C34.7021 42.0589 36.3415 37.1644 39.4229 33.1629Z"></path>
  <path fill="#F9B1AB" d="M82.0221 76.827C88.2776 80.759 91.0205 86.2583 91.0205 94.1497C85.8426 100.666 76.7462 104.323 66.0545 104.323C48.0576 104.323 35.4626 95.6207 32.6637 80.4978H49.9468C52.172 87.4406 58.0636 90.6577 65.9285 90.6577C75.5287 90.6577 81.9102 85.6258 82.0361 76.7995"></path>
  <path fill="#F9B1AB" d="M48.4074 49.4682C45.5509 47.8004 43.2073 45.404 41.6255 42.5332C40.0437 39.6624 39.2825 36.4244 39.423 33.1629C44.419 26.7013 53.1095 22.7556 63.7033 22.7556C82.0361 22.7556 92.6439 32.2693 95.2608 45.66H78.6354C76.8021 40.3807 72.212 36.27 63.8433 36.27C54.9008 36.27 48.7992 41.3843 48.4494 49.4682"></path>
</svg>
@@ -1,6 +1,6 @@
{
  "name": "@vercel/frameworks",
  "version": "0.5.1-canary.21",
  "version": "0.6.1-canary.0",
  "main": "./dist/frameworks.js",
  "types": "./dist/frameworks.d.ts",
  "files": [
@@ -20,7 +20,7 @@
    "@types/js-yaml": "3.12.1",
    "@types/node": "12.0.4",
    "@types/node-fetch": "2.5.8",
    "@vercel/routing-utils": "1.11.4-canary.6",
    "@vercel/routing-utils": "1.12.0",
    "ajv": "6.12.2",
    "typescript": "4.3.4"
  }

@@ -2006,6 +2006,50 @@ export const frameworks = [
      },
    ],
  },
  {
    name: 'Sanity',
    slug: 'sanity',
    demo: 'https://sanity-studio-template.vercel.app',
    logo: 'https://raw.githubusercontent.com/vercel/vercel/main/packages/frameworks/logos/sanity.svg',
    tagline: 'The structured content platform.',
    description: 'A Sanity Studio',
    website: 'https://www.sanity.io',
    envPrefix: 'SANITY_STUDIO_',
    detectors: {
      every: [
        {
          path: 'sanity.json',
        },
      ],
    },
    settings: {
      installCommand: {
        placeholder: '`yarn install` or `npm install`',
      },
      buildCommand: {
        placeholder: '`npm run build` or `sanity build`',
        value: 'sanity build',
      },
      devCommand: {
        value: 'sanity start --port $PORT',
      },
      outputDirectory: {
        value: 'dist',
      },
    },
    dependency: '@sanity/cli',
    getOutputDirName: async () => 'dist',
    defaultRoutes: [
      {
        handle: 'filesystem',
      },
      {
        src: '/(.*)',
        dest: '/index.html',
      },
    ],
  },
  {
    name: 'Other',
    slug: null,

@@ -1,6 +1,6 @@
{
  "name": "@vercel/go",
  "version": "1.2.4-canary.6",
  "version": "1.3.1-canary.0",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/go",
@@ -24,7 +24,7 @@
    "@types/fs-extra": "^5.0.5",
    "@types/node-fetch": "^2.3.0",
    "@types/tar": "^4.0.0",
    "@vercel/build-utils": "2.13.1-canary.1",
    "@vercel/build-utils": "2.14.1-canary.0",
    "@vercel/ncc": "0.24.0",
    "async-retry": "1.3.1",
    "execa": "^1.0.0",

@@ -1,2 +0,0 @@
entries.js
dist
2
packages/middleware/.gitignore
vendored
@@ -1,2 +0,0 @@
/dist
/test/fixtures/*/.output
@@ -1,30 +0,0 @@
#!/usr/bin/env node
const fs = require('fs-extra');
const execa = require('execa');
const { join } = require('path');

async function main() {
  const srcDir = join(__dirname, 'src');
  const outDir = join(__dirname, 'dist');

  // Start fresh
  await fs.remove(outDir);

  await execa(
    'ncc',
    ['build', join(srcDir, 'index.ts'), '-o', outDir, '--external', 'esbuild'],
    {
      stdio: 'inherit',
    }
  );

  await fs.copyFile(
    join(__dirname, 'src/entries.js'),
    join(outDir, 'entries.js')
  );
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
@@ -1,59 +0,0 @@
{
  "name": "vercel-plugin-middleware",
  "version": "0.0.0-canary.26",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "",
  "repository": {
    "type": "git",
    "url": "https://github.com/vercel/vercel.git",
    "directory": "packages/middleware"
  },
  "scripts": {
    "build": "node build",
    "test-unit": "jest",
    "prepublishOnly": "node build"
  },
  "files": [
    "dist"
  ],
  "dependencies": {
    "esbuild": "0.13.12"
  },
  "devDependencies": {
    "@peculiar/webcrypto": "1.2.0",
    "@types/cookie": "0.4.1",
    "@types/glob": "7.2.0",
    "@types/http-proxy": "1.17.7",
    "@types/jest": "27.0.2",
    "@types/node": "16.11.6",
    "@types/node-fetch": "^2",
    "@types/ua-parser-js": "0.7.36",
    "@types/uuid": "8.3.1",
    "@vercel/build-utils": "2.13.1-canary.1",
    "@vercel/ncc": "0.24.0",
    "cookie": "0.4.1",
    "formdata-node": "4.3.1",
    "glob": "7.2.0",
    "http-proxy": "1.18.1",
    "node-fetch": "^2",
    "ua-parser-js": "1.0.2",
    "url": "0.11.0",
    "uuid": "8.3.2",
    "web-streams-polyfill": "3.1.1"
  },
  "jest": {
    "preset": "ts-jest",
    "globals": {
      "ts-jest": {
        "diagnostics": false,
        "isolatedModules": true
      }
    },
    "verbose": false,
    "testEnvironment": "node",
    "testMatch": [
      "<rootDir>/test/**/*.test.ts"
    ]
  }
}
@@ -1,18 +0,0 @@
import * as middleware from './_temp_middleware';
_ENTRIES = typeof _ENTRIES === 'undefined' ? {} : _ENTRIES;
_ENTRIES['middleware_pages/_middleware'] = {
  default: async function (ev) {
    const result = await middleware.default(ev.request, ev);
    return {
      promise: Promise.resolve(),
      waitUntil: Promise.resolve(),
      response:
        result ||
        new Response(null, {
          headers: {
            'x-middleware-next': 1,
          },
        }),
    };
  },
};
@@ -1,52 +0,0 @@
import path from 'path';
import * as esbuild from 'esbuild';

const processInjectFile = `
// envOverride is passed by esbuild plugin
const env = envOverride
function cwd() {
  return '/'
}
function chdir(dir) {
  throw new Error('process.chdir is not supported')
}
export const process = {
  argv: [],
  env,
  chdir,
  cwd,
};
`;

export function nodeProcessPolyfillPlugin({ env = {} } = {}): esbuild.Plugin {
  return {
    name: 'node-process-polyfill',
    setup({ initialOptions, onResolve, onLoad }) {
      onResolve({ filter: /_virtual-process-polyfill_\.js/ }, ({ path }) => {
        return {
          path,
          sideEffects: false,
        };
      });

      onLoad({ filter: /_virtual-process-polyfill_\.js/ }, () => {
        const contents = `const envOverride = ${JSON.stringify(
          env
        )};\n${processInjectFile}`;
        return {
          loader: 'js',
          contents,
        };
      });

      const polyfills = [
        path.resolve(__dirname, '_virtual-process-polyfill_.js'),
      ];
      if (initialOptions.inject) {
        initialOptions.inject.push(...polyfills);
      } else {
        initialOptions.inject = [...polyfills];
      }
    },
  };
}
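// Usage note (derived from index.ts below, where this plugin is passed to
// esbuild as `plugins: [nodeProcessPolyfillPlugin({ env: process.env })]`):
// the bundled middleware then sees a stubbed `process` object whose `env` is
// frozen at build time and whose `chdir` throws.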
@@ -1,341 +0,0 @@
import util from 'util';
import { extname, join, basename } from 'path';
import * as esbuild from 'esbuild';
import { promises as fsp } from 'fs';
import { IncomingMessage, ServerResponse } from 'http';
import libGlob from 'glob';
import Proxy from 'http-proxy';
import { _experimental_updateFunctionsManifest } from '@vercel/build-utils';

import { run } from './websandbox';
import type { FetchEventResult } from './websandbox/types';

import { ParsedUrlQuery, stringify as stringifyQs } from 'querystring';
import {
  format as formatUrl,
  parse as parseUrl,
  UrlWithParsedQuery,
} from 'url';
import { toNodeHeaders } from './websandbox/utils';
import { nodeProcessPolyfillPlugin } from './esbuild-plugins';

const glob = util.promisify(libGlob);
const SUPPORTED_EXTENSIONS = ['.js', '.ts'];

// File name of the `entries.js` file that gets copied into the
// project directory. Use a name that is unlikely to conflict.
const TMP_ENTRIES_NAME = '.output/inputs/middleware/___vc_entries.js';
const TMP_MIDDLEWARE_BUNDLE = '.output/inputs/middleware/_temp_middleware.js';

async function getMiddlewareFile(workingDirectory: string) {
  // Only the root-level `_middleware.*` files are considered.
  // For more granular routing, the Project's Framework (i.e. Next.js)
  // middleware support should be used.
  const middlewareFiles = await glob(join(workingDirectory, '_middleware.*'));

  if (middlewareFiles.length === 0) {
    // No middleware file at the root of the project, so bail...
    return;
  }

  if (middlewareFiles.length > 1) {
    throw new Error(
      `Only one middleware file is allowed. Found: ${middlewareFiles.join(
        ', '
      )}`
    );
  }

  const ext = extname(middlewareFiles[0]);
  if (!SUPPORTED_EXTENSIONS.includes(ext)) {
    throw new Error(`Unsupported file type: ${ext}`);
  }

  return middlewareFiles[0];
}

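// e.g. (illustrative paths): `<root>/_middleware.ts` is picked up here, while
// nested files such as `pages/_middleware.ts` are intentionally left to the
// framework's own middleware support.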
export async function build({ workPath }: { workPath: string }) {
  const entriesPath = join(workPath, TMP_ENTRIES_NAME);
  const transientFilePath = join(workPath, TMP_MIDDLEWARE_BUNDLE);
  const middlewareFile = await getMiddlewareFile(workPath);
  if (!middlewareFile) return;

  console.log('Compiling middleware file: %j', middlewareFile);

  /**
   * Two builds happen here, because esbuild doesn't offer a way to add a banner
   * to individual input files, and the entries wrapper relies on running in
   * non-strict mode to access the ENTRIES global.
   *
   * To work around this, we bundle the middleware directly and add
   * 'use strict'; to make the entire bundle run in strict mode. We then bundle
   * a second time, adding the global ENTRIES wrapper and preserving the
   * 'use strict' for the entire scope of the original bundle.
   */
  try {
    await esbuild.build({
      entryPoints: [middlewareFile],
      bundle: true,
      absWorkingDir: workPath,
      outfile: transientFilePath,
      banner: {
        js: '"use strict";',
      },
      plugins: [nodeProcessPolyfillPlugin({ env: process.env })],
      format: 'cjs',
    });
    // Create `_ENTRIES` wrapper
    await fsp.copyFile(join(__dirname, 'entries.js'), entriesPath);
    await esbuild.build({
      entryPoints: [entriesPath],
      bundle: true,
      absWorkingDir: workPath,
      outfile: join(workPath, '.output/server/pages/_middleware.js'),
    });
  } finally {
    await fsp.unlink(transientFilePath);
    await fsp.unlink(entriesPath);
  }

  const fileName = basename(middlewareFile);
  const pages: { [key: string]: any } = {};

  pages[fileName] = {
    runtime: 'web',
    env: [],
    files: ['server/pages/_middleware.js'],
    name: 'pages/_middleware',
    page: '/',
    regexp: '^/.*$',
    sortingIndex: 1,
  };

  await _experimental_updateFunctionsManifest({ workPath, pages });
}

const stringifyQuery = (req: IncomingMessage, query: ParsedUrlQuery) => {
  const initialQueryValues = Object.values((req as any).__NEXT_INIT_QUERY);

  return stringifyQs(query, undefined, undefined, {
    encodeURIComponent(value: any) {
      if (initialQueryValues.some(val => val === value)) {
        return encodeURIComponent(value);
      }
      return value;
    },
  });
};

// eslint-disable-next-line
async function runMiddlewareCatchAll(
  req: IncomingMessage,
  res: ServerResponse,
  requestId: string,
  name: string,
  path: string
) {
  let result: FetchEventResult | null = null;
  const parsedUrl = parseUrl(req.url!, true);
  try {
    result = await runMiddleware({
      request: req,
      response: res,
      name: name,
      path,
      requestId: requestId,
      parsedUrl,
      parsed: parseUrl(req.url!, true),
    });
  } catch (err) {
    console.error(err);
    return { finished: true, error: err };
  }

  if (result === null) {
    return { finished: true };
  }

  if (
    !result.response.headers.has('x-middleware-rewrite') &&
    !result.response.headers.has('x-middleware-next') &&
    !result.response.headers.has('Location')
  ) {
    result.response.headers.set('x-middleware-refresh', '1');
  }

  result.response.headers.delete('x-middleware-next');

  for (const [key, value] of Object.entries(
    toNodeHeaders(result.response.headers)
  )) {
    if (key !== 'content-encoding' && value !== undefined) {
      res.setHeader(key, value);
    }
  }

  const preflight =
    req.method === 'HEAD' && req.headers['x-middleware-preflight'];

  if (preflight) {
    res.writeHead(200);
    res.end();
    return {
      finished: true,
    };
  }

  res.statusCode = result.response.status;
  res.statusMessage = result.response.statusText;

  const location = result.response.headers.get('Location');
  if (location) {
    res.statusCode = result.response.status;
    if (res.statusCode === 308) {
      res.setHeader('Refresh', `0;url=${location}`);
    }

    res.end();
    return {
      finished: true,
    };
  }

  if (result.response.headers.has('x-middleware-rewrite')) {
    const rewrite = result.response.headers.get('x-middleware-rewrite')!;
    const rewriteParsed = parseUrl(rewrite, true);
    if (rewriteParsed.protocol) {
      return proxyRequest(req, res, rewriteParsed);
    }

    (req as any)._nextRewroteUrl = rewrite;
    (req as any)._nextDidRewrite = (req as any)._nextRewroteUrl !== req.url;

    return {
      finished: false,
      pathname: rewriteParsed.pathname,
      query: {
        ...parsedUrl.query,
        ...rewriteParsed.query,
      },
    };
  }

  if (result.response.headers.has('x-middleware-refresh')) {
    res.writeHead(result.response.status);

    if (result.response.body instanceof Buffer) {
      res.write(result.response.body);
    } else {
      //@ts-ignore
      for await (const chunk of result.response.body || []) {
        res.write(chunk);
      }
    }
    res.end();
    return {
      finished: true,
    };
  }

  return {
    finished: false,
  };
}

const proxyRequest = async (
  req: IncomingMessage,
  res: ServerResponse,
  parsedUrl: UrlWithParsedQuery
) => {
  const { query } = parsedUrl;
  delete (parsedUrl as any).query;
  parsedUrl.search = stringifyQuery(req, query);

  const target = formatUrl(parsedUrl);
  const proxy = new Proxy({
    target,
    changeOrigin: true,
    ignorePath: true,
    xfwd: true,
    proxyTimeout: 30_000, // limit proxying to 30 seconds
  });

  await new Promise((proxyResolve, proxyReject) => {
    let finished = false;

    proxy.on('proxyReq', (proxyReq: any) => {
      proxyReq.on('close', () => {
        if (!finished) {
          finished = true;
          proxyResolve(true);
        }
      });
    });
    proxy.on('error', (err: any) => {
      if (!finished) {
        finished = true;
        proxyReject(err);
      }
    });
    proxy.web(req, res);
  });

  return {
    finished: true,
  };
};
|
||||
async function runMiddleware(params: {
|
||||
request: IncomingMessage;
|
||||
response: ServerResponse;
|
||||
parsedUrl: UrlWithParsedQuery;
|
||||
parsed: UrlWithParsedQuery;
|
||||
requestId: string;
|
||||
name: string;
|
||||
path: string;
|
||||
}): Promise<FetchEventResult | null> {
|
||||
const page: { name?: string; params?: { [key: string]: string } } = {};
|
||||
let result: FetchEventResult | null = null;
|
||||
|
||||
result = await run({
|
||||
name: params.name,
|
||||
path: params.path,
|
||||
request: {
|
||||
headers: params.request.headers,
|
||||
method: params.request.method || 'GET',
|
||||
url: params.request.url!,
|
||||
// url: (params.request as any).__NEXT_INIT_URL,
|
||||
page,
|
||||
},
|
||||
});
|
||||
|
||||
result.waitUntil.catch((error: any) => {
|
||||
console.error(`Uncaught: middleware waitUntil errored`, error);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
// Should run the middleware in the `vm` sandbox and return the result
|
||||
// back to `vercel dev`. If no middleware file exists then this function
|
||||
// should return `finished: false` (very quickly, since this is being
|
||||
// invoked for every HTTP request!).
|
||||
export async function runDevMiddleware(
|
||||
req: IncomingMessage,
|
||||
res: ServerResponse,
|
||||
workingDirectory: string
|
||||
): ReturnType<typeof runMiddlewareCatchAll> {
|
||||
const middlewareFile = await getMiddlewareFile(workingDirectory);
|
||||
if (!middlewareFile) {
|
||||
return {
|
||||
finished: false,
|
||||
};
|
||||
}
|
||||
return runMiddlewareCatchAll(
|
||||
req,
|
||||
res,
|
||||
'',
|
||||
basename(middlewareFile),
|
||||
middlewareFile
|
||||
);
|
||||
}
|
||||
@@ -1,65 +0,0 @@
import type { RequestData, FetchEventResult } from './types';
import { DeprecationError } from './error';
import { fromNodeHeaders } from './utils';
import { NextFetchEvent } from './spec-extension/fetch-event';
import { NextRequest, RequestInit } from './spec-extension/request';
import { SpecResponse } from './spec-extension/response';
import { waitUntilSymbol } from './spec-compliant/fetch-event';
import { Response } from 'node-fetch';

export async function adapter(params: {
  handler: (request: NextRequest, event: NextFetchEvent) => Promise<Response>;
  page: string;
  request: RequestData;
}): Promise<FetchEventResult> {
  const url = params.request.url.startsWith('/')
    ? `https://${params.request.headers.host}${params.request.url}`
    : params.request.url;

  const request = new NextRequestHint({
    page: params.page,
    input: url,
    init: {
      geo: params.request.geo,
      //@ts-ignore
      headers: fromNodeHeaders(params.request.headers),
      ip: params.request.ip,
      method: params.request.method,
      page: params.request.page,
    },
  });

  const event = new NextFetchEvent({ request, page: params.page });
  const original = await params.handler(request, event);

  return {
    response: original || SpecResponse.next(),
    waitUntil: Promise.all(event[waitUntilSymbol]),
  };
}

class NextRequestHint extends NextRequest {
  sourcePage: string;

  constructor(params: {
    init: RequestInit;
    input: Request | string;
    page: string;
  }) {
    //@ts-ignore
    super(params.input, params.init);
    this.sourcePage = params.page;
  }

  get request() {
    throw new DeprecationError({ page: this.sourcePage });
  }

  respondWith() {
    throw new DeprecationError({ page: this.sourcePage });
  }

  waitUntil() {
    throw new DeprecationError({ page: this.sourcePage });
  }
}
@@ -1,12 +0,0 @@
export class DeprecationError extends Error {
  constructor({ page }: { page: string }) {
    super(`The middleware "${page}" accepts an async API directly with the form:

  export function middleware(request, event) {
    return new Response("Hello " + request.url)
  }

  Read more: https://nextjs.org/docs/messages/middleware-new-signature
`);
  }
}
@@ -1,76 +0,0 @@
import { isBlob } from './is';
import { streamToIterator } from './utils';

const carriage = '\r\n';
const dashes = '--';
const carriageLength = 2;

function escape(str: string) {
  return str.replace(/"/g, '\\"');
}

function getFooter(boundary: string) {
  return `${dashes}${boundary}${dashes}${carriage.repeat(2)}`;
}

function getHeader(boundary: string, name: string, field: FormDataEntryValue) {
  let header = '';
  header += `${dashes}${boundary}${carriage}`;
  header += `Content-Disposition: form-data; name="${escape(name)}"`;

  if (isBlob(field)) {
    header += `; filename="${escape(field.name)}"${carriage}`;
    header += `Content-Type: ${field.type || 'application/octet-stream'}`;
  }

  return `${header}${carriage.repeat(2)}`;
}
|
||||
export function getBoundary() {
|
||||
const array = new Uint8Array(32);
|
||||
crypto.getRandomValues(array);
|
||||
|
||||
let str = '';
|
||||
for (let i = 0; i < array.length; i++) {
|
||||
str += array[i].toString(16).padStart(2, '0');
|
||||
}
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
export async function* formDataIterator(
|
||||
form: FormData,
|
||||
boundary: string
|
||||
): AsyncIterableIterator<Uint8Array> {
|
||||
const encoder = new TextEncoder();
|
||||
//@ts-ignore
|
||||
for (const [name, value] of form) {
|
||||
yield encoder.encode(getHeader(boundary, name, value));
|
||||
|
||||
if (isBlob(value)) {
|
||||
// @ts-ignore /shrug
|
||||
const stream: ReadableStream<Uint8Array> = value.stream();
|
||||
yield* streamToIterator(stream);
|
||||
} else {
|
||||
yield encoder.encode(value);
|
||||
}
|
||||
|
||||
yield encoder.encode(carriage);
|
||||
}
|
||||
|
||||
yield encoder.encode(getFooter(boundary));
|
||||
}
|
||||
|
||||
export function getFormDataLength(form: FormData, boundary: string) {
|
||||
let length = 0;
|
||||
|
||||
//@ts-ignore
|
||||
for (const [name, value] of form) {
|
||||
length += Buffer.byteLength(getHeader(boundary, name, value));
|
||||
length += isBlob(value) ? value.size : Buffer.byteLength(String(value));
|
||||
length += carriageLength;
|
||||
}
|
||||
|
||||
length += Buffer.byteLength(getFooter(boundary));
|
||||
return length;
|
||||
}
|
||||
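The helpers above hand-encode a multipart/form-data payload. As a rough usage sketch (not from the original sources — the buffering strategy and the `formdata-node` import are assumptions), this is how the iterator and length helpers fit together:

```ts
// Hypothetical usage of the deleted helpers; `formdata-node` is the FormData
// implementation this package uses elsewhere in this diff.
import { FormData } from 'formdata-node';
import { formDataIterator, getBoundary, getFormDataLength } from './form-data';

async function serializeForm(form: FormData): Promise<Uint8Array> {
  const boundary = getBoundary();
  // getFormDataLength computes the exact byte count without buffering,
  // which is what a Content-Length header needs.
  // (cast: formdata-node's FormData vs the DOM FormData type declared above)
  const out = new Uint8Array(getFormDataLength(form as any, boundary));
  let offset = 0;
  for await (const chunk of formDataIterator(form as any, boundary)) {
    out.set(chunk, offset);
    offset += chunk.length;
  }
  return out;
}
```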
@@ -1 +0,0 @@
export * from './sandbox/sandbox';
@@ -1,80 +0,0 @@
/**
 * The ArrayBuffer object is used to represent a generic, fixed-length raw
 * binary data buffer. It is an array of bytes, often referred to in other
 * languages as a "byte array". You cannot directly manipulate the contents of
 * an ArrayBuffer; instead, you create one of the typed array objects or a
 * DataView object which represents the buffer in a specific format, and use
 * that to read and write the contents of the buffer.
 */
export function isArrayBuffer(value: any): value is ArrayBuffer {
  return Object.prototype.isPrototypeOf.call(ArrayBuffer, value);
}

/**
 * ArrayBufferView is a helper type representing any of the following JS
 * TypedArray types which correspond to the list below. It is checked by duck
 * typing the provided object.
 */
export function isArrayBufferView(value: any): value is ArrayBufferView {
  return ArrayBuffer.isView(value);
}

/**
 * The DataView view provides a low-level interface for reading and writing
 * multiple number types in a binary ArrayBuffer, without having to care about
 * the platform's endianness.
 */
export function isDataView(value: any): value is DataView {
  return Object.prototype.isPrototypeOf.call(DataView, value);
}

/**
 * The URLSearchParams interface defines utility methods to work with the
 * query string of a URL.
 */
export function isURLSearchParams(value: any): value is URLSearchParams {
  return Object.prototype.isPrototypeOf.call(URLSearchParams, value);
}

/**
 * The Blob object represents a blob, which is a file-like object of immutable,
 * raw data; they can be read as text or binary data. Blobs can represent data
 * that isn't necessarily in a JavaScript-native format.
 */
export function isBlob(value: any): value is Blob {
  return Object.prototype.isPrototypeOf.call(Blob, value);
}

/**
 * The FormData interface provides a way to easily construct a set of key/value
 * pairs representing form fields and their values, which can then be easily
 * sent using the XMLHttpRequest.send() method. It uses the same format a
 * form would use if the encoding type were set to "multipart/form-data".
 */
export function isFormData(value: any): value is FormData {
  return Object.prototype.isPrototypeOf.call(FormData, value);
}

/**
 * The ReadableStream interface of the Streams API represents a readable stream
 * of byte data. Because we want to allow alternative implementations we also
 * duck type here.
 */
export function isReadableStream(value: any): value is ReadableStream {
  return (
    value &&
    (Object.prototype.isPrototypeOf.call(ReadableStream, value) ||
      (value.constructor.name === 'ReadableStream' && 'getReader' in value))
  );
}

/**
 * Checks if an object implements the Iterable interface.
 */
export function isIterable(object: any): object is Iterable<unknown> {
  return (
    object &&
    Symbol.iterator in object &&
    typeof object[Symbol.iterator] === 'function'
  );
}
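A quick behavioral note on these guards: they avoid bare `instanceof` so values from other realms or from polyfills can still match, and `isIterable` is the check the `Headers` implementation later in this diff uses to accept pair-list initializers. A small illustration:

```ts
import { isIterable } from './is';

// Pair lists are iterable, so a Headers initializer like this is accepted:
isIterable([['content-type', 'text/plain']]); // true
// Plain record objects are not iterable and take the object branch instead:
isIterable({ 'content-type': 'text/plain' }); // false
```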
@@ -1,127 +0,0 @@
import { Crypto as WebCrypto } from '@peculiar/webcrypto';
import { TransformStream } from 'web-streams-polyfill';
import { v4 as uuid } from 'uuid';
import crypto from 'crypto';

export function atob(b64Encoded: string) {
  return Buffer.from(b64Encoded, 'base64').toString('binary');
}

export function btoa(str: string) {
  return Buffer.from(str, 'binary').toString('base64');
}

class TextEncoderRuntime {
  encoder: TextEncoder;

  constructor() {
    this.encoder = new TextEncoder();
  }

  get encoding() {
    return this.encoder.encoding;
  }

  public encode(input: string) {
    return this.encoder.encode(input);
  }
}

class TextDecoderRuntime {
  decoder: TextDecoder;

  constructor() {
    this.decoder = new TextDecoder();
  }

  get encoding() {
    return this.decoder.encoding;
  }

  get fatal() {
    return this.decoder.fatal;
  }

  get ignoreBOM() {
    return this.decoder.ignoreBOM;
  }

  public decode(input: BufferSource, options?: TextDecodeOptions) {
    return this.decoder.decode(input, options);
  }
}

export { TextDecoderRuntime as TextDecoder };
export { TextEncoderRuntime as TextEncoder };

export class Crypto extends WebCrypto {
  // @ts-ignore Remove once types are updated and we deprecate node 12
  randomUUID = crypto.randomUUID || uuid;
}

export class ReadableStream<T> {
  constructor(opts: UnderlyingSource = {}) {
    let closed = false;
    let pullPromise: any;

    let transformController: TransformStreamDefaultController;
    const { readable, writable } = new TransformStream(
      {
        start: (controller: TransformStreamDefaultController) => {
          transformController = controller;
        },
      },
      undefined,
      {
        highWaterMark: 1,
      }
    );

    const writer = writable.getWriter();
    const encoder = new TextEncoder();
    const controller: ReadableStreamController<T> = {
      get desiredSize() {
        return transformController.desiredSize;
      },
      close: () => {
        if (!closed) {
          closed = true;
          writer.close();
        }
      },
      enqueue: (chunk: T) => {
        writer.write(typeof chunk === 'string' ? encoder.encode(chunk) : chunk);
        pull();
      },
      error: (reason: any) => {
        transformController.error(reason);
      },
    };

    const pull = () => {
      if (opts.pull) {
        if (!pullPromise) {
          pullPromise = Promise.resolve().then(() => {
            pullPromise = 0;
            opts.pull!(controller);
          });
        }
      }
    };

    if (opts.start) {
      opts.start(controller);
    }

    if (opts.cancel) {
      readable.cancel = (reason: any) => {
        opts.cancel!(reason);
        return readable.cancel(reason);
      };
    }

    pull();

    return readable;
  }
}
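One non-obvious design point in the class above: the constructor returns the readable side of a `TransformStream` rather than `this`, so `new ReadableStream(...)` hands back a spec-shaped stream even though the class body declares no methods. A minimal sketch of how a caller would drive it (assumed usage, not from the original sources):

```ts
import { ReadableStream } from './polyfills';

// Despite the empty-looking class, the value returned here is the readable
// end of a TransformStream: it has getReader(), pipeTo(), and friends.
const stream: any = new ReadableStream<string>({
  start(controller) {
    controller.enqueue('hello'); // strings are TextEncoder-encoded on write
    controller.close();
  },
});

const reader = stream.getReader();
```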
@@ -1,228 +0,0 @@
import type { RequestData, FetchEventResult, NodeHeaders } from '../types';
import { Blob, File, FormData } from 'formdata-node';
import { dirname, extname, resolve } from 'path';
import { readFileSync } from 'fs';
import { TransformStream } from 'web-streams-polyfill';
import * as polyfills from './polyfills';
import cookie from 'cookie';
import vm from 'vm';
import fetch, {
  Headers,
  RequestInit,
  Response,
  Request,
  RequestInfo,
} from 'node-fetch';
import { adapter } from '../adapter';
import * as esbuild from 'esbuild';
import m from 'module';

interface URLLike {
  href: string;
}

let cache:
  | {
      context: { [key: string]: any };
      paths: Map<string, string>;
      require: Map<string, any>;
      sandbox: vm.Context;
    }
  | undefined;

const WEBPACK_HASH_REGEX =
  /__webpack_require__\.h = function\(\) \{ return "[0-9a-f]+"; \}/g;

/**
 * The cache is cleared when a path is cached and the content has changed. The
 * hack ignores changes that only change the compilation hash. Instead it is
 * probably better to disable HMR for middleware entries.
 */
export function clearSandboxCache(path: string, content: Buffer | string) {
  const prev = cache?.paths.get(path)?.replace(WEBPACK_HASH_REGEX, '');
  if (prev === undefined) return;
  if (prev === content.toString().replace(WEBPACK_HASH_REGEX, '')) return;
  cache = undefined;
}

export async function run(params: {
  name: string;
  path: string;
  request: RequestData;
}): Promise<FetchEventResult> {
  if (cache === undefined) {
    const context: { [key: string]: any } = {
      atob: polyfills.atob,
      Blob,
      btoa: polyfills.btoa,
      clearInterval,
      clearTimeout,
      console: {
        assert: console.assert.bind(console),
        error: console.error.bind(console),
        info: console.info.bind(console),
        log: console.log.bind(console),
        time: console.time.bind(console),
        timeEnd: console.timeEnd.bind(console),
        timeLog: console.timeLog.bind(console),
        warn: console.warn.bind(console),
      },
      Crypto: polyfills.Crypto,
      crypto: new polyfills.Crypto(),
      Response,
      Headers,
      Request,
      fetch: (input: RequestInfo, init: RequestInit = {}) => {
        const url = getFetchURL(input, params.request.headers);
        init.headers = getFetchHeaders(params.name, init);
        if (isRequestLike(input)) {
          return fetch(url, {
            ...init,
            headers: {
              ...Object.fromEntries(input.headers),
              ...Object.fromEntries(init.headers),
            },
          });
        }
        return fetch(url, init);
      },
      File,
      FormData,
      process: { env: { ...process.env } },
      ReadableStream: polyfills.ReadableStream,
      setInterval,
      setTimeout,
      TextDecoder: polyfills.TextDecoder,
      TextEncoder: polyfills.TextEncoder,
      TransformStream,
      URL,
      URLSearchParams,
    };

    context.self = context;

    cache = {
      context,
      require: new Map<string, any>([
        [require.resolve('cookie'), { exports: cookie }],
      ]),
      paths: new Map<string, string>(),
      sandbox: vm.createContext(context),
    };
  }
  try {
    const content = readFileSync(params.path, 'utf-8');
    const esBuildResult = esbuild.transformSync(content, {
      format: 'cjs',
      banner: '"use strict";',
    });
    const x = vm.runInNewContext(m.wrap(esBuildResult.code), cache.sandbox, {
      filename: params.path,
    });
    const module = {
      exports: {},
      loaded: false,
      id: params.path,
    };
    x(
      module.exports,
      sandboxRequire.bind(null, params.path),
      module,
      dirname(params.path),
      params.path
    );
    const adapterResult = await adapter({
      request: params.request,
      // @ts-ignore
      handler: module.exports.default,
      page: params.path,
    });
    return adapterResult;
  } catch (error) {
    cache = undefined;
    throw error;
  }
}

function sandboxRequire(referrer: string, specifier: string) {
  const resolved = require.resolve(specifier, {
    paths: [resolve(dirname(referrer))],
  });

  const cached = cache?.require.get(resolved);
  if (cached !== undefined) {
    return cached.exports;
  }

  const module = {
    exports: {},
    loaded: false,
    id: resolved,
  };

  cache?.require.set(resolved, module);

  const transformOptions: esbuild.TransformOptions = {
    format: 'cjs',
    banner: '"use strict";',
  };
  if (extname(resolved) === '.json') {
    transformOptions.loader = 'json';
  }
  const transformedContent = esbuild.transformSync(
    readFileSync(resolved, 'utf-8'),
    transformOptions
  ).code;
  const fn = vm.runInContext(
    `(function(module,exports,require,__dirname,__filename) {${transformedContent}\n})`,
    cache!.sandbox
  );

  try {
    fn(
      module,
      module.exports,
      sandboxRequire.bind(null, resolved),
      dirname(resolved),
      resolved
    );
  } finally {
    cache?.require.delete(resolved);
  }
  module.loaded = true;
  return module.exports;
}

function getFetchHeaders(middleware: string, init: RequestInit) {
  const headers = new Headers(init.headers ?? {});
  const prevsub = headers.get(`x-middleware-subrequest`) || '';
  const value = prevsub.split(':').concat(middleware).join(':');
  headers.set(`x-middleware-subrequest`, value);
  headers.set(`user-agent`, `Next.js Middleware`);
  return headers;
}

function getFetchURL(input: RequestInfo, headers: NodeHeaders = {}): string {
  const initurl = isRequestLike(input)
    ? input.url
    : isURLLike(input)
    ? input.href
    : input;
  if (initurl.startsWith('/')) {
    const host = headers.host?.toString();
    const localhost =
      host === '127.0.0.1' ||
      host === 'localhost' ||
      host?.startsWith('localhost:');
    return `${localhost ? 'http' : 'https'}://${host}${initurl}`;
  }
  return initurl;
}

function isURLLike(obj: unknown): obj is URLLike {
  return Boolean(obj && typeof obj === 'object' && 'href' in obj);
}

function isRequestLike(obj: unknown): obj is Request {
  return Boolean(obj && typeof obj === 'object' && 'url' in obj);
}
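The core trick in `run()` above is evaluating esbuild-transformed CommonJS inside a shared `vm` context using Node's module wrapper. A stripped-down sketch of that pattern (a hypothetical helper, not part of the original module; note that `m.wrap`'s parameter order is `exports, require, module, __filename, __dirname`):

```ts
import vm from 'vm';
import m from 'module';
import { dirname } from 'path';

// Hypothetical distillation of the evaluate-in-sandbox step used by run().
function evalInSandbox(source: string, filename: string, sandbox: vm.Context) {
  // m.wrap() yields:
  //   (function (exports, require, module, __filename, __dirname) { ... })
  const wrapper = vm.runInContext(m.wrap(source), sandbox, { filename });
  const module = { exports: {}, loaded: false, id: filename };
  wrapper(module.exports, require, module, filename, dirname(filename));
  module.loaded = true;
  return module.exports;
}
```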
@@ -1,237 +0,0 @@
import { formDataIterator, getBoundary } from '../form-data';
import { streamToIterator } from '../utils';
import * as util from '../is';
import { URLSearchParams } from 'url';

const INTERNALS = Symbol('internal body');

abstract class BaseBody implements Body {
  abstract headers: Headers;

  [INTERNALS]: {
    bodyInit?: BodyInit;
    boundary?: string;
    disturbed: boolean;
    stream?: ReadableStream<Uint8Array> | null;
  };

  constructor(bodyInit?: BodyInit) {
    this[INTERNALS] = {
      bodyInit: bodyInit,
      disturbed: false,
    };

    if (util.isFormData(bodyInit)) {
      this[INTERNALS].boundary = getBoundary();
    }
  }

  get body(): ReadableStream<Uint8Array> | null {
    const body = this[INTERNALS].bodyInit;
    if (!body) {
      return null;
    }

    // eslint-disable-next-line
    const that = this;
    if (!this[INTERNALS].stream) {
      const readable = new ReadableStream({
        async start(controller) {
          if (typeof body === 'string') {
            const encoder = new TextEncoder();
            controller.enqueue(encoder.encode(body));
          } else if (util.isBlob(body)) {
            const buffer = await body.arrayBuffer();
            controller.enqueue(new Uint8Array(buffer));
          } else if (util.isDataView(body)) {
            controller.enqueue(body);
          } else if (util.isArrayBuffer(body)) {
            controller.enqueue(body);
          } else if (util.isArrayBufferView(body)) {
            controller.enqueue(body);
          } else if (util.isURLSearchParams(body)) {
            const encoder = new TextEncoder();
            controller.enqueue(encoder.encode(body.toString()));
          } else if (util.isFormData(body)) {
            for await (const chunk of formDataIterator(
              body,
              that[INTERNALS].boundary!
            )) {
              controller.enqueue(chunk);
            }
          } else if (util.isReadableStream(body)) {
            for await (const chunk of streamToIterator(body)) {
              if (chunk.length) {
                controller.enqueue(chunk);
              }
            }
          } else {
            const text = Object.prototype.toString.call(body);
            const encoder = new TextEncoder();
            controller.enqueue(encoder.encode(text));
          }

          controller.close();
        },
      });

      // Spy on reading chunks to set the stream as disturbed
      const getReader = readable.getReader.bind(readable);
      readable.getReader = () => {
        const reader = getReader();
        const read = reader.read.bind(reader);
        reader.read = () => {
          this[INTERNALS].disturbed = true;
          return read();
        };
        return reader;
      };

      this[INTERNALS].stream = readable;
    }

    return this[INTERNALS].stream!;
  }

  get bodyUsed(): boolean {
    return this[INTERNALS].disturbed;
  }

  _consume() {
    if (this[INTERNALS].disturbed) {
      return Promise.reject(
        new TypeError(
          `Body has already been used. It can only be used once. Use tee() first if you need to read it twice.`
        )
      );
    }

    this[INTERNALS].disturbed = true;
    const body = this.body;
    return new Promise<Uint8Array>((resolve, reject) => {
      let buffer = new Uint8Array(0);
      if (!body) {
        return resolve(buffer);
      }

      const reader = body.getReader();
      (function pump() {
        reader.read().then(({ value, done }) => {
          if (done) {
            return resolve(buffer);
          } else if (value) {
            const merge = new Uint8Array(buffer.length + value.length);
            merge.set(buffer);
            merge.set(value, buffer.length);
            buffer = merge;
          }

          pump();
        }, reject);
      })();
    });
  }

  async arrayBuffer() {
    const buffer = await this._consume();
    const arrayBuffer = new ArrayBuffer(buffer.length);
    const view = new Uint8Array(arrayBuffer);

    for (let i = 0; i < buffer.length; ++i) {
      view[i] = buffer[i];
    }

    return arrayBuffer;
  }

  async blob() {
    const buffer = await this._consume();
    return new Blob([buffer]);
  }

  async formData() {
    const bodyInit = this[INTERNALS].bodyInit;
    if (util.isURLSearchParams(bodyInit)) {
      const form = new FormData();
      for (const [key, value] of bodyInit) {
        form.append(key, value);
      }
      return form;
    } else if (util.isFormData(bodyInit)) {
      return bodyInit;
    } else {
      throw new TypeError(
        `Unrecognized Content-Type header value. FormData can only parse the following MIME types: multipart/form-data, application/x-www-form-urlencoded.`
      );
    }
  }

  async text() {
    const decoder = new TextDecoder();
    const buffer = await this._consume();
    return decoder.decode(buffer);
  }

  async json() {
    const text = await this.text();

    try {
      return JSON.parse(text);
    } catch (err: any) {
      throw new TypeError(`invalid json body reason: ${err.message}`);
    }
  }
}

export { BaseBody as Body };

export type BodyInit =
  | null
  | string
  | Blob
  | BufferSource
  | FormData
  | URLSearchParams
  | ReadableStream<Uint8Array>;

export function extractContentType(instance: BaseBody) {
  const body = instance[INTERNALS].bodyInit;
  if (typeof body === 'string') {
    return 'text/plain;charset=UTF-8';
  } else if (util.isBlob(body)) {
    return body.type;
  } else if (util.isDataView(body)) {
    return null;
  } else if (util.isArrayBuffer(body)) {
    return null;
  } else if (util.isArrayBufferView(body)) {
    return null;
  } else if (util.isURLSearchParams(body)) {
    return 'application/x-www-form-urlencoded;charset=UTF-8';
  } else if (util.isFormData(body)) {
    return `multipart/form-data;boundary=${instance[INTERNALS].boundary}`;
  } else if (util.isReadableStream(body)) {
    return null;
  } else {
    return 'text/plain;charset=UTF-8';
  }
}

export function cloneBody(instance: BaseBody) {
  if (instance.bodyUsed) {
    throw new Error('cannot clone body after it is used');
  }

  const body = instance[INTERNALS].bodyInit;
  if (util.isReadableStream(body)) {
    const [r1, r2] = body.tee();
    instance[INTERNALS].bodyInit = r1;
    return r2;
  }

  return body || null;
}

export function getInstanceBody(instance: BaseBody) {
  return instance[INTERNALS].bodyInit;
}
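The `_consume()`/`bodyUsed` pair above implements the spec's one-shot body contract. A short sketch of the resulting behavior (assumed usage; the `Request` class that consumes this mixin appears next in this diff):

```ts
import { Request } from './request';

async function demo() {
  const req = new Request('https://example.com/', {
    method: 'POST',
    body: 'payload',
  });

  const copy = req.clone();      // clone before the body is disturbed
  await req.text();              // first read succeeds and sets bodyUsed
  await copy.text();             // the clone reads its own copy independently
  await req.text().catch(err => {
    // TypeError: Body has already been used. ...
    console.error(err.message);
  });
}
```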
@@ -1,26 +0,0 @@
export const responseSymbol = Symbol('response');
export const passThroughSymbol = Symbol('passThrough');
export const waitUntilSymbol = Symbol('waitUntil');

export class FetchEvent {
  readonly [waitUntilSymbol]: Promise<any>[] = [];
  [responseSymbol]?: Promise<Response>;
  [passThroughSymbol] = false;

  // eslint-disable-next-line @typescript-eslint/no-useless-constructor
  constructor() {}

  respondWith(response: Response | Promise<Response>): void {
    if (!this[responseSymbol]) {
      this[responseSymbol] = Promise.resolve(response);
    }
  }

  passThroughOnException(): void {
    this[passThroughSymbol] = true;
  }

  waitUntil(promise: Promise<any>): void {
    this[waitUntilSymbol].push(promise);
  }
}
@@ -1,238 +0,0 @@
import { isIterable } from '../is';

const MAP = Symbol('map');
const INTERNAL = Symbol('internal');
const INVALID_TOKEN_REGEX = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/;
const INVALID_HEADER_CHAR_REGEX = /[^\t\x20-\x7e\x80-\xff]/;

class BaseHeaders implements Headers {
  [MAP]: { [k: string]: string[] } = {};

  constructor(init?: HeadersInit) {
    if (init instanceof BaseHeaders) {
      const rawHeaders = init.raw();
      for (const headerName of Object.keys(rawHeaders)) {
        for (const value of rawHeaders[headerName]) {
          this.append(headerName, value);
        }
      }
    } else if (isIterable(init)) {
      const pairs = [];
      for (const pair of init) {
        if (!isIterable(pair)) {
          throw new TypeError('Each header pair must be iterable');
        }
        pairs.push(Array.from(pair));
      }

      for (const pair of pairs) {
        if (pair.length !== 2) {
          throw new TypeError('Each header pair must be a name/value tuple');
        }
        this.append(pair[0], pair[1]);
      }
    } else if (typeof init === 'object') {
      for (const key of Object.keys(init)) {
        // @ts-ignore
        this.append(key, init[key]);
      }
    } else if (init) {
      throw new TypeError('Provided initializer must be an object');
    }
  }

  get(name: string) {
    const _name = `${name}`;
    validateName(_name);
    const key = find(this[MAP], _name);
    if (key === undefined) {
      return null;
    }

    return this[MAP][key].join(', ');
  }

  forEach(
    callback: (value: string, name: string, parent: BaseHeaders) => void,
    thisArg: any = undefined
  ): void {
    let pairs = getHeaders(this);
    let i = 0;
    while (i < pairs.length) {
      const [name, value] = pairs[i];
      callback.call(thisArg, value, name, this);
      pairs = getHeaders(this);
      i++;
    }
  }

  set(name: string, value: string) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    this[MAP][key !== undefined ? key : name] = [value];
  }

  append(name: string, value: string) {
    name = `${name}`;
    value = `${value}`;
    validateName(name);
    validateValue(value);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      this[MAP][key].push(value);
    } else {
      this[MAP][name] = [value];
    }
  }

  has(name: string) {
    name = `${name}`;
    validateName(name);
    return find(this[MAP], name) !== undefined;
  }

  delete(name: string) {
    name = `${name}`;
    validateName(name);
    const key = find(this[MAP], name);
    if (key !== undefined) {
      delete this[MAP][key];
    }
  }

  raw() {
    return this[MAP];
  }

  keys() {
    return createHeadersIterator(this, 'key');
  }

  values() {
    return createHeadersIterator(this, 'value');
  }

  entries() {
    return createHeadersIterator(this, 'key+value');
  }

  [Symbol.iterator]() {
    return createHeadersIterator(this, 'key+value');
  }
}

function createHeadersIterator(
  target: BaseHeaders,
  kind: 'key' | 'value' | 'key+value'
) {
  const iterator = Object.create(HeadersIteratorPrototype);
  iterator[INTERNAL] = {
    target,
    kind,
    index: 0,
  };
  return iterator;
}

function validateName(name: string) {
  name = `${name}`;
  if (INVALID_TOKEN_REGEX.test(name)) {
    throw new TypeError(`${name} is not a legal HTTP header name`);
  }
}

function validateValue(value: string) {
  value = `${value}`;
  if (INVALID_HEADER_CHAR_REGEX.test(value)) {
    throw new TypeError(`${value} is not a legal HTTP header value`);
  }
}

function find(
  map: { [k: string]: string[] },
  name: string
): string | undefined {
  name = name.toLowerCase();
  for (const key in map) {
    if (key.toLowerCase() === name) {
      return key;
    }
  }
  return undefined;
}

Object.defineProperty(BaseHeaders.prototype, Symbol.toStringTag, {
  value: 'Headers',
  writable: false,
  enumerable: false,
  configurable: true,
});

Object.defineProperties(BaseHeaders.prototype, {
  append: { enumerable: true },
  delete: { enumerable: true },
  entries: { enumerable: true },
  forEach: { enumerable: true },
  get: { enumerable: true },
  has: { enumerable: true },
  keys: { enumerable: true },
  raw: { enumerable: false },
  set: { enumerable: true },
  values: { enumerable: true },
});

function getHeaders(
  headers: BaseHeaders,
  kind: 'key' | 'value' | 'key+value' = 'key+value'
) {
  const fn =
    kind === 'key'
      ? (key: string) => key.toLowerCase()
      : kind === 'value'
      ? (key: string) => headers[MAP][key].join(', ')
      : (key: string) => [key.toLowerCase(), headers[MAP][key].join(', ')];

  return Object.keys(headers[MAP])
    .sort()
    .map(key => fn(key));
}

const HeadersIteratorPrototype = Object.setPrototypeOf(
  {
    next() {
      if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {
        throw new TypeError('Value of `this` is not a HeadersIterator');
      }

      const { target, kind, index } = this[INTERNAL];
      const values = getHeaders(target, kind);
      const len = values.length;
      if (index >= len) {
        return {
          value: undefined,
          done: true,
        };
      }

      this[INTERNAL].index = index + 1;

      return {
        value: values[index],
        done: false,
      };
    },
  },
  Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))
);

Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {
  value: 'HeadersIterator',
  writable: false,
  enumerable: false,
  configurable: true,
});

export { BaseHeaders as Headers };
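Behavior sketch for the map-backed implementation above (assumed usage): lookups are case-insensitive, `append()` accumulates values under the first-seen casing, and `get()` joins multiple values with `', '` the way node-fetch does.

```ts
import { Headers } from './headers';

const h = new Headers();
h.append('Accept', 'text/html');
h.append('accept', 'application/json');

h.get('ACCEPT'); // 'text/html, application/json' — joined, case-insensitive
h.has('accept'); // true
[...h.keys()];   // ['accept'] — iteration yields lowercased, sorted names
```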
@@ -1,124 +0,0 @@
import { Body, cloneBody, extractContentType, getInstanceBody } from './body';
import { Headers as BaseHeaders } from './headers';
import { notImplemented } from '../utils';

export const INTERNALS = Symbol('internal request');

class BaseRequest extends Body implements Request {
  [INTERNALS]: {
    credentials: RequestCredentials;
    headers: Headers;
    method: string;
    redirect: RequestRedirect;
    url: URL;
  };

  constructor(input: BaseRequest | string, init: RequestInit = {}) {
    const method = init.method?.toUpperCase() ?? 'GET';

    if (
      (method === 'GET' || method === 'HEAD') &&
      (init.body || (input instanceof BaseRequest && getInstanceBody(input)))
    ) {
      throw new TypeError('Request with GET/HEAD method cannot have body');
    }

    let inputBody: BodyInit | null = null;
    if (init.body) {
      inputBody = init.body;
    } else if (input instanceof BaseRequest && getInstanceBody(input)) {
      inputBody = cloneBody(input);
    }

    super(inputBody);

    const headers = new BaseHeaders(
      init.headers || getProp(input, 'headers') || {}
    );
    if (inputBody !== null) {
      const contentType = extractContentType(this);
      if (contentType !== null && !headers.has('Content-Type')) {
        headers.append('Content-Type', contentType);
      }
    }

    this[INTERNALS] = {
      credentials:
        init.credentials || getProp(input, 'credentials') || 'same-origin',
      headers,
      method,
      redirect: init.redirect || getProp(input, 'redirect') || 'follow',
      url: new URL(typeof input === 'string' ? input : input.url),
    };
  }

  get url() {
    return this[INTERNALS].url.toString();
  }

  get credentials() {
    return this[INTERNALS].credentials;
  }

  get method() {
    return this[INTERNALS].method;
  }

  get headers() {
    return this[INTERNALS].headers;
  }

  get redirect() {
    return this[INTERNALS].redirect;
  }

  // @ts-ignore
  public clone() {
    return new BaseRequest(this);
  }

  get cache() {
    return notImplemented('Request', 'cache');
  }

  get integrity() {
    return notImplemented('Request', 'integrity');
  }

  get keepalive() {
    return notImplemented('Request', 'keepalive');
  }

  get mode() {
    return notImplemented('Request', 'mode');
  }

  get destination() {
    return notImplemented('Request', 'destination');
  }

  get referrer() {
    return notImplemented('Request', 'referrer');
  }

  get referrerPolicy() {
    return notImplemented('Request', 'referrerPolicy');
  }

  get signal() {
    return notImplemented('Request', 'signal');
  }

  get [Symbol.toStringTag]() {
    return 'Request';
  }
}

export { BaseRequest as Request };

function getProp<K extends keyof BaseRequest>(
  input: BaseRequest | string,
  key: K
): BaseRequest[K] | undefined {
  return input instanceof BaseRequest ? input[key] : undefined;
}
@@ -1,113 +0,0 @@
import { Body, BodyInit, cloneBody, extractContentType } from './body';

const INTERNALS = Symbol('internal response');
const REDIRECTS = new Set([301, 302, 303, 307, 308]);

class BaseResponse extends Body implements Response {
  [INTERNALS]: {
    headers: Headers;
    status: number;
    statusText: string;
    type: 'default' | 'error';
    url?: URL;
  };

  constructor(body?: BodyInit | null, init?: ResponseInit) {
    super(body);

    this[INTERNALS] = {
      headers: new Headers(init?.headers),
      status: init?.status || 200,
      statusText: init?.statusText || '',
      type: 'default',
      url: init?.url ? new URL(init.url) : undefined,
    };

    if (this[INTERNALS].status < 200 || this[INTERNALS].status > 599) {
      throw new RangeError(
        `Responses may only be constructed with status codes in the range 200 to 599, inclusive.`
      );
    }

    if (body !== null && !this[INTERNALS].headers.has('Content-Type')) {
      const contentType = extractContentType(this);
      if (contentType) {
        this[INTERNALS].headers.append('Content-Type', contentType);
      }
    }
  }

  static redirect(url: string, status = 302) {
    if (!REDIRECTS.has(status)) {
      throw new RangeError(
        'Failed to execute "redirect" on "response": Invalid status code'
      );
    }

    return new Response(null, {
      headers: { Location: url },
      status,
    });
  }

  static error() {
    const response = new BaseResponse(null, { status: 0, statusText: '' });
    response[INTERNALS].type = 'error';
    return response;
  }

  get url() {
    return this[INTERNALS].url?.toString() || '';
  }

  get ok() {
    return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300;
  }

  get status() {
    return this[INTERNALS].status;
  }

  get statusText() {
    return this[INTERNALS].statusText;
  }

  get headers() {
    return this[INTERNALS].headers;
  }

  get redirected() {
    return (
      this[INTERNALS].status > 299 &&
      this[INTERNALS].status < 400 &&
      this[INTERNALS].headers.has('Location')
    );
  }

  get type() {
    return this[INTERNALS].type;
  }

  // @ts-ignore
  clone() {
    return new BaseResponse(cloneBody(this), {
      headers: this.headers,
      status: this.status,
      statusText: this.statusText,
      url: this.url,
    });
  }

  get [Symbol.toStringTag]() {
    return 'Response';
  }
}

export interface ResponseInit {
  headers?: HeadersInit;
  status?: number;
  statusText?: string;
  url?: string;
}

export { BaseResponse as Response };
@@ -1,26 +0,0 @@
import { DeprecationError } from '../error';
import { FetchEvent } from '../spec-compliant/fetch-event';
import { NextRequest } from './request';

export class NextFetchEvent extends FetchEvent {
  sourcePage: string;

  constructor(params: { request: NextRequest; page: string }) {
    //@ts-ignore
    super(params.request);
    this.sourcePage = params.page;
  }

  // @ts-ignore
  get request() {
    throw new DeprecationError({
      page: this.sourcePage,
    });
  }

  respondWith() {
    throw new DeprecationError({
      page: this.sourcePage,
    });
  }
}
@@ -1,98 +0,0 @@
import type { IResult } from 'ua-parser-js';
import cookie from 'cookie';
import parseua from 'ua-parser-js';
import { Request, RequestInit as NodeFetchRequestInit } from 'node-fetch';

export const INTERNALS = Symbol('internal request');

export class NextRequest extends Request {
  [INTERNALS]: {
    cookieParser(): { [key: string]: string };
    geo: { city?: string; country?: string; region?: string };
    ip?: string;
    page?: { name?: string; params?: { [key: string]: string } };
    ua?: IResult | null;
    url: URL;
  };

  constructor(input: Request | string, init: RequestInit = {}) {
    //@ts-ignore
    super(input, init);

    const cookieParser = () => {
      const value = this.headers.get('cookie');
      return value ? cookie.parse(value) : {};
    };

    this[INTERNALS] = {
      cookieParser,
      geo: init.geo || {},
      ip: init.ip,
      page: init.page,
      url: new URL(typeof input === 'string' ? input : input.url),
    };
  }

  public get cookies() {
    return this[INTERNALS].cookieParser();
  }

  public get geo() {
    return this[INTERNALS].geo;
  }

  public get ip() {
    return this[INTERNALS].ip;
  }

  public get preflight() {
    return this.headers.get('x-middleware-preflight');
  }

  public get nextUrl() {
    return this[INTERNALS].url;
  }

  public get page() {
    return {
      name: this[INTERNALS].page?.name,
      params: this[INTERNALS].page?.params,
    };
  }

  public get ua() {
    if (typeof this[INTERNALS].ua !== 'undefined') {
      return this[INTERNALS].ua || undefined;
    }

    const uaString = this.headers.get('user-agent');
    if (!uaString) {
      this[INTERNALS].ua = null;
      return this[INTERNALS].ua || undefined;
    }

    this[INTERNALS].ua = {
      ...parseua(uaString),
    };

    return this[INTERNALS].ua;
  }

  //@ts-ignore
  public get url() {
    return this[INTERNALS].url.toString();
  }
}

export interface RequestInit extends NodeFetchRequestInit {
  geo?: {
    city?: string;
    country?: string;
    region?: string;
  };
  ip?: string;
  page?: {
    name?: string;
    params?: { [key: string]: string };
  };
}
@@ -1,97 +0,0 @@
import type { CookieSerializeOptions } from 'cookie';
import cookie from 'cookie';
import { Response, ResponseInit as NodeFetchResponseInit } from 'node-fetch';

const INTERNALS = Symbol('internal response');
const REDIRECTS = new Set([301, 302, 303, 307, 308]);

export class SpecResponse extends Response {
  [INTERNALS]: {
    cookieParser(): { [key: string]: string };
    url?: URL;
  };

  constructor(body?: BodyInit | null, init: ResponseInit = {}) {
    // TODO - why is this failing?
    // @ts-ignore
    super(body, init);

    const cookieParser = () => {
      const value = this.headers.get('cookie');
      return value ? cookie.parse(value) : {};
    };

    this[INTERNALS] = {
      cookieParser,
      url: init.url ? new URL(init.url) : undefined,
    };
  }

  public get cookies() {
    return this[INTERNALS].cookieParser();
  }

  public cookie(
    name: string,
    value: { [key: string]: any } | string,
    opts: CookieSerializeOptions = {}
  ) {
    const val =
      typeof value === 'object' ? 'j:' + JSON.stringify(value) : String(value);

    if (opts.maxAge) {
      opts.expires = new Date(Date.now() + opts.maxAge);
      opts.maxAge /= 1000;
    }

    if (opts.path == null) {
      opts.path = '/';
    }

    this.headers.append(
      'Set-Cookie',
      cookie.serialize(name, String(val), opts)
    );
    return this;
  }

  public clearCookie(name: string, opts: CookieSerializeOptions = {}) {
    return this.cookie(name, '', { expires: new Date(1), path: '/', ...opts });
  }

  static redirect(url: string | URL, status = 302) {
    if (!REDIRECTS.has(status)) {
      throw new RangeError(
        'Failed to execute "redirect" on "response": Invalid status code'
      );
    }

    return new SpecResponse(null, {
      headers: { Location: typeof url === 'string' ? url : url.toString() },
      status,
    });
  }

  static rewrite(destination: string | URL) {
    return new SpecResponse(null, {
      headers: {
        'x-middleware-rewrite':
          typeof destination === 'string'
            ? destination
            : destination.toString(),
      },
    });
  }

  static next() {
    return new SpecResponse(null, {
      headers: {
        'x-middleware-next': '1',
      },
    });
  }
}

interface ResponseInit extends NodeFetchResponseInit {
  url?: string;
}
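The static helpers above are what a middleware handler returns to signal an outcome; the `x-middleware-*` headers are the ones the constructors set. A hedged usage sketch (handler shape per the `adapter()` earlier in this diff; the routes are invented):

```ts
import { SpecResponse } from './response';

export default function middleware(request: { url: string }) {
  if (request.url.endsWith('/old')) {
    return SpecResponse.redirect('/new', 308); // sets Location: /new
  }
  if (request.url.endsWith('/beta')) {
    return SpecResponse.rewrite('/variant-b'); // sets x-middleware-rewrite
  }
  return SpecResponse.next(); // sets x-middleware-next: 1 — keep routing
}
```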
@@ -1,26 +0,0 @@
import { Response } from 'node-fetch';

export interface NodeHeaders {
  [header: string]: string | string[] | undefined;
}

export interface RequestData {
  geo?: {
    city?: string;
    country?: string;
    region?: string;
  };
  headers: NodeHeaders;
  ip?: string;
  method: string;
  page?: {
    name?: string;
    params?: { [key: string]: string };
  };
  url: string;
}

export interface FetchEventResult {
  response: Response;
  waitUntil: Promise<any>;
}
@@ -1,124 +0,0 @@
import type { NodeHeaders } from './types';
import { Headers } from 'node-fetch';

export async function* streamToIterator<T>(
  readable: ReadableStream<T>
): AsyncIterableIterator<T> {
  const reader = readable.getReader();
  while (true) {
    const { value, done } = await reader.read();
    if (done) break;
    if (value) {
      yield value;
    }
  }
  reader.releaseLock();
}

export function notImplemented(name: string, method: string): any {
  throw new Error(
    `Failed to get the '${method}' property on '${name}': the property is not implemented`
  );
}

export function fromNodeHeaders(object: NodeHeaders): Headers {
  const headers = new Headers();
  for (const [key, value] of Object.entries(object)) {
    const values = Array.isArray(value) ? value : [value];
    for (const v of values) {
      if (v !== undefined) {
        headers.append(key, v);
      }
    }
  }
  return headers;
}

export function toNodeHeaders(headers?: Headers): NodeHeaders {
  const result: NodeHeaders = {};
  if (headers) {
    for (const [key, value] of headers.entries()) {
      result[key] = value;
      if (key.toLowerCase() === 'set-cookie') {
        result[key] = splitCookiesString(value);
      }
    }
  }
  return result;
}

/*
  Set-Cookie header field-values are sometimes comma-joined in one string. This splits them without choking on commas
  that are within a single set-cookie field-value, such as in the Expires portion.
  This is uncommon, but explicitly allowed - see https://tools.ietf.org/html/rfc2616#section-4.2
  Node.js does this for every header *except* set-cookie - see https://github.com/nodejs/node/blob/d5e363b77ebaf1caf67cd7528224b651c86815c1/lib/_http_incoming.js#L128
  React Native's fetch does this for *every* header, including set-cookie.

  Based on: https://github.com/google/j2objc/commit/16820fdbc8f76ca0c33472810ce0cb03d20efe25
  Credits to: https://github.com/tomball for original and https://github.com/chrusart for JavaScript implementation
*/
export function splitCookiesString(cookiesString: string) {
  const cookiesStrings = [];
  let pos = 0;
  let start;
  let ch;
  let lastComma;
  let nextStart;
  let cookiesSeparatorFound;

  function skipWhitespace() {
    while (pos < cookiesString.length && /\s/.test(cookiesString.charAt(pos))) {
      pos += 1;
    }
    return pos < cookiesString.length;
  }

  function notSpecialChar() {
    ch = cookiesString.charAt(pos);

    return ch !== '=' && ch !== ';' && ch !== ',';
  }

  while (pos < cookiesString.length) {
    start = pos;
    cookiesSeparatorFound = false;

    while (skipWhitespace()) {
      ch = cookiesString.charAt(pos);
      if (ch === ',') {
        // a ',' is a cookie separator only if the next '=' comes before any ';' or ','
        lastComma = pos;
        pos += 1;

        skipWhitespace();
        nextStart = pos;

        while (pos < cookiesString.length && notSpecialChar()) {
          pos += 1;
        }

        // pos is now at a special character
        if (pos < cookiesString.length && cookiesString.charAt(pos) === '=') {
          // we found a cookie separator
          cookiesSeparatorFound = true;
          // pos is inside the next cookie, so back up and return it.
          pos = nextStart;
          cookiesStrings.push(cookiesString.substring(start, lastComma));
          start = pos;
        } else {
          // this was a ',' inside a param or after a param separator ';',
          // so continue from the character after that comma
          pos = lastComma + 1;
        }
      } else {
        pos += 1;
      }
    }

    if (!cookiesSeparatorFound || pos >= cookiesString.length) {
      cookiesStrings.push(cookiesString.substring(start, cookiesString.length));
    }
  }

  return cookiesStrings;
}
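A worked example of the splitting rule implemented above: the comma inside the `Expires` date is not followed by a `token=` sequence before the next special character, so it does not split, while the comma before `b=2` does.

```ts
import { splitCookiesString } from './utils';

splitCookiesString('a=1; Expires=Wed, 21 Oct 2015 07:28:00 GMT, b=2; Path=/');
// => ['a=1; Expires=Wed, 21 Oct 2015 07:28:00 GMT', 'b=2; Path=/']
```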
@@ -1,20 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`build() should build simple middleware 1`] = `
Object {
  "pages": Object {
    "_middleware.js": Object {
      "env": Array [],
      "files": Array [
        "server/pages/_middleware.js",
      ],
      "name": "pages/_middleware",
      "page": "/",
      "regexp": "^/.*$",
      "runtime": "web",
      "sortingIndex": 1,
    },
  },
  "version": 2,
}
`;
packages/middleware/test/build.test.ts
@@ -1,99 +0,0 @@
import { join } from 'path';
import { promises as fsp } from 'fs';
import { build } from '../src';
import { Response } from 'node-fetch';

const setupFixture = async (fixture: string) => {
  const fixturePath = join(__dirname, `fixtures/${fixture}`);
  await build({
    workPath: fixturePath,
  });

  const functionsManifest = JSON.parse(
    await fsp.readFile(
      join(fixturePath, '.output/functions-manifest.json'),
      'utf8'
    )
  );

  const outputFile = join(fixturePath, '.output/server/pages/_middleware.js');
  expect(await fsp.stat(outputFile)).toBeTruthy();
  require(outputFile);
  //@ts-ignore
  const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
  return {
    middleware,
    functionsManifest,
  };
};

describe('build()', () => {
  beforeEach(() => {
    //@ts-ignore
    global.Response = Response;
  });
  afterEach(() => {
    //@ts-ignore
    delete global.Response;
    //@ts-ignore
    delete global._ENTRIES;
  });
  it('should build simple middleware', async () => {
    const { functionsManifest, middleware } = await setupFixture('simple');

    expect(functionsManifest).toMatchSnapshot();
    expect(typeof middleware).toStrictEqual('function');
    const handledResponse = await middleware({
      request: {
        url: 'http://google.com',
      },
    });
    const unhandledResponse = await middleware({
      request: {
        url: 'literallyanythingelse',
      },
    });
    expect(String(handledResponse.response.body)).toEqual('Hi from the edge!');
    expect(
      (handledResponse.response as Response).headers.get('x-middleware-next')
    ).toEqual(null);
    expect(unhandledResponse.response.body).toEqual(null);
    expect(
      (unhandledResponse.response as Response).headers.get('x-middleware-next')
    ).toEqual('1');
  });

  it('should build simple middleware with env vars', async () => {
    const expectedEnvVar = 'expected-env-var';
    const fixture = join(__dirname, 'fixtures/env');
    process.env.ENV_VAR_SHOULD_BE_DEFINED = expectedEnvVar;
    await build({
      workPath: fixture,
    });
    // env var should be inlined in the output
    delete process.env.ENV_VAR_SHOULD_BE_DEFINED;

    const outputFile = join(fixture, '.output/server/pages/_middleware.js');
    expect(await fsp.stat(outputFile)).toBeTruthy();

    require(outputFile);
    //@ts-ignore
    const middleware = global._ENTRIES['middleware_pages/_middleware'].default;
    expect(typeof middleware).toStrictEqual('function');
    const handledResponse = await middleware({
      request: {},
    });
    expect(String(handledResponse.response.body)).toEqual(expectedEnvVar);
    expect(
      (handledResponse.response as Response).headers.get('x-middleware-next')
    ).toEqual(null);
  });

  it('should create a middleware that runs in strict mode', async () => {
    const { middleware } = await setupFixture('use-strict');
    const response = await middleware({
      request: {},
    });
    expect(String(response.response.body)).toEqual('is strict mode? yes');
  });
});
@@ -1,3 +0,0 @@
export default req => {
  return new Response(process.env.ENV_VAR_SHOULD_BE_DEFINED);
};
@@ -1,5 +0,0 @@
export default req => {
  if (req.url === 'http://google.com') {
    return new Response('Hi from the edge!');
  }
};
@@ -1,6 +0,0 @@
export default function (req) {
  const isStrict = (function () {
    return !this;
  })();
  return new Response('is strict mode? ' + (isStrict ? 'yes' : 'no'));
}
packages/middleware/test/tsconfig.json
@@ -1,4 +0,0 @@
{
  "extends": "../tsconfig.json",
  "include": ["*.test.ts"]
}
@@ -1,16 +0,0 @@
{
  "compilerOptions": {
    "strict": true,
    "esModuleInterop": true,
    "lib": ["esnext", "dom", "dom.iterable"],
    "target": "es2018",
    "module": "commonjs",
    "outDir": "dist",
    "sourceMap": false,
    "declaration": true,
    "moduleResolution": "node",
    "typeRoots": ["./@types", "./node_modules/@types"]
  },
  "include": ["src/**/*"],
  "exclude": ["node_modules"]
}
packages/node-bridge/.gitignore (new file)
@@ -0,0 +1,2 @@
/helpers.js
/source-map-support.js
packages/node-bridge/build.js (new file)
@@ -0,0 +1,62 @@
#!/usr/bin/env node
const fs = require('fs-extra');
const execa = require('execa');
const { join } = require('path');

async function main() {
  // Build TypeScript files
  await execa('tsc', [], {
    stdio: 'inherit',
  });

  // Bundle `helpers.ts` with ncc
  await fs.remove(join(__dirname, 'helpers.js'));
  const helpersDir = join(__dirname, 'helpers');
  await execa(
    'ncc',
    [
      'build',
      join(__dirname, 'helpers.ts'),
      '-e',
      '@vercel/node-bridge',
      '-e',
      '@vercel/build-utils',
      '-e',
      'typescript',
      '-o',
      helpersDir,
    ],
    { stdio: 'inherit' }
  );
  await fs.rename(join(helpersDir, 'index.js'), join(__dirname, 'helpers.js'));
  await fs.remove(helpersDir);

  // Bundle `source-map-support/register` with ncc for source maps
  const sourceMapSupportDir = join(__dirname, 'source-map-support');
  await execa(
    'ncc',
    [
      'build',
      join(__dirname, '../../node_modules/source-map-support/register'),
      '-e',
      '@vercel/node-bridge',
      '-e',
      '@vercel/build-utils',
      '-e',
      'typescript',
      '-o',
      sourceMapSupportDir,
    ],
    { stdio: 'inherit' }
  );
  await fs.rename(
    join(sourceMapSupportDir, 'index.js'),
    join(__dirname, 'source-map-support.js')
  );
  await fs.remove(sourceMapSupportDir);
}

main().catch(err => {
  console.error(err);
  process.exit(1);
});
@@ -1,4 +1,4 @@
-import {
+import type {
   VercelRequest,
   VercelResponse,
   VercelRequestCookies,
@@ -6,7 +6,7 @@ import {
   VercelRequestBody,
 } from './types';
 import { Server } from 'http';
-import type { Bridge } from '@vercel/node-bridge/bridge';
+import type { Bridge } from './bridge';

 function getBodyParser(req: VercelRequest, body: Buffer) {
   return function parseBody(): VercelRequestBody {
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/node-bridge",
-  "version": "2.1.1-canary.2",
+  "version": "2.1.2-canary.0",
   "license": "MIT",
   "main": "./index.js",
   "repository": {
@@ -11,10 +11,12 @@
   "files": [
     "bridge.*",
     "launcher.*",
-    "index.js"
+    "index.js",
+    "helpers.js",
+    "source-map-support.js"
   ],
   "scripts": {
-    "build": "tsc",
+    "build": "node build.js",
     "test-unit": "jest --env node --verbose --runInBand --bail"
   },
   "devDependencies": {
@@ -2,7 +2,7 @@ const fetch = require('node-fetch');
 const listen = require('test-listen');
 const qs = require('querystring');

-const { createServerWithHelpers } = require('../dist/helpers');
+const { createServerWithHelpers } = require('../helpers');

 const mockListener = jest.fn();
 const consumeEventMock = jest.fn();
@@ -240,6 +240,7 @@ describe('req.body', () => {

   test('should work when body is empty and content-type is `application/json`', async () => {
     mockListener.mockImplementation((req, res) => {
+      console.log(req.body);
       res.end();
     });
@@ -1,5 +1,11 @@
/// <reference types="node" />
import { Server, IncomingHttpHeaders, OutgoingHttpHeaders } from 'http';
import {
  Server,
  IncomingHttpHeaders,
  OutgoingHttpHeaders,
  ServerResponse,
  IncomingMessage,
} from 'http';
export interface VercelProxyEvent {
  Action: string;
  body: string;
@@ -37,3 +43,20 @@ export type LauncherConfiguration = {
  awsLambdaHandler?: string;
  useRequire?: boolean;
};

export type VercelRequestCookies = { [key: string]: string };
export type VercelRequestQuery = { [key: string]: string | string[] };
export type VercelRequestBody = any;

export type VercelRequest = IncomingMessage & {
  query: VercelRequestQuery;
  cookies: VercelRequestCookies;
  body: VercelRequestBody;
};

export type VercelResponse = ServerResponse & {
  send: (body: any) => VercelResponse;
  json: (jsonBody: any) => VercelResponse;
  status: (statusCode: number) => VercelResponse;
  redirect: (statusOrUrl: string | number, url?: string) => VercelResponse;
};

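These request/response shapes are the surface that user-facing functions type against. A minimal handler using them, as typically written with `@vercel/node` (the import specifier assumes the types are re-exported by the public package, as the helpers import above suggests):

```ts
import type { VercelRequest, VercelResponse } from '@vercel/node';

// GET /api/greet?name=Ada → { "greeting": "Hello Ada" }
export default function handler(req: VercelRequest, res: VercelResponse) {
  const name = typeof req.query.name === 'string' ? req.query.name : 'world';
  res.status(200).json({ greeting: `Hello ${name}` });
}
```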
@@ -1,6 +1,6 @@
{
  "name": "@vercel/node",
  "version": "1.12.2-canary.9",
  "version": "1.13.1-canary.0",
  "license": "MIT",
  "main": "./dist/index",
  "homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -32,10 +32,10 @@
  "@types/cookie": "0.3.3",
  "@types/etag": "1.8.0",
  "@types/test-listen": "1.1.0",
  "@vercel/build-utils": "2.13.1-canary.1",
  "@vercel/build-utils": "2.14.1-canary.0",
  "@vercel/ncc": "0.24.0",
  "@vercel/nft": "0.14.0",
  "@vercel/node-bridge": "2.1.1-canary.2",
  "@vercel/nft": "0.17.5",
  "@vercel/node-bridge": "2.1.2-canary.0",
  "content-type": "1.0.4",
  "cookie": "0.4.0",
  "etag": "1.8.1",

@@ -135,7 +135,6 @@ async function compile(
): Promise<{
  preparedFiles: Files;
  shouldAddSourcemapSupport: boolean;
  watch: string[];
}> {
  const inputFiles = new Set<string>([entrypointPath]);
  const preparedFiles: Files = {};
@@ -198,7 +197,7 @@ async function compile(
    ts: true,
    mixedModules: true,
    ignore: config.excludeFiles,
    readFile(fsPath: string): Buffer | string | null {
    async readFile(fsPath: string): Promise<Buffer | string | null> {
      const relPath = relative(baseDir, fsPath);
      const cached = sourceCache.get(relPath);
      if (cached) return cached.toString();
@@ -256,11 +255,11 @@ async function compile(
      );
      if (
        !symlinkTarget.startsWith('..' + sep) &&
        fileList.indexOf(symlinkTarget) === -1
        !fileList.has(symlinkTarget)
      ) {
        const stats = statSync(resolve(baseDir, symlinkTarget));
        if (stats.isFile()) {
          fileList.push(symlinkTarget);
          fileList.add(symlinkTarget);
        }
      }
    }
@@ -273,7 +272,7 @@ async function compile(
  }

  // Compile ES Modules into CommonJS
  const esmPaths = esmFileList.filter(
  const esmPaths = [...esmFileList].filter(
    file =>
      !file.endsWith('.ts') &&
      !file.endsWith('.tsx') &&
@@ -320,7 +319,6 @@ async function compile(
  return {
    preparedFiles,
    shouldAddSourcemapSupport,
    watch: fileList,
  };
}

@@ -377,7 +375,7 @@ export async function build({

  debug('Tracing input files...');
  const traceTime = Date.now();
  const { preparedFiles, shouldAddSourcemapSupport, watch } = await compile(
  const { preparedFiles, shouldAddSourcemapSupport } = await compile(
    workPath,
    baseDir,
    entrypointPath,
@@ -433,7 +431,7 @@
    runtime: nodeVersion.runtime,
  });

  return { output: lambda, watch };
  return { output: lambda };
}

export async function prepareCache({

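For context on the hunks above — a sketch under the assumption that the `@vercel/nft` bump from 0.14.0 to 0.17.5 is what forces them: newer nft returns `fileList`/`esmFileList` as `Set<string>` rather than arrays (hence `has`/`add` and the spread before `filter`), and its `readFile` hook may return a Promise, which is why the callback can now be `async`. The file paths below are placeholders:

```ts
import { promises as fs } from 'fs';
import { nodeFileTrace } from '@vercel/nft';

async function main() {
  const { fileList, esmFileList } = await nodeFileTrace(['api/index.js'], {
    // The hook may be async in newer nft, so per-file work (such as the
    // lazy TypeScript compilation in the hunk above) can await inside it.
    async readFile(path: string) {
      return fs.readFile(path);
    },
  });

  fileList.has('api/index.js'); // Set membership replaces indexOf(...) === -1
  const esmPaths = [...esmFileList]; // spread before array methods like filter
  console.log(esmPaths.length, 'ESM files traced');
}

main().catch(console.error);
```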
@@ -1,27 +0,0 @@
{
  "private": false,
  "name": "vercel-plugin-go",
  "version": "1.0.0-canary.37",
  "main": "dist/index.js",
  "license": "MIT",
  "files": [
    "dist"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/vercel/vercel.git",
    "directory": "packages/vercel-plugin-go"
  },
  "scripts": {
    "build": "tsc",
    "prepublishOnly": "tsc"
  },
  "dependencies": {
    "@vercel/build-utils": "2.13.1-canary.1",
    "@vercel/go": "1.2.4-canary.6"
  },
  "devDependencies": {
    "@types/node": "*",
    "typescript": "4.3.4"
  }
}
@@ -1,10 +0,0 @@
import { _experimental_convertRuntimeToPlugin } from '@vercel/build-utils';
import * as go from '@vercel/go';

export const build = _experimental_convertRuntimeToPlugin(
  go.build,
  'vercel-plugin-go',
  '.go'
);

export const startDevServer = go.startDevServer;
@@ -1,17 +0,0 @@
{
  "compilerOptions": {
    "declaration": false,
    "esModuleInterop": true,
    "lib": ["esnext"],
    "module": "commonjs",
    "moduleResolution": "node",
    "noEmitOnError": true,
    "noFallthroughCasesInSwitch": true,
    "noImplicitReturns": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "outDir": "dist",
    "strict": true,
    "target": "esnext"
  }
}
6
packages/plugin-node/.gitignore
vendored
@@ -1,6 +0,0 @@
/dist
/test/fixtures/**/.env
/test/fixtures/**/.gitignore
/test/fixtures/**/.output
/test/fixtures/**/types.d.ts
/test/fixtures/11-symlinks/symlink
45
packages/plugin-node/@types/zeit__ncc/index.d.ts
vendored
@@ -1,45 +0,0 @@
declare function ncc(
  entrypoint: string,
  options?: ncc.NccOptions
): ncc.NccResult;

declare namespace ncc {
  export interface NccOptions {
    watch?: any;
    sourceMap?: boolean;
    sourceMapRegister?: boolean;
  }

  export interface Asset {
    source: Buffer;
    permissions: number;
  }

  export interface Assets {
    [name: string]: Asset;
  }

  export interface BuildResult {
    err: Error | null | undefined;
    code: string;
    map: string | undefined;
    assets: Assets | undefined;
    permissions: number | undefined;
  }

  export type HandlerFn = (params: BuildResult) => void;
  export type HandlerCallback = (fn: HandlerFn) => void;
  export type RebuildFn = () => void;
  export type RebuildCallback = (fn: RebuildFn) => void;
  export type CloseCallback = () => void;

  export interface NccResult {
    handler: HandlerCallback;
    rebuild: RebuildCallback;
    close: CloseCallback;
  }
}

declare module '@vercel/ncc' {
  export = ncc;
}
1
packages/plugin-node/bench/.gitignore
vendored
@@ -1 +0,0 @@
lambda
@@ -1,19 +0,0 @@
const express = require('express');

const app = express();

module.exports = app;

app.use(express.json());

app.post('*', (req, res) => {
  if (req.body == null) {
    return res.status(400).send({ error: 'no JSON object in the request' });
  }

  return res.status(200).send(JSON.stringify(req.body, null, 4));
});

app.all('*', (req, res) => {
  res.status(405).send({ error: 'only POST requests are accepted' });
});
@@ -1,7 +0,0 @@
module.exports = (req, res) => {
  if (req.body == null) {
    return res.status(400).send({ error: 'no JSON object in the request' });
  }

  return res.status(200).send(JSON.stringify(req.body, null, 4));
};
Some files were not shown because too many files have changed in this diff.