Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-12 12:57:47 +00:00)

Compare commits (143 commits): vercel-plu… → @vercel/py…
Commit SHA1s in this comparison (143 total):

a400b9b29d, b549c37149, 30d5e64291, 47c2c361d2, 438576fc7c, b30343ef7b, 2dc0dfa572, 9ee54b3dd6,
9d67e0bc06, 466135cf84, eab2e229dc, 698b89a2ba, bae2a2e4df, 57916bb712, 12bbd4e8eb, 4e4c7023dc,
41805790e7, 6ad77ae8e1, e8daf36cd7, c319a2c499, a410baa797, 625568e659, 41868c1fe0, 1b644f1218,
29ea0fb06b, b61f049f11, 16e28f326b, 276397c940, 85d7311199, b8114b8b39, d63e8d3187, 11d3dd04aa,
46bf95ee36, 92252468c2, 71b83d5587, d9e5fdc5e4, 58f479c603, d62461d952, e7f524defb, bdefd0d05d,
ca522fc9f1, 4a8504fc45, 576217b344, f03129ac7a, 3eaad1fd52, 4e471491d8, 99395fd9fe, 4980fe6513,
24676ae020, 72ada9abd8, da893e7c57, e40eecafc9, d9e5342eba, b0ab13778d, c76dfbe8c9, ed6f3cc27e,
7b0186bffe, 45e22b6b60, b2bfae6e2e, b6ed718f52, a84c4a37e6, d2ba06c6cb, 06e7753df0, 235a9c3300,
1b70402325, c76781fac9, 451e0b0cfb, cf477d45b2, cdd2d69e07, 43f1f8b257, 34055e3599, 26abb0a85a,
a1e337e0dd, b72ead480f, 77cf68105f, d800f55dfa, 9dde99f19e, fae7a083fc, cbd651d6ee, 6077a706d1,
cedc82dd9e, b420006401, 8ba604e8fc, fadeee4568, 2b15ba7f46, 4cdfd0e58c, b3ccb5f3ef, 584acc43b7,
f64be93b94, 9abd31769e, 09e3b35e74, 8aa9a0ea05, b2d0ed74c6, aef936af0f, 501be936c0, 7eba282af5,
cf3e4bd726, ee5361b00e, bd929dd5c5, 4ee064bb61, 312a2090a6, a82f117217, e908378486, 8cda5263eb,
a24fd64bce, 0c515a46d5, f19690dc32, 6b2a1c3866, 1e54d606d7, c4ab0ebe9c, 321f1232a1, 8a8203e149,
33527165e7, db10383bc8, 56960e506e, a17f3a96ec, 9ff86a896c, 6ccb4354f9, 82ba932447, b995618afb,
c897b24417, e64b2e1993, 9358a5469e, 2a24210b7d, 2644a59984, 4eeb8c298c, 0351f02dff, 0d7fa2f912,
3b646880e7, 350a0e5f36, 5c21d400bd, 04029013a6, c65e7fa883, 27b68be93f, 99fa729966, 2bb3da80e0,
b852f34a27, ce8e6e3806, 983946650e, 59e4572e76, 5c297122cb, 28f3bf9ef6, a936e92b8b
@@ -1,7 +1,12 @@
 node_modules
 dist
 examples
+
+packages/node/src/bridge.ts
+
 packages/*/test/fixtures
+
+# cli
 packages/cli/@types
 packages/cli/download
 packages/cli/dist
@@ -9,9 +14,24 @@ packages/cli/test/dev/fixtures
 packages/cli/bin
 packages/cli/link
 packages/cli/src/util/dev/templates/*.ts
+
+# client
 packages/client/tests/fixtures
 packages/client/lib
-packages/node/src/bridge.ts
+
+# node-bridge
 packages/node-bridge/bridge.js
 packages/node-bridge/launcher.js
+packages/node-bridge/helpers.js
+packages/node-bridge/source-map-support.js
+
+# middleware
 packages/middleware/src/entries.js
+
+# static-build
+packages/static-build/test/fixtures
+packages/static-build/test/build-fixtures
+packages/static-build/test/cache-fixtures
+
+# redwood
+packages/redwood/test/fixtures
.github/CONTRIBUTING.md (6 changed lines)
@@ -16,17 +16,17 @@ yarn install
 yarn bootstrap
 yarn build
 yarn lint
-yarn test
+yarn test-unit
 ```
 
 Make sure all the tests pass before making changes.
 
 ## Verifying your change
 
-Once you are done with your changes (we even suggest doing it along the way), make sure all the test still run by running:
+Once you are done with your changes (we even suggest doing it along the way), make sure all the test still pass by running:
 
 ```
-yarn build && yarn test
+yarn test-unit
 ```
 
 from the root of the project.
.github/workflows/test-integration-cli.yml (2 changed lines)
@@ -11,7 +11,7 @@ on:
 jobs:
   test:
     name: CLI
-    timeout-minutes: 30
+    timeout-minutes: 40
     strategy:
       fail-fast: false
       matrix:
.gitignore (1 changed line)
@@ -27,5 +27,4 @@ test/lib/deployment/failed-page.txt
 /public
 __pycache__
 .vercel
-.output
 .turbo
@@ -6,6 +6,7 @@
 !.yarnrc
 !yarn.lock
 !package.json
+!turbo.json
 
 # api
 !api/
@@ -9,7 +9,6 @@ A Runtime is an npm module that implements the following interface:
 interface Runtime {
   version: number;
   build: (options: BuildOptions) => Promise<BuildResult>;
-  analyze?: (options: AnalyzeOptions) => Promise<string>;
   prepareCache?: (options: PrepareCacheOptions) => Promise<CacheOutputs>;
   shouldServe?: (options: ShouldServeOptions) => Promise<boolean>;
   startDevServer?: (
@@ -72,26 +71,6 @@ export async function build(options: BuildOptions) {
 }
 ```
 
-### `analyze()`
-
-An **optional** exported function that returns a unique fingerprint used for the
-purpose of [build
-de-duplication](https://vercel.com/docs/v2/platform/deployments#deduplication).
-If the `analyze()` function is not supplied, then a random fingerprint is
-assigned to each build.
-
-**Example:**
-
-```typescript
-import { AnalyzeOptions } from '@vercel/build-utils';
-
-export async function analyze(options: AnalyzeOptions) {
-  // Do calculations to generate a fingerprint based off the source code here…
-
-  return 'fingerprint goes here';
-}
-```
-
 ### `prepareCache()`
 
 An **optional** exported function that is executed after [`build()`](#build) is
@@ -328,15 +307,15 @@ This is a [class](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Refere
 
 This is an abstract enumeration type that is implemented by one of the following possible `String` values:
 
+- `nodejs14.x`
 - `nodejs12.x`
-- `nodejs10.x`
 - `go1.x`
 - `java11`
 - `python3.9`
-- `python3.6`
-- `dotnetcore2.1`
-- `ruby2.5`
-- `provided`
+- `dotnet6`
+- `dotnetcore3.1`
+- `ruby2.7`
+- `provided.al2`
 
 ## `@vercel/build-utils` Helper Functions
 
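For orientation, here is a minimal sketch of what a Runtime module looks like under the trimmed interface above (no `analyze()` export). `BuildOptions` comes from `@vercel/build-utils` as in the documented examples; the `version` value and the returned output shape are illustrative placeholders, not taken from this diff.

```typescript
// Minimal Runtime sketch — only fields named in the documented interface are
// assumed to exist; the version number and return shape are illustrative.
import { BuildOptions } from '@vercel/build-utils';

export const version = 2;

export async function build(options: BuildOptions) {
  // Inspect the entrypoint and produce the build output here…
  console.log(`building ${options.entrypoint}`);
  return { output: {} }; // stand-in for a real BuildResult
}
```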
api/_lib/script/build.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
+import fs from 'fs/promises';
+import { join } from 'path';
+import { getExampleList } from '../examples/example-list';
+import { mapOldToNew } from '../examples/map-old-to-new';
+
+const repoRoot = join(__dirname, '..', '..', '..');
+const pubDir = join(repoRoot, 'public');
+
+async function main() {
+  console.log(`Building static frontend ${repoRoot}...`);
+
+  await fs.rm(pubDir, { recursive: true, force: true });
+  await fs.mkdir(pubDir);
+
+  const examples = await getExampleList();
+  const pathListAll = join(pubDir, 'list-all.json');
+  await fs.writeFile(pathListAll, JSON.stringify(examples));
+
+  const exampleDirs = await fs.readdir(join(repoRoot, 'examples'), {
+    withFileTypes: true,
+  });
+
+  const existingExamples = exampleDirs
+    .filter(dir => dir.isDirectory())
+    .map(dir => ({
+      name: dir.name,
+      visible: true,
+      suggestions: [],
+    }));
+
+  const oldExamples = Object.keys(mapOldToNew).map(key => ({
+    name: key,
+    visible: false,
+    suggestions: mapOldToNew[key],
+  }));
+
+  const pathList = join(pubDir, 'list.json');
+  await fs.writeFile(
+    pathList,
+    JSON.stringify([...existingExamples, ...oldExamples])
+  );
+
+  console.log('Completed building static frontend.');
+}
+
+main().catch(console.error);
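For reference, each entry that this script writes into `public/list.json` has the shape below, inferred from the mapping code above; the interface name is illustrative and not part of the source.

```typescript
// Inferred shape of the entries in public/list.json (interface name is illustrative).
interface ExampleListEntry {
  name: string;          // example directory name, or a legacy example name
  visible: boolean;      // false for legacy names kept only to point at replacements
  suggestions: string[]; // replacement example names taken from mapOldToNew
}
```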
@@ -1,10 +0,0 @@
-import { VercelRequest, VercelResponse } from '@vercel/node';
-import { getExampleList } from '../_lib/examples/example-list';
-import { withApiHandler } from '../_lib/util/with-api-handler';
-
-export default withApiHandler(async function (
-  req: VercelRequest,
-  res: VercelResponse
-) {
-  res.status(200).json(await getExampleList());
-});
@@ -1,27 +0,0 @@
-import { extract } from '../_lib/examples/extract';
-import { summary } from '../_lib/examples/summary';
-import { VercelRequest, VercelResponse } from '@vercel/node';
-import { mapOldToNew } from '../_lib/examples/map-old-to-new';
-import { withApiHandler } from '../_lib/util/with-api-handler';
-
-export default withApiHandler(async function (
-  req: VercelRequest,
-  res: VercelResponse
-) {
-  await extract('https://github.com/vercel/vercel/archive/main.zip', '/tmp');
-  const exampleList = summary('/tmp/vercel-main/examples');
-
-  const existingExamples = Array.from(exampleList).map(key => ({
-    name: key,
-    visible: true,
-    suggestions: [],
-  }));
-
-  const oldExamples = Object.keys(mapOldToNew).map(key => ({
-    name: key,
-    visible: false,
-    suggestions: mapOldToNew[key],
-  }));
-
-  res.status(200).json([...existingExamples, ...oldExamples]);
-});
@@ -16,4 +16,4 @@ If you would not like to verify your domain, you can remove it from your account
 
 #### Resources
 
-- [Vercel Custom Domains Documentation](https://vercel.com/docs/v2/custom-domains)
+- [Vercel Custom Domains Documentation](https://vercel.com/docs/concepts/projects/custom-domains)
@@ -2,7 +2,7 @@
 
 #### Why This Error Occurred
 
-You ran `vercel dev` inside a project that contains a `vercel.json` file with `env` or `build.env` properties that use [Vercel Secrets](https://vercel.com/docs/v2/build-step#environment-variables).
+You ran `vercel dev` inside a project that contains a `vercel.json` file with `env` or `build.env` properties that use [Vercel Secrets](https://vercel.com/docs/concepts/projects/environment-variables).
 
 In order to use environment variables in your project locally that have values defined using the Vercel Secrets format (e.g. `@my-secret-value`), you will need to provide the value as an environment variable using a `.env`.
 
@@ -24,4 +24,4 @@ TEST=value
 
 In the above example, `TEST` represents the name of the environment variable and `value` its value.
 
-For more information on Environment Variables in development, [see the documentation](https://vercel.com/docs/v2/build-step#environment-variables).
+For more information on Environment Variables in development, [see the documentation](https://vercel.com/docs/concepts/projects/environment-variables).
examples/amp/.gitignore (deleted, 1 line)
@@ -1 +0,0 @@
-.env
@@ -1,19 +0,0 @@
-# AMP Example
-
-This directory is a brief example of an [AMP](https://amp.dev/) site that can be deployed to Vercel with zero configuration.
-
-## Deploy Your Own
-
-Deploy your own AMP project with Vercel.
-
-[](https://vercel.com/new/clone?repository-url=https://github.com/vercel/vercel/tree/main/examples/amp)
-
-_Live Example: https://amp-template.vercel.app_
-
-### How We Created This Example
-
-To get started deploying AMP with Vercel, you can use the [Vercel CLI](https://vercel.com/download) to initialize the project:
-
-```shell
-$ vercel init amp
-```
Binary image file removed (before: 2.0 KiB; not shown).
@@ -1,72 +0,0 @@
|
|||||||
<!DOCTYPE html>
|
|
||||||
<html ⚡>
|
|
||||||
<head>
|
|
||||||
<meta charset="utf-8" />
|
|
||||||
<meta name="viewport" content="width=device-width,minimum-scale=1" />
|
|
||||||
<link rel="shortcut icon" href="favicon.png">
|
|
||||||
<style amp-boilerplate>body{-webkit-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-moz-animation:-amp-start 8s steps(1,end) 0s 1 normal both;-ms-animation:-amp-start 8s steps(1,end) 0s 1 normal both;animation:-amp-start 8s steps(1,end) 0s 1 normal both}@-webkit-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-moz-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-ms-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@-o-keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}@keyframes -amp-start{from{visibility:hidden}to{visibility:visible}}</style><noscript><style amp-boilerplate>body{-webkit-animation:none;-moz-animation:none;-ms-animation:none;animation:none}</style></noscript>
|
|
||||||
<link rel="canonical" href="index.html" />
|
|
||||||
<title>AMP Website</title>
|
|
||||||
<script async src="https://cdn.ampproject.org/v0.js"></script>
|
|
||||||
<style amp-custom>
|
|
||||||
body > * {
|
|
||||||
margin: 3rem 1rem;
|
|
||||||
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
|
|
||||||
color: #525252;
|
|
||||||
}
|
|
||||||
|
|
||||||
h3 {
|
|
||||||
font-size: 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
h4 {
|
|
||||||
margin-top: 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
p {
|
|
||||||
font-size: 1.2rem;
|
|
||||||
line-height: 2rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.links {
|
|
||||||
display: flex;
|
|
||||||
justify-content: center;
|
|
||||||
margin-bottom: 3rem;
|
|
||||||
}
|
|
||||||
|
|
||||||
.links a {
|
|
||||||
margin: 0 10px;
|
|
||||||
font-size: 1rem;
|
|
||||||
color: #005af0;
|
|
||||||
}
|
|
||||||
</style>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<center>
|
|
||||||
<amp-img width=150 height=150 layout="fixed" class="logo" src="logo.png"></amp-img>
|
|
||||||
<h3>Welcome to your AMP page</h3>
|
|
||||||
<p>AMP is a web component framework to <br> easily create user-first websites, stories, ads and emails.</p>
|
|
||||||
|
|
||||||
<h4>Links</h4>
|
|
||||||
<div class="links">
|
|
||||||
<a href="https://amp.dev/">Homepage</a>
|
|
||||||
<a href="https://amp.dev/documentation/guides-and-tutorials/?format=websites">Tutorials</a>
|
|
||||||
<a href="https://amp.dev/documentation/examples/">Examples</a>
|
|
||||||
<a href="https://blog.amp.dev">Blog</a>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<h4>Ready to get started?</h4>
|
|
||||||
<div class="links">
|
|
||||||
<a href="https://amp.dev/documentation/guides-and-tutorials/start/create/?format=websites">Create your first AMP page</a>
|
|
||||||
</div>
|
|
||||||
|
|
||||||
<h4>Get involved</h4>
|
|
||||||
<div class="links">
|
|
||||||
<a href="https://twitter.com/amphtml">Twitter</a>
|
|
||||||
<a href="https://amphtml.slack.com">Slack</a>
|
|
||||||
<a href="https://amp.dev/events/amp-conf-2019">AMP Conf</a>
|
|
||||||
<a href="https://amp.dev/events/amp-roadshow">AMP Roadshow</a>
|
|
||||||
</div>
|
|
||||||
</center>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
Binary image file removed (before: 43 KiB; not shown).
examples/angular/.gitignore (1 changed line)
@@ -41,4 +41,3 @@ testem.log
 .DS_Store
 Thumbs.db
 .vercel
-.output
@@ -10,7 +10,7 @@ Deploy your own Angular project with Vercel.
 
 [](https://vercel.com/new/clone?repository-url=https://github.com/vercel/vercel/tree/main/examples/angular&template=angular)
 
-_Live Example: https://angular-now-examples.vercel.app_
+_Live Example: https://angular-template.vercel.app_
 
 ## Development server
 
@@ -6,7 +6,7 @@ This directory is a brief example of a [Dojo](https://dojo.io) site that can be
 
 Deploy your own Dojo project with Vercel.
 
-[](https://vercel.com/new/clone?repository-url=https://github.com/vercel/vercel/tree/main/dojo&template=dojo)
+[](https://vercel.com/new/clone?repository-url=https://github.com/vercel/vercel/tree/main/examples/dojo&template=dojo)
 
 ### How We Created This Example
 
examples/eleventy/LICENSE.md (new file, 20 lines): standard MIT License text, Copyright (c) 2022 Zach Leatherman @zachleat.
@@ -1,5 +1,19 @@
 {
   "private": true,
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/11ty/eleventy-base-blog.git"
+  },
+  "author": {
+    "name": "Zach Leatherman",
+    "email": "zachleatherman@gmail.com",
+    "url": "https://zachleat.com/"
+  },
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/11ty/eleventy-base-blog/issues"
+  },
+  "homepage": "https://github.com/11ty/eleventy-base-blog#readme",
   "scripts": {
     "build": "eleventy",
     "watch": "eleventy --watch",
@@ -15,7 +15,7 @@ cache:
 
 env:
   global:
-    # See https://git.io/vdao3 for details.
+    # See https://github.com/ember-cli/ember-cli/blob/master/docs/build-concurrency.md
     - JOBS=1
 
 script:
@@ -56,7 +56,7 @@ async function main() {
   // if it's an empty string, just ignore it
   if (!formatted) return false;
 
-  let type = url.substr(-3) == 'css' ? 'style' : 'script';
+  let type = url.slice(-3) == 'css' ? 'style' : 'script';
   results += `</${formatted}>;rel=preload;as=${type},`;
 
 });
examples/nextjs/.gitignore (1 changed line)
@@ -23,6 +23,7 @@
 npm-debug.log*
 yarn-debug.log*
 yarn-error.log*
+.pnpm-debug.log*
 
 # local env files
 .env.local
@@ -1,3 +1,6 @@
-module.exports = {
+/** @type {import('next').NextConfig} */
+const nextConfig = {
   reactStrictMode: true,
 }
+
+module.exports = nextConfig
examples/nextjs/package-lock.json (generated): 3,145 changed lines; diff suppressed because it is too large.
@@ -7,12 +7,12 @@
     "lint": "next lint"
   },
   "dependencies": {
-    "next": "^12.0.8",
-    "react": "17.0.2",
-    "react-dom": "17.0.2"
+    "next": "12.1.4",
+    "react": "18.0.0",
+    "react-dom": "18.0.0"
   },
   "devDependencies": {
-    "eslint": "8.7.0",
-    "eslint-config-next": "^12.0.8"
+    "eslint": "8.12.0",
+    "eslint-config-next": "12.1.4"
   }
 }
examples/remix/.gitignore (1 changed line)
@@ -2,7 +2,6 @@ node_modules
 
 .cache
 .vercel
-.output
 
 public/build
 api/_build
@@ -31,7 +31,7 @@ export default function Index() {
       <p>
         Wait a sec...<em>its children</em>? To understand what we mean by
         this,{" "}
-        <a href="https://remix.run/tutorial/4-nested-routes-params">
+        <a href="https://remix.run/docs/en/v1/guides/routing">
           read all about nested routes in the docs
         </a>
         .
3
examples/sanity/.env.template
Normal file
3
examples/sanity/.env.template
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
# Run `vercel env pull` to generate a .env file from your Vercel project
|
||||||
|
SANITY_STUDIO_API_PROJECT_ID=
|
||||||
|
SANITY_STUDIO_API_DATASET=
|
||||||
12
examples/sanity/.npmignore
Normal file
12
examples/sanity/.npmignore
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
# Logs
|
||||||
|
/logs
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Coverage directory used by tools like istanbul
|
||||||
|
/coverage
|
||||||
|
|
||||||
|
# Dependency directories
|
||||||
|
node_modules
|
||||||
|
|
||||||
|
# Compiled sanity studio
|
||||||
|
/dist
|
||||||
29
examples/sanity/README.md
Normal file
29
examples/sanity/README.md
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
# Sanity Blogging Content Studio
|
||||||
|
|
||||||
|
Congratulations, you have now installed Sanity Studio, an open source real-time content editing environment connected to the Sanity backend.
|
||||||
|
|
||||||
|
Now you can do the following things:
|
||||||
|
|
||||||
|
- [Read “getting started” in the docs](https://www.sanity.io/docs/introduction/getting-started?utm_source=readme)
|
||||||
|
- Check out the example frontend: [React/Next.js](https://github.com/sanity-io/tutorial-sanity-blog-react-next)
|
||||||
|
- [Read the blog post about this template](https://www.sanity.io/blog/build-your-own-blog-with-sanity-and-next-js?utm_source=readme)
|
||||||
|
- [Join the community Slack](https://slack.sanity.io/?utm_source=readme)
|
||||||
|
- [Extend and build plugins](https://www.sanity.io/docs/content-studio/extending?utm_source=readme)
|
||||||
|
|
||||||
|
## Develop locally
|
||||||
|
|
||||||
|
Install dependencies:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
npx @sanity/cli install
|
||||||
|
```
|
||||||
|
|
||||||
|
Pull down environment variables from your Vercel project (requires the [Vercel CLI](https://vercel.com/cli)):
|
||||||
|
|
||||||
|
```sh
|
||||||
|
vercel env pull
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
You can also run `npx @sanity/init` in this repo and agree to reconfigure it. You'll then be able to select from existing projects. The CLI will update `sanity.json` with the project ID and dataset name.
|
||||||
7
examples/sanity/config/.checksums
Normal file
7
examples/sanity/config/.checksums
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"#": "Used by Sanity to keep track of configuration file checksums, do not delete or modify!",
|
||||||
|
"@sanity/default-layout": "bb034f391ba508a6ca8cd971967cbedeb131c4d19b17b28a0895f32db5d568ea",
|
||||||
|
"@sanity/default-login": "6fb6d3800aa71346e1b84d95bbcaa287879456f2922372bb0294e30b968cd37f",
|
||||||
|
"@sanity/form-builder": "b38478227ba5e22c91981da4b53436df22e48ff25238a55a973ed620be5068aa",
|
||||||
|
"@sanity/data-aspects": "d199e2c199b3e26cd28b68dc84d7fc01c9186bf5089580f2e2446994d36b3cb6"
|
||||||
|
}
|
||||||
3
examples/sanity/config/@sanity/data-aspects.json
Normal file
3
examples/sanity/config/@sanity/data-aspects.json
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"listOptions": {}
|
||||||
|
}
|
||||||
6
examples/sanity/config/@sanity/default-layout.json
Normal file
6
examples/sanity/config/@sanity/default-layout.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"toolSwitcher": {
|
||||||
|
"order": [],
|
||||||
|
"hidden": []
|
||||||
|
}
|
||||||
|
}
|
||||||
7
examples/sanity/config/@sanity/default-login.json
Normal file
7
examples/sanity/config/@sanity/default-login.json
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
{
|
||||||
|
"providers": {
|
||||||
|
"mode": "append",
|
||||||
|
"redirectOnSingle": false,
|
||||||
|
"entries": []
|
||||||
|
}
|
||||||
|
}
|
||||||
5
examples/sanity/config/@sanity/form-builder.json
Normal file
5
examples/sanity/config/@sanity/form-builder.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"images": {
|
||||||
|
"directUploads": true
|
||||||
|
}
|
||||||
|
}
|
||||||
30
examples/sanity/package.json
Normal file
30
examples/sanity/package.json
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
{
|
||||||
|
"name": "verceltemplateblogstudio",
|
||||||
|
"private": true,
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "This is the public list of examples for **Vercel**",
|
||||||
|
"main": "package.json",
|
||||||
|
"author": "Knut Melvær <knut@sanity.io>",
|
||||||
|
"license": "UNLICENSED",
|
||||||
|
"scripts": {
|
||||||
|
"start": "sanity start",
|
||||||
|
"build": "sanity build"
|
||||||
|
},
|
||||||
|
"keywords": [
|
||||||
|
"sanity"
|
||||||
|
],
|
||||||
|
"dependencies": {
|
||||||
|
"@sanity/core": "^2.26",
|
||||||
|
"@sanity/default-layout": "^2.26",
|
||||||
|
"@sanity/default-login": "^2.26",
|
||||||
|
"@sanity/desk-tool": "^2.26",
|
||||||
|
"@sanity/vision": "^2.26",
|
||||||
|
"prop-types": "^15.7",
|
||||||
|
"react": "^17.0",
|
||||||
|
"react-dom": "^17.0",
|
||||||
|
"styled-components": "^5.2"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@sanity/cli": "^2.26"
|
||||||
|
}
|
||||||
|
}
|
||||||
1
examples/sanity/plugins/.gitkeep
Normal file
1
examples/sanity/plugins/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
User-specific packages can be placed here
|
||||||
29
examples/sanity/sanity.json
Normal file
29
examples/sanity/sanity.json
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
{
|
||||||
|
"root": true,
|
||||||
|
"project": {
|
||||||
|
"name": "vercel-template-blog-studio"
|
||||||
|
},
|
||||||
|
"api": {
|
||||||
|
"projectId": "YOUR_PROJECT_ID",
|
||||||
|
"dataset": "YOUR_DATASET_NAME"
|
||||||
|
},
|
||||||
|
"plugins": [
|
||||||
|
"@sanity/base",
|
||||||
|
"@sanity/default-layout",
|
||||||
|
"@sanity/default-login",
|
||||||
|
"@sanity/desk-tool"
|
||||||
|
],
|
||||||
|
"env": {
|
||||||
|
"development": {
|
||||||
|
"plugins": [
|
||||||
|
"@sanity/vision"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"parts": [
|
||||||
|
{
|
||||||
|
"name": "part:@sanity/base/schema",
|
||||||
|
"path": "./schemas/schema"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
48
examples/sanity/schemas/author.js
Normal file
48
examples/sanity/schemas/author.js
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
export default {
|
||||||
|
name: 'author',
|
||||||
|
title: 'Author',
|
||||||
|
type: 'document',
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
name: 'name',
|
||||||
|
title: 'Name',
|
||||||
|
type: 'string',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'slug',
|
||||||
|
title: 'Slug',
|
||||||
|
type: 'slug',
|
||||||
|
options: {
|
||||||
|
source: 'name',
|
||||||
|
maxLength: 96,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'image',
|
||||||
|
title: 'Image',
|
||||||
|
type: 'image',
|
||||||
|
options: {
|
||||||
|
hotspot: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'bio',
|
||||||
|
title: 'Bio',
|
||||||
|
type: 'array',
|
||||||
|
of: [
|
||||||
|
{
|
||||||
|
title: 'Block',
|
||||||
|
type: 'block',
|
||||||
|
styles: [{title: 'Normal', value: 'normal'}],
|
||||||
|
lists: [],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
preview: {
|
||||||
|
select: {
|
||||||
|
title: 'name',
|
||||||
|
media: 'image',
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
65
examples/sanity/schemas/blockContent.js
Normal file
65
examples/sanity/schemas/blockContent.js
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
/**
|
||||||
|
* This is the schema definition for the rich text fields used for
|
||||||
|
* for this blog studio. When you import it in schemas.js it can be
|
||||||
|
* reused in other parts of the studio with:
|
||||||
|
* {
|
||||||
|
* name: 'someName',
|
||||||
|
* title: 'Some title',
|
||||||
|
* type: 'blockContent'
|
||||||
|
* }
|
||||||
|
*/
|
||||||
|
export default {
|
||||||
|
title: 'Block Content',
|
||||||
|
name: 'blockContent',
|
||||||
|
type: 'array',
|
||||||
|
of: [
|
||||||
|
{
|
||||||
|
title: 'Block',
|
||||||
|
type: 'block',
|
||||||
|
// Styles let you set what your user can mark up blocks with. These
|
||||||
|
// correspond with HTML tags, but you can set any title or value
|
||||||
|
// you want and decide how you want to deal with it where you want to
|
||||||
|
// use your content.
|
||||||
|
styles: [
|
||||||
|
{title: 'Normal', value: 'normal'},
|
||||||
|
{title: 'H1', value: 'h1'},
|
||||||
|
{title: 'H2', value: 'h2'},
|
||||||
|
{title: 'H3', value: 'h3'},
|
||||||
|
{title: 'H4', value: 'h4'},
|
||||||
|
{title: 'Quote', value: 'blockquote'},
|
||||||
|
],
|
||||||
|
lists: [{title: 'Bullet', value: 'bullet'}],
|
||||||
|
// Marks let you mark up inline text in the block editor.
|
||||||
|
marks: {
|
||||||
|
// Decorators usually describe a single property – e.g. a typographic
|
||||||
|
// preference or highlighting by editors.
|
||||||
|
decorators: [
|
||||||
|
{title: 'Strong', value: 'strong'},
|
||||||
|
{title: 'Emphasis', value: 'em'},
|
||||||
|
],
|
||||||
|
// Annotations can be any object structure – e.g. a link or a footnote.
|
||||||
|
annotations: [
|
||||||
|
{
|
||||||
|
title: 'URL',
|
||||||
|
name: 'link',
|
||||||
|
type: 'object',
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
title: 'URL',
|
||||||
|
name: 'href',
|
||||||
|
type: 'url',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
],
|
||||||
|
},
|
||||||
|
},
|
||||||
|
// You can add additional types here. Note that you can't use
|
||||||
|
// primitive types such as 'string' and 'number' in the same array
|
||||||
|
// as a block type.
|
||||||
|
{
|
||||||
|
type: 'image',
|
||||||
|
options: {hotspot: true},
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
17
examples/sanity/schemas/category.js
Normal file
17
examples/sanity/schemas/category.js
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
export default {
|
||||||
|
name: 'category',
|
||||||
|
title: 'Category',
|
||||||
|
type: 'document',
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
name: 'title',
|
||||||
|
title: 'Title',
|
||||||
|
type: 'string',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'description',
|
||||||
|
title: 'Description',
|
||||||
|
type: 'text',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
}
|
||||||
65
examples/sanity/schemas/post.js
Normal file
65
examples/sanity/schemas/post.js
Normal file
@@ -0,0 +1,65 @@
|
|||||||
|
export default {
|
||||||
|
name: 'post',
|
||||||
|
title: 'Post',
|
||||||
|
type: 'document',
|
||||||
|
fields: [
|
||||||
|
{
|
||||||
|
name: 'title',
|
||||||
|
title: 'Title',
|
||||||
|
type: 'string',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'slug',
|
||||||
|
title: 'Slug',
|
||||||
|
type: 'slug',
|
||||||
|
options: {
|
||||||
|
source: 'title',
|
||||||
|
maxLength: 96,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'author',
|
||||||
|
title: 'Author',
|
||||||
|
type: 'reference',
|
||||||
|
to: {type: 'author'},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'mainImage',
|
||||||
|
title: 'Main image',
|
||||||
|
type: 'image',
|
||||||
|
options: {
|
||||||
|
hotspot: true,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'categories',
|
||||||
|
title: 'Categories',
|
||||||
|
type: 'array',
|
||||||
|
of: [{type: 'reference', to: {type: 'category'}}],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'publishedAt',
|
||||||
|
title: 'Published at',
|
||||||
|
type: 'datetime',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
name: 'body',
|
||||||
|
title: 'Body',
|
||||||
|
type: 'blockContent',
|
||||||
|
},
|
||||||
|
],
|
||||||
|
|
||||||
|
preview: {
|
||||||
|
select: {
|
||||||
|
title: 'title',
|
||||||
|
author: 'author.name',
|
||||||
|
media: 'mainImage',
|
||||||
|
},
|
||||||
|
prepare(selection) {
|
||||||
|
const {author} = selection
|
||||||
|
return Object.assign({}, selection, {
|
||||||
|
subtitle: author && `by ${author}`,
|
||||||
|
})
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
29
examples/sanity/schemas/schema.js
Normal file
29
examples/sanity/schemas/schema.js
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
// First, we must import the schema creator
|
||||||
|
import createSchema from 'part:@sanity/base/schema-creator'
|
||||||
|
|
||||||
|
// Then import schema types from any plugins that might expose them
|
||||||
|
import schemaTypes from 'all:part:@sanity/base/schema-type'
|
||||||
|
|
||||||
|
// We import object and document schemas
|
||||||
|
import blockContent from './blockContent'
|
||||||
|
import category from './category'
|
||||||
|
import post from './post'
|
||||||
|
import author from './author'
|
||||||
|
|
||||||
|
// Then we give our schema to the builder and provide the result to Sanity
|
||||||
|
export default createSchema({
|
||||||
|
// We name our schema
|
||||||
|
name: 'default',
|
||||||
|
// Then proceed to concatenate our document type
|
||||||
|
// to the ones provided by any plugins that are installed
|
||||||
|
types: schemaTypes.concat([
|
||||||
|
// The following are document types which will appear
|
||||||
|
// in the studio.
|
||||||
|
post,
|
||||||
|
author,
|
||||||
|
category,
|
||||||
|
// When added to this list, object types can be used as
|
||||||
|
// { type: 'typename' } in other document schemas
|
||||||
|
blockContent,
|
||||||
|
]),
|
||||||
|
})
|
||||||
1
examples/sanity/static/.gitkeep
Normal file
1
examples/sanity/static/.gitkeep
Normal file
@@ -0,0 +1 @@
|
|||||||
|
Files placed here will be served by the Sanity server under the `/static`-prefix
|
||||||
examples/sanity/static/favicon.ico (new binary file, 1.1 KiB; not shown).
6
examples/sanity/tsconfig.json
Normal file
6
examples/sanity/tsconfig.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
// Note: This config is only used to help editors like VS Code understand/resolve
|
||||||
|
// parts, the actual transpilation is done by babel. Any compiler configuration in
|
||||||
|
// here will be ignored.
|
||||||
|
"include": ["./node_modules/@sanity/base/types/**/*.ts", "./**/*.ts", "./**/*.tsx"]
|
||||||
|
}
|
||||||
examples/sanity/yarn.lock (new file, 9,911 lines): diff suppressed because it is too large.
examples/solidstart/.gitignore (1 changed line)
@@ -2,7 +2,6 @@ dist
 worker
 .solid
 .vercel
-.output
 
 # dependencies
 /node_modules
@@ -11,7 +11,7 @@ export class AppProfile {
 
   normalize(name: string): string {
     if (name) {
-      return name.substr(0, 1).toUpperCase() + name.substr(1).toLowerCase();
+      return name.slice(0, 1).toUpperCase() + name.slice(1).toLowerCase();
     }
     return '';
   }
examples/sveltekit/.gitignore (1 changed line)
@@ -7,4 +7,3 @@ node_modules
 .env.*
 !.env.example
 .vercel
-.output
examples/sveltekit/package-lock.json (generated): 670 changed lines; diff suppressed because it is too large.
@@ -1,5 +1,7 @@
 {
   "private": true,
+  "name": "sveltekit",
+  "version": "0.0.1",
   "scripts": {
     "dev": "svelte-kit dev",
     "build": "svelte-kit build",
@@ -9,7 +11,7 @@
   "devDependencies": {
     "@sveltejs/adapter-auto": "next",
     "@sveltejs/kit": "next",
-    "svelte": "^3.44.0"
+    "svelte": "^3.46.0"
   },
   "type": "module",
   "dependencies": {
@@ -8,6 +8,6 @@
     %svelte.head%
   </head>
   <body>
-    <div id="svelte">%svelte.body%</div>
+    <div>%svelte.body%</div>
   </body>
 </html>
examples/sveltekit/src/global.d.ts (deleted, 1 line)
@@ -1 +0,0 @@
-/// <reference types="@sveltejs/kit" />
@@ -1,19 +1,22 @@
 import cookie from 'cookie';
 import { v4 as uuid } from '@lukeed/uuid';
 
-export const handle = async ({ request, resolve }) => {
-	const cookies = cookie.parse(request.headers.cookie || '');
-	request.locals.userid = cookies.userid || uuid();
+export const handle = async ({ event, resolve }) => {
+	const cookies = cookie.parse(event.request.headers.get('cookie') || '');
+	event.locals.userid = cookies.userid || uuid();
 
-	const response = await resolve(request);
+	const response = await resolve(event);
 
 	if (!cookies.userid) {
 		// if this is the first time the user has visited this app,
 		// set a cookie so that we recognise them when they return
-		response.headers['set-cookie'] = cookie.serialize('userid', request.locals.userid, {
+		response.headers.set(
+			'set-cookie',
+			cookie.serialize('userid', event.locals.userid, {
 			path: '/',
 			httpOnly: true
-		});
+		})
+		);
 	}
 
 	return response;
@@ -22,7 +22,7 @@
 
 <div class="counter-viewport">
   <div class="counter-digits" style="transform: translate(0, {100 * offset}%)">
-    <strong style="top: -100%" aria-hidden="true">{Math.floor($displayed_count + 1)}</strong>
+    <strong class="hidden" aria-hidden="true">{Math.floor($displayed_count + 1)}</strong>
     <strong>{Math.floor($displayed_count)}</strong>
   </div>
 </div>
@@ -94,4 +94,9 @@
   width: 100%;
   height: 100%;
 }
+
+.hidden {
+  top: -100%;
+  user-select: none;
+}
 </style>
@@ -1,6 +1,8 @@
|
|||||||
|
import { invalidate } from '$app/navigation';
|
||||||
|
|
||||||
// this action (https://svelte.dev/tutorial/actions) allows us to
|
// this action (https://svelte.dev/tutorial/actions) allows us to
|
||||||
// progressively enhance a <form> that already works without JS
|
// progressively enhance a <form> that already works without JS
|
||||||
export function enhance(form, { pending, error, result }) {
|
export function enhance(form, { pending, error, result } = {}) {
|
||||||
let current_token;
|
let current_token;
|
||||||
|
|
||||||
async function handle_submit(e) {
|
async function handle_submit(e) {
|
||||||
@@ -8,31 +10,35 @@ export function enhance(form, { pending, error, result }) {
|
|||||||
|
|
||||||
e.preventDefault();
|
e.preventDefault();
|
||||||
|
|
||||||
const body = new FormData(form);
|
const data = new FormData(form);
|
||||||
|
|
||||||
if (pending) pending(body, form);
|
if (pending) pending({ data, form });
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const res = await fetch(form.action, {
|
const response = await fetch(form.action, {
|
||||||
method: form.method,
|
method: form.method,
|
||||||
headers: {
|
headers: {
|
||||||
accept: 'application/json'
|
accept: 'application/json'
|
||||||
},
|
},
|
||||||
body
|
body: data
|
||||||
});
|
});
|
||||||
|
|
||||||
if (token !== current_token) return;
|
if (token !== current_token) return;
|
||||||
|
|
||||||
if (res.ok) {
|
if (response.ok) {
|
||||||
result(res, form);
|
if (result) result({ data, form, response });
|
||||||
|
|
||||||
|
const url = new URL(form.action);
|
||||||
|
url.search = url.hash = '';
|
||||||
|
invalidate(url.href);
|
||||||
} else if (error) {
|
} else if (error) {
|
||||||
error(res, null, form);
|
error({ data, form, error: null, response });
|
||||||
} else {
|
} else {
|
||||||
console.error(await res.text());
|
console.error(await response.text());
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
if (error) {
|
if (error) {
|
||||||
error(null, e, form);
|
error({ data, form, error: e, response: null });
|
||||||
} else {
|
} else {
|
||||||
throw e;
|
throw e;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,14 +0,0 @@
|
|||||||
import { api } from './_api';
|
|
||||||
|
|
||||||
// PATCH /todos/:uid.json
|
|
||||||
export const patch = async (request) => {
|
|
||||||
return api(request, `todos/${request.locals.userid}/${request.params.uid}`, {
|
|
||||||
text: request.body.get('text'),
|
|
||||||
done: request.body.has('done') ? !!request.body.get('done') : undefined
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
// DELETE /todos/:uid.json
|
|
||||||
export const del = async (request) => {
|
|
||||||
return api(request, `todos/${request.locals.userid}/${request.params.uid}`);
|
|
||||||
};
|
|
||||||
@@ -1,6 +1,6 @@
|
|||||||
/*
|
/*
|
||||||
This module is used by the /todos.json and /todos/[uid].json
|
This module is used by the /todos endpoint to
|
||||||
endpoints to make calls to api.svelte.dev, which stores todos
|
make calls to api.svelte.dev, which stores todos
|
||||||
for each user. The leading underscore indicates that this is
|
for each user. The leading underscore indicates that this is
|
||||||
a private module, _not_ an endpoint — visiting /todos/_api
|
a private module, _not_ an endpoint — visiting /todos/_api
|
||||||
will net you a 404 response.
|
will net you a 404 response.
|
||||||
@@ -11,35 +11,12 @@
|
|||||||
|
|
||||||
const base = 'https://api.svelte.dev';
|
const base = 'https://api.svelte.dev';
|
||||||
|
|
||||||
export async function api(request, resource, data) {
|
export function api(method, resource, data) {
|
||||||
// user must have a cookie set
|
return fetch(`${base}/${resource}`, {
|
||||||
if (!request.locals.userid) {
|
method,
|
||||||
return { status: 401 };
|
|
||||||
}
|
|
||||||
|
|
||||||
const res = await fetch(`${base}/${resource}`, {
|
|
||||||
method: request.method,
|
|
||||||
headers: {
|
headers: {
|
||||||
'content-type': 'application/json'
|
'content-type': 'application/json'
|
||||||
},
|
},
|
||||||
body: data && JSON.stringify(data)
|
body: data && JSON.stringify(data)
|
||||||
});
|
});
|
||||||
|
|
||||||
// if the request came from a <form> submission, the browser's default
|
|
||||||
// behaviour is to show the URL corresponding to the form's "action"
|
|
||||||
// attribute. in those cases, we want to redirect them back to the
|
|
||||||
// /todos page, rather than showing the response
|
|
||||||
if (res.ok && request.method !== 'GET' && request.headers.accept !== 'application/json') {
|
|
||||||
return {
|
|
||||||
status: 303,
|
|
||||||
headers: {
|
|
||||||
location: '/todos'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
status: res.status,
|
|
||||||
body: await res.json()
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
|
|||||||
66
examples/sveltekit/src/routes/todos/index.js
Normal file
66
examples/sveltekit/src/routes/todos/index.js
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import { api } from './_api';
|
||||||
|
|
||||||
|
export const get = async ({ locals }) => {
|
||||||
|
// locals.userid comes from src/hooks.js
|
||||||
|
const response = await api('get', `todos/${locals.userid}`);
|
||||||
|
|
||||||
|
if (response.status === 404) {
|
||||||
|
// user hasn't created a todo list.
|
||||||
|
// start with an empty array
|
||||||
|
return {
|
||||||
|
body: {
|
||||||
|
todos: []
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (response.status === 200) {
|
||||||
|
return {
|
||||||
|
body: {
|
||||||
|
todos: await response.json()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: response.status
|
||||||
|
};
|
||||||
|
};
|
||||||
|
|
||||||
|
export const post = async ({ request, locals }) => {
|
||||||
|
const form = await request.formData();
|
||||||
|
|
||||||
|
await api('post', `todos/${locals.userid}`, {
|
||||||
|
text: form.get('text')
|
||||||
|
});
|
||||||
|
|
||||||
|
return {};
|
||||||
|
};
|
||||||
|
|
||||||
|
// If the user has JavaScript disabled, the URL will change to
|
||||||
|
// include the method override unless we redirect back to /todos
|
||||||
|
const redirect = {
|
||||||
|
status: 303,
|
||||||
|
headers: {
|
||||||
|
location: '/todos'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
export const patch = async ({ request, locals }) => {
|
||||||
|
const form = await request.formData();
|
||||||
|
|
||||||
|
await api('patch', `todos/${locals.userid}/${form.get('uid')}`, {
|
||||||
|
text: form.has('text') ? form.get('text') : undefined,
|
||||||
|
done: form.has('done') ? !!form.get('done') : undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
return redirect;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const del = async ({ request, locals }) => {
|
||||||
|
const form = await request.formData();
|
||||||
|
|
||||||
|
await api('delete', `todos/${locals.userid}/${form.get('uid')}`);
|
||||||
|
|
||||||
|
return redirect;
|
||||||
|
};
|
||||||
@@ -1,28 +0,0 @@
|
|||||||
import { api } from './_api';
|
|
||||||
|
|
||||||
// GET /todos.json
|
|
||||||
export const get = async (request) => {
|
|
||||||
// request.locals.userid comes from src/hooks.js
|
|
||||||
const response = await api(request, `todos/${request.locals.userid}`);
|
|
||||||
|
|
||||||
if (response.status === 404) {
|
|
||||||
// user hasn't created a todo list.
|
|
||||||
// start with an empty array
|
|
||||||
return { body: [] };
|
|
||||||
}
|
|
||||||
|
|
||||||
return response;
|
|
||||||
};
|
|
||||||
|
|
||||||
// POST /todos.json
|
|
||||||
export const post = async (request) => {
|
|
||||||
const response = await api(request, `todos/${request.locals.userid}`, {
|
|
||||||
// because index.svelte posts a FormData object,
|
|
||||||
// request.body is _also_ a (readonly) FormData
|
|
||||||
// object, which allows us to get form data
|
|
||||||
// with the `body.get(key)` method
|
|
||||||
text: request.body.get('text')
|
|
||||||
});
|
|
||||||
|
|
||||||
return response;
|
|
||||||
};
|
|
||||||
@@ -1,40 +1,9 @@
|
|||||||
<script context="module">
|
|
||||||
import { enhance } from '$lib/form';
|
|
||||||
|
|
||||||
// see https://kit.svelte.dev/docs#loading
|
|
||||||
export const load = async ({ fetch }) => {
|
|
||||||
const res = await fetch('/todos.json');
|
|
||||||
|
|
||||||
if (res.ok) {
|
|
||||||
const todos = await res.json();
|
|
||||||
|
|
||||||
return {
|
|
||||||
props: { todos }
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const { message } = await res.json();
|
|
||||||
|
|
||||||
return {
|
|
||||||
error: new Error(message)
|
|
||||||
};
|
|
||||||
};
|
|
||||||
</script>
|
|
||||||
|
|
||||||
<script>
|
<script>
|
||||||
|
import { enhance } from '$lib/form';
|
||||||
import { scale } from 'svelte/transition';
|
import { scale } from 'svelte/transition';
|
||||||
import { flip } from 'svelte/animate';
|
import { flip } from 'svelte/animate';
|
||||||
|
|
||||||
export let todos;
|
export let todos;
|
||||||
|
|
||||||
async function patch(res) {
|
|
||||||
const todo = await res.json();
|
|
||||||
|
|
||||||
todos = todos.map((t) => {
|
|
||||||
if (t.uid === todo.uid) return todo;
|
|
||||||
return t;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<svelte:head>
|
<svelte:head>
|
||||||
@@ -46,13 +15,10 @@
|
|||||||
|
|
||||||
<form
|
<form
|
||||||
class="new"
|
class="new"
|
||||||
action="/todos.json"
|
action="/todos"
|
||||||
method="post"
|
method="post"
|
||||||
use:enhance={{
|
use:enhance={{
|
||||||
result: async (res, form) => {
|
result: async ({ form }) => {
|
||||||
const created = await res.json();
|
|
||||||
todos = [...todos, created];
|
|
||||||
|
|
||||||
form.reset();
|
form.reset();
|
||||||
}
|
}
|
||||||
}}
|
}}
|
||||||
@@ -68,41 +34,33 @@
|
|||||||
animate:flip={{ duration: 200 }}
|
animate:flip={{ duration: 200 }}
|
||||||
>
|
>
|
||||||
<form
|
<form
|
||||||
action="/todos/{todo.uid}.json?_method=PATCH"
|
action="/todos?_method=PATCH"
|
||||||
method="post"
|
method="post"
|
||||||
use:enhance={{
|
use:enhance={{
|
||||||
pending: (data) => {
|
pending: ({ data }) => {
|
||||||
todo.done = !!data.get('done');
|
todo.done = !!data.get('done');
|
||||||
},
|
}
|
||||||
result: patch
|
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
|
<input type="hidden" name="uid" value={todo.uid} />
|
||||||
<input type="hidden" name="done" value={todo.done ? '' : 'true'} />
|
<input type="hidden" name="done" value={todo.done ? '' : 'true'} />
|
||||||
<button class="toggle" aria-label="Mark todo as {todo.done ? 'not done' : 'done'}" />
|
<button class="toggle" aria-label="Mark todo as {todo.done ? 'not done' : 'done'}" />
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
<form
|
<form class="text" action="/todos?_method=PATCH" method="post" use:enhance>
|
||||||
class="text"
|
<input type="hidden" name="uid" value={todo.uid} />
|
||||||
action="/todos/{todo.uid}.json?_method=PATCH"
|
|
||||||
method="post"
|
|
||||||
use:enhance={{
|
|
||||||
result: patch
|
|
||||||
}}
|
|
||||||
>
|
|
||||||
<input aria-label="Edit todo" type="text" name="text" value={todo.text} />
|
<input aria-label="Edit todo" type="text" name="text" value={todo.text} />
|
||||||
<button class="save" aria-label="Save todo" />
|
<button class="save" aria-label="Save todo" />
|
||||||
</form>
|
</form>
|
||||||
|
|
||||||
<form
|
<form
|
||||||
action="/todos/{todo.uid}.json?_method=DELETE"
|
action="/todos?_method=DELETE"
|
||||||
method="post"
|
method="post"
|
||||||
use:enhance={{
|
use:enhance={{
|
||||||
pending: () => (todo.pending_delete = true),
|
pending: () => (todo.pending_delete = true)
|
||||||
result: () => {
|
|
||||||
todos = todos.filter((t) => t.uid !== todo.uid);
|
|
||||||
}
|
|
||||||
}}
|
}}
|
||||||
>
|
>
|
||||||
|
<input type="hidden" name="uid" value={todo.uid} />
|
||||||
<button class="delete" aria-label="Delete todo" disabled={todo.pending_delete} />
|
<button class="delete" aria-label="Delete todo" disabled={todo.pending_delete} />
|
||||||
</form>
|
</form>
|
||||||
</div>
|
</div>
|
||||||
@@ -158,7 +116,7 @@
|
|||||||
.done {
|
.done {
|
||||||
transform: none;
|
transform: none;
|
||||||
opacity: 0.4;
|
opacity: 0.4;
|
||||||
filter: drop-shadow(0px 0px 1px rgba(0, 0, 0, 0.1));
|
filter: drop-shadow(0 0 1px rgba(0, 0, 0, 0.1));
|
||||||
}
|
}
|
||||||
|
|
||||||
form.text {
|
form.text {
|
||||||
|
|||||||
@@ -5,8 +5,10 @@ const config = {
 	kit: {
 		adapter: adapter(),
 
-		// hydrate the <div id="svelte"> element in src/app.html
-		target: '#svelte'
+		// Override http methods in the Todo forms
+		methodOverride: {
+			allowed: ['PATCH', 'DELETE']
+		}
 	}
 };
 
package.json
@@ -14,37 +14,24 @@
   "dependencies": {
     "lerna": "3.16.4"
   },
-  "turbo": {
-    "baseBranch": "origin/main",
-    "pipeline": {
-      "build": {
-        "dependsOn": [
-          "^build"
-        ],
-        "outputs": [
-          "dist/**"
-        ]
-      }
-    }
-  },
   "devDependencies": {
-    "@typescript-eslint/eslint-plugin": "4.28.0",
-    "@typescript-eslint/parser": "4.28.0",
+    "@typescript-eslint/eslint-plugin": "5.21.0",
+    "@typescript-eslint/parser": "5.21.0",
     "async-retry": "1.2.3",
     "buffer-replace": "1.0.0",
-    "cheerio": "1.0.0-rc.3",
-    "eslint": "7.29.0",
-    "eslint-config-prettier": "8.3.0",
-    "eslint-plugin-jest": "24.3.6",
-    "husky": "6.0.0",
-    "jest": "27.3.1",
+    "eslint": "8.14.0",
+    "eslint-config-prettier": "8.5.0",
+    "eslint-plugin-jest": "26.1.5",
+    "husky": "7.0.4",
+    "jest": "28.0.2",
     "json5": "2.1.1",
     "lint-staged": "9.2.5",
     "node-fetch": "2.6.1",
     "npm-package-arg": "6.1.0",
-    "prettier": "2.3.1",
-    "ts-jest": "27.0.4",
-    "turbo": "1.0.18"
+    "prettier": "2.6.2",
+    "ts-eager": "2.0.2",
+    "ts-jest": "28.0.0-next.1",
+    "turbo": "1.2.5"
   },
   "scripts": {
     "lerna": "lerna",
@@ -54,9 +41,10 @@
     "publish-from-github": "./utils/publish.sh",
     "changelog": "node utils/changelog.js",
     "build": "turbo run build",
-    "vercel-build": "mkdir -p public && echo '<a href=\"https://vercel.com/import\">Import</a>' > public/output.html",
+    "vercel-build": "yarn build && cd api && node -r ts-eager/register ./_lib/script/build.ts",
     "pre-commit": "lint-staged",
-    "test-unit": "node utils/run.js test-unit",
+    "test": "jest --rootDir=\"test\" --testPathPattern=\"\\.test.js\"",
+    "test-unit": "yarn test && node utils/run.js test-unit",
     "test-integration-cli": "node utils/run.js test-integration-cli",
     "test-integration-once": "node utils/run.js test-integration-once",
     "test-integration-dev": "node utils/run.js test-integration-dev",
@@ -102,6 +90,14 @@
       "es6": true
     },
     "rules": {
+      "no-restricted-syntax": [
+        "warn",
+        "WithStatement",
+        {
+          "message": "substr() is deprecated, use slice() or substring() instead",
+          "selector": "MemberExpression > Identifier[name='substr']"
+        }
+      ],
       "require-atomic-updates": 0,
       "@typescript-eslint/ban-ts-comment": 0,
       "@typescript-eslint/camelcase": 0,
@@ -1,3 +1,6 @@
 /src
 /test
+/tsconfig.json
+/.turbo
+/jest.config.js
 tmp
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/build-utils",
-  "version": "2.13.1-canary.1",
+  "version": "2.16.0",
   "license": "MIT",
   "main": "./dist/index.js",
   "types": "./dist/index.d.js",
@@ -23,14 +23,14 @@
   "@types/end-of-stream": "^1.4.0",
   "@types/fs-extra": "9.0.13",
   "@types/glob": "^7.1.1",
-  "@types/jest": "27.0.1",
+  "@types/jest": "27.4.1",
   "@types/js-yaml": "3.12.1",
   "@types/ms": "0.7.31",
   "@types/multistream": "2.1.1",
   "@types/node-fetch": "^2.1.6",
   "@types/semver": "6.0.0",
-  "@types/yazl": "^2.4.1",
-  "@vercel/frameworks": "0.5.1-canary.21",
+  "@types/yazl": "2.4.2",
+  "@vercel/frameworks": "0.8.0",
   "@vercel/ncc": "0.24.0",
   "aggregate-error": "3.0.1",
   "async-retry": "1.2.3",
@@ -47,6 +47,6 @@
   "node-fetch": "2.6.1",
   "semver": "6.1.1",
   "typescript": "4.3.4",
-  "yazl": "2.4.3"
+  "yazl": "2.5.1"
 }
 }
|||||||
@@ -1,411 +0,0 @@
|
|||||||
import fs from 'fs-extra';
|
|
||||||
import { join, parse, relative, dirname, basename, extname } from 'path';
|
|
||||||
import glob from './fs/glob';
|
|
||||||
import { normalizePath } from './fs/normalize-path';
|
|
||||||
import { Lambda } from './lambda';
|
|
||||||
import type { BuildOptions } from './types';
|
|
||||||
import { debug, getIgnoreFilter } from '.';
|
|
||||||
|
|
||||||
// `.output` was already created by the Build Command, so we have
|
|
||||||
// to ensure its contents don't get bundled into the Lambda. Similarily,
|
|
||||||
// we don't want to bundle anything from `.vercel` either. Lastly,
|
|
||||||
// Builders/Runtimes didn't have `vercel.json` or `now.json`.
|
|
||||||
const ignoredPaths = ['.output', '.vercel', 'vercel.json', 'now.json'];
|
|
||||||
|
|
||||||
const shouldIgnorePath = (
|
|
||||||
file: string,
|
|
||||||
ignoreFilter: any,
|
|
||||||
ignoreFile: boolean
|
|
||||||
) => {
|
|
||||||
const isNative = ignoredPaths.some(item => {
|
|
||||||
return file.startsWith(item);
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!ignoreFile) {
|
|
||||||
return isNative;
|
|
||||||
}
|
|
||||||
|
|
||||||
return isNative || ignoreFilter(file);
|
|
||||||
};
|
|
||||||
|
|
||||||
const getSourceFiles = async (workPath: string, ignoreFilter: any) => {
|
|
||||||
const list = await glob('**', {
|
|
||||||
cwd: workPath,
|
|
||||||
});
|
|
||||||
|
|
||||||
// We're not passing this as an `ignore` filter to the `glob` function above,
|
|
||||||
// so that we can re-use exactly the same `getIgnoreFilter` method that the
|
|
||||||
// Build Step uses (literally the same code). Note that this exclusion only applies
|
|
||||||
// when deploying. Locally, another exclusion is needed, which is handled
|
|
||||||
// further below in the `convertRuntimeToPlugin` function.
|
|
||||||
for (const file in list) {
|
|
||||||
if (shouldIgnorePath(file, ignoreFilter, true)) {
|
|
||||||
delete list[file];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return list;
|
|
||||||
};
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Convert legacy Runtime to a Plugin.
|
|
||||||
* @param buildRuntime - a legacy build() function from a Runtime
|
|
||||||
* @param packageName - the name of the package, for example `vercel-plugin-python`
|
|
||||||
* @param ext - the file extension, for example `.py`
|
|
||||||
*/
|
|
||||||
export function _experimental_convertRuntimeToPlugin(
|
|
||||||
buildRuntime: (options: BuildOptions) => Promise<{ output: Lambda }>,
|
|
||||||
packageName: string,
|
|
||||||
ext: string
|
|
||||||
) {
|
|
||||||
// This `build()` signature should match `plugin.build()` signature in `vercel build`.
|
|
||||||
return async function build({ workPath }: { workPath: string }) {
|
|
||||||
// We also don't want to provide any files to Runtimes that were ignored
|
|
||||||
// through `.vercelignore` or `.nowignore`, because the Build Step does the same.
|
|
||||||
const ignoreFilter = await getIgnoreFilter(workPath);
|
|
||||||
|
|
||||||
// Retrieve the files that are currently available on the File System,
|
|
||||||
// before the Legacy Runtime has even started to build.
|
|
||||||
const sourceFilesPreBuild = await getSourceFiles(workPath, ignoreFilter);
|
|
||||||
|
|
||||||
// Instead of doing another `glob` to get all the matching source files,
|
|
||||||
// we'll filter the list of existing files down to only the ones
|
|
||||||
// that are matching the entrypoint pattern, so we're first creating
|
|
||||||
// a clean new list to begin.
|
|
||||||
const entrypoints = Object.assign({}, sourceFilesPreBuild);
|
|
||||||
|
|
||||||
const entrypointMatch = new RegExp(`^api/.*${ext}$`);
|
|
||||||
|
|
||||||
// Up next, we'll strip out the files from the list of entrypoints
|
|
||||||
// that aren't actually considered entrypoints.
|
|
||||||
for (const file in entrypoints) {
|
|
||||||
if (!entrypointMatch.test(file)) {
|
|
||||||
delete entrypoints[file];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const pages: { [key: string]: any } = {};
|
|
||||||
const pluginName = packageName.replace('vercel-plugin-', '');
|
|
||||||
const outputPath = join(workPath, '.output');
|
|
||||||
|
|
||||||
const traceDir = join(
|
|
||||||
outputPath,
|
|
||||||
`inputs`,
|
|
||||||
// Legacy Runtimes can only provide API Routes, so that's
|
|
||||||
// why we can use this prefix for all of them. Here, we have to
|
|
||||||
// make sure to not use a cryptic hash name, because people
|
|
||||||
// need to be able to easily inspect the output.
|
|
||||||
`api-routes-${pluginName}`
|
|
||||||
);
|
|
||||||
|
|
||||||
await fs.ensureDir(traceDir);
|
|
||||||
|
|
||||||
const entryRoot = join(outputPath, 'server', 'pages');
|
|
||||||
|
|
||||||
for (const entrypoint of Object.keys(entrypoints)) {
|
|
||||||
const { output } = await buildRuntime({
|
|
||||||
files: sourceFilesPreBuild,
|
|
||||||
entrypoint,
|
|
||||||
workPath,
|
|
||||||
config: {
|
|
||||||
zeroConfig: true,
|
|
||||||
},
|
|
||||||
meta: {
|
|
||||||
avoidTopLevelInstall: true,
|
|
||||||
skipDownload: true,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
const lambdaFiles = output.files;
|
|
||||||
|
|
||||||
// When deploying, the `files` that are passed to the Legacy Runtimes already
|
|
||||||
// have certain files that are ignored stripped, but locally, that list of
|
|
||||||
// files isn't used by the Legacy Runtimes, so we need to apply the filters
|
|
||||||
// to the outputs that they are returning instead.
|
|
||||||
for (const file in lambdaFiles) {
|
|
||||||
if (shouldIgnorePath(file, ignoreFilter, false)) {
|
|
||||||
delete lambdaFiles[file];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let handlerFileBase = output.handler;
|
|
||||||
let handlerFile = lambdaFiles[handlerFileBase];
|
|
||||||
let handlerHasImport = false;
|
|
||||||
|
|
||||||
const { handler } = output;
|
|
||||||
const handlerMethod = handler.split('.').pop();
|
|
||||||
const handlerFileName = handler.replace(`.${handlerMethod}`, '');
|
|
||||||
|
|
||||||
// For compiled languages, the launcher file for the Lambda generated
|
|
||||||
// by the Legacy Runtime matches the `handler` defined for it, but for
|
|
||||||
// interpreted languages, the `handler` consists of the launcher file name
|
|
||||||
// without an extension, plus the name of the method inside of that file
|
|
||||||
// that should be invoked, so we have to construct the file path explicitly.
|
|
||||||
if (!handlerFile) {
|
|
||||||
handlerFileBase = handlerFileName + ext;
|
|
||||||
handlerFile = lambdaFiles[handlerFileBase];
|
|
||||||
handlerHasImport = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!handlerFile || !handlerFile.fsPath) {
|
|
||||||
throw new Error(
|
|
||||||
`Could not find a handler file. Please ensure that \`files\` for the returned \`Lambda\` contains an \`FileFsRef\` named "${handlerFileBase}" with a valid \`fsPath\`.`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
const handlerExtName = extname(handlerFile.fsPath);
|
|
||||||
|
|
||||||
const entryBase = basename(entrypoint).replace(ext, handlerExtName);
|
|
||||||
const entryPath = join(dirname(entrypoint), entryBase);
|
|
||||||
const entry = join(entryRoot, entryPath);
|
|
||||||
|
|
||||||
// Create the parent directory of the API Route that will be created
|
|
||||||
// for the current entrypoint inside of `.output/server/pages/api`.
|
|
||||||
await fs.ensureDir(dirname(entry));
|
|
||||||
|
|
||||||
// For compiled languages, the launcher file will be binary and therefore
|
|
||||||
// won't try to import a user-provided request handler (instead, it will
|
|
||||||
// contain it). But for interpreted languages, the launcher might try to
|
|
||||||
// load a user-provided request handler from the source file instead of bundling
|
|
||||||
// it, so we have to adjust the import statement inside the launcher to point
|
|
||||||
// to the respective source file. Previously, Legacy Runtimes simply expected
|
|
||||||
// the user-provided request-handler to be copied right next to the launcher,
|
|
||||||
// but with the new File System API, files won't be moved around unnecessarily.
|
|
||||||
if (handlerHasImport) {
|
|
||||||
const { fsPath } = handlerFile;
|
|
||||||
const encoding = 'utf-8';
|
|
||||||
|
|
||||||
// This is the true directory of the user-provided request handler in the
|
|
||||||
// source files, so that's what we will use as an import path in the launcher.
|
|
||||||
const locationPrefix = relative(entry, outputPath);
|
|
||||||
|
|
||||||
let handlerContent = await fs.readFile(fsPath, encoding);
|
|
||||||
|
|
||||||
const importPaths = [
|
|
||||||
// This is the full entrypoint path, like `./api/test.py`. In our tests
|
|
||||||
// Python didn't support importing from a parent directory without using different
|
|
||||||
// code in the launcher that registers it as a location for modules and then changing
|
|
||||||
// the importing syntax, but continuing to import it like before seems to work. If
|
|
||||||
// other languages need this, we should consider excluding Python explicitly.
|
|
||||||
// `./${entrypoint}`,
|
|
||||||
|
|
||||||
// This is the entrypoint path without extension, like `api/test`
|
|
||||||
entrypoint.slice(0, -ext.length),
|
|
||||||
];
|
|
||||||
|
|
||||||
// Generate a list of regular expressions that we can use for
|
|
||||||
// finding matches, but only allow matches if the import path is
|
|
||||||
// wrapped inside single (') or double quotes (").
|
|
||||||
const patterns = importPaths.map(path => {
|
|
||||||
// eslint-disable-next-line no-useless-escape
|
|
||||||
return new RegExp(`('|")(${path.replace(/\./g, '\\.')})('|")`, 'g');
|
|
||||||
});
|
|
||||||
|
|
||||||
let replacedMatch = null;
|
|
||||||
|
|
||||||
for (const pattern of patterns) {
|
|
||||||
const newContent = handlerContent.replace(
|
|
||||||
pattern,
|
|
||||||
(_, p1, p2, p3) => {
|
|
||||||
return `${p1}${join(locationPrefix, p2)}${p3}`;
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (newContent !== handlerContent) {
|
|
||||||
debug(
|
|
||||||
`Replaced "${pattern}" inside "${entry}" to ensure correct import of user-provided request handler`
|
|
||||||
);
|
|
||||||
|
|
||||||
handlerContent = newContent;
|
|
||||||
replacedMatch = true;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!replacedMatch) {
|
|
||||||
new Error(
|
|
||||||
`No replacable matches for "${importPaths[0]}" or "${importPaths[1]}" found in "${fsPath}"`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
await fs.writeFile(entry, handlerContent, encoding);
|
|
||||||
} else {
|
|
||||||
await fs.copy(handlerFile.fsPath, entry);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Legacy Runtimes based on interpreted languages will create a new launcher file
|
|
||||||
// for every entrypoint, but they will create each one inside `workPath`, which means that
|
|
||||||
// the launcher for one entrypoint will overwrite the launcher provided for the previous
|
|
||||||
// entrypoint. That's why, above, we copy the file contents into the new destination (and
|
|
||||||
// optionally transform them along the way), instead of linking. We then also want to remove
|
|
||||||
// the copy origin right here, so that the `workPath` doesn't contain a useless launcher file
|
|
||||||
// once the build has finished running.
|
|
||||||
await fs.remove(handlerFile.fsPath);
|
|
||||||
debug(`Removed temporary file "${handlerFile.fsPath}"`);
|
|
||||||
|
|
||||||
const nft = `${entry}.nft.json`;
|
|
||||||
|
|
||||||
const json = JSON.stringify({
|
|
||||||
version: 2,
|
|
||||||
files: Object.keys(lambdaFiles)
|
|
||||||
.map(file => {
|
|
||||||
const { fsPath } = lambdaFiles[file];
|
|
||||||
|
|
||||||
if (!fsPath) {
|
|
||||||
throw new Error(
|
|
||||||
`File "${file}" is missing valid \`fsPath\` property`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
// The handler was already moved into position above.
|
|
||||||
if (file === handlerFileBase) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
return normalizePath(relative(dirname(nft), fsPath));
|
|
||||||
})
|
|
||||||
.filter(Boolean),
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(nft, json);
|
|
||||||
|
|
||||||
// Add an entry that will later on be added to the `functions-manifest.json`
|
|
||||||
// file that is placed inside of the `.output` directory.
|
|
||||||
pages[normalizePath(entryPath)] = {
|
|
||||||
// Because the underlying file used as a handler was placed
|
|
||||||
// inside `.output/server/pages/api`, it no longer has the name it originally
|
|
||||||
// had and is now named after the API Route that it's responsible for,
|
|
||||||
// so we have to adjust the name of the Lambda handler accordingly.
|
|
||||||
handler: handler.replace(handlerFileName, parse(entry).name),
|
|
||||||
runtime: output.runtime,
|
|
||||||
memory: output.memory,
|
|
||||||
maxDuration: output.maxDuration,
|
|
||||||
environment: output.environment,
|
|
||||||
allowQuery: output.allowQuery,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add any Serverless Functions that were exposed by the Legacy Runtime
|
|
||||||
// to the `functions-manifest.json` file provided in `.output`.
|
|
||||||
await _experimental_updateFunctionsManifest({ workPath, pages });
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function readJson(filePath: string): Promise<{ [key: string]: any }> {
|
|
||||||
try {
|
|
||||||
const str = await fs.readFile(filePath, 'utf8');
|
|
||||||
return JSON.parse(str);
|
|
||||||
} catch (err) {
|
|
||||||
if (err.code === 'ENOENT') {
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
throw err;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* If `.output/functions-manifest.json` exists, append to the pages
|
|
||||||
* property. Otherwise write a new file.
|
|
||||||
*/
|
|
||||||
export async function _experimental_updateFunctionsManifest({
|
|
||||||
workPath,
|
|
||||||
pages,
|
|
||||||
}: {
|
|
||||||
workPath: string;
|
|
||||||
pages: { [key: string]: any };
|
|
||||||
}) {
|
|
||||||
const functionsManifestPath = join(
|
|
||||||
workPath,
|
|
||||||
'.output',
|
|
||||||
'functions-manifest.json'
|
|
||||||
);
|
|
||||||
const functionsManifest = await readJson(functionsManifestPath);
|
|
||||||
|
|
||||||
if (!functionsManifest.version) functionsManifest.version = 2;
|
|
||||||
if (!functionsManifest.pages) functionsManifest.pages = {};
|
|
||||||
|
|
||||||
for (const [pageKey, pageConfig] of Object.entries(pages)) {
|
|
||||||
functionsManifest.pages[pageKey] = { ...pageConfig };
|
|
||||||
}
|
|
||||||
|
|
||||||
await fs.writeFile(functionsManifestPath, JSON.stringify(functionsManifest));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Append routes to the `routes-manifest.json` file.
|
|
||||||
* If the file does not exist, it will be created.
|
|
||||||
*/
|
|
||||||
export async function _experimental_updateRoutesManifest({
|
|
||||||
workPath,
|
|
||||||
redirects,
|
|
||||||
rewrites,
|
|
||||||
headers,
|
|
||||||
dynamicRoutes,
|
|
||||||
staticRoutes,
|
|
||||||
}: {
|
|
||||||
workPath: string;
|
|
||||||
redirects?: {
|
|
||||||
source: string;
|
|
||||||
destination: string;
|
|
||||||
statusCode: number;
|
|
||||||
regex: string;
|
|
||||||
}[];
|
|
||||||
rewrites?: {
|
|
||||||
source: string;
|
|
||||||
destination: string;
|
|
||||||
regex: string;
|
|
||||||
}[];
|
|
||||||
headers?: {
|
|
||||||
source: string;
|
|
||||||
headers: {
|
|
||||||
key: string;
|
|
||||||
value: string;
|
|
||||||
}[];
|
|
||||||
regex: string;
|
|
||||||
}[];
|
|
||||||
dynamicRoutes?: {
|
|
||||||
page: string;
|
|
||||||
regex: string;
|
|
||||||
namedRegex?: string;
|
|
||||||
routeKeys?: { [named: string]: string };
|
|
||||||
}[];
|
|
||||||
staticRoutes?: {
|
|
||||||
page: string;
|
|
||||||
regex: string;
|
|
||||||
namedRegex?: string;
|
|
||||||
routeKeys?: { [named: string]: string };
|
|
||||||
}[];
|
|
||||||
}) {
|
|
||||||
const routesManifestPath = join(workPath, '.output', 'routes-manifest.json');
|
|
||||||
|
|
||||||
const routesManifest = await readJson(routesManifestPath);
|
|
||||||
|
|
||||||
if (!routesManifest.version) routesManifest.version = 3;
|
|
||||||
if (routesManifest.pages404 === undefined) routesManifest.pages404 = true;
|
|
||||||
|
|
||||||
if (redirects) {
|
|
||||||
if (!routesManifest.redirects) routesManifest.redirects = [];
|
|
||||||
routesManifest.redirects.push(...redirects);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (rewrites) {
|
|
||||||
if (!routesManifest.rewrites) routesManifest.rewrites = [];
|
|
||||||
routesManifest.rewrites.push(...rewrites);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (headers) {
|
|
||||||
if (!routesManifest.headers) routesManifest.headers = [];
|
|
||||||
routesManifest.headers.push(...headers);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (dynamicRoutes) {
|
|
||||||
if (!routesManifest.dynamicRoutes) routesManifest.dynamicRoutes = [];
|
|
||||||
routesManifest.dynamicRoutes.push(...dynamicRoutes);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (staticRoutes) {
|
|
||||||
if (!routesManifest.staticRoutes) routesManifest.staticRoutes = [];
|
|
||||||
routesManifest.staticRoutes.push(...staticRoutes);
|
|
||||||
}
|
|
||||||
|
|
||||||
await fs.writeFile(routesManifestPath, JSON.stringify(routesManifest));
|
|
||||||
}
|
|
||||||
@@ -1,4 +1,4 @@
-import { getPlatformEnv } from './';
+import { getPlatformEnv } from './get-platform-env';

 export default function debug(message: string, ...additional: any[]) {
   if (getPlatformEnv('BUILDER_DEBUG')) {
||||||
|
|||||||
@@ -67,8 +67,7 @@ function getPublicBuilder(
|
|||||||
typeof builder.src === 'string' &&
|
typeof builder.src === 'string' &&
|
||||||
isOfficialRuntime('static', builder.use) &&
|
isOfficialRuntime('static', builder.use) &&
|
||||||
/^.*\/\*\*\/\*$/.test(builder.src) &&
|
/^.*\/\*\*\/\*$/.test(builder.src) &&
|
||||||
builder.config &&
|
builder.config?.zeroConfig === true
|
||||||
builder.config.zeroConfig === true
|
|
||||||
) {
|
) {
|
||||||
return builder as Builder & { src: string };
|
return builder as Builder & { src: string };
|
||||||
}
|
}
|
||||||
@@ -539,7 +538,7 @@ function getMissingBuildScriptError() {
|
|||||||
code: 'missing_build_script',
|
code: 'missing_build_script',
|
||||||
message:
|
message:
|
||||||
'Your `package.json` file is missing a `build` property inside the `scripts` property.' +
|
'Your `package.json` file is missing a `build` property inside the `scripts` property.' +
|
||||||
'\nLearn More: https://vercel.com/docs/v2/platform/frequently-asked-questions#missing-build-script',
|
'\nLearn More: https://vercel.link/missing-build-script',
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
packages/build-utils/src/edge-function.ts (new file)
@@ -0,0 +1,44 @@
+import type { Files } from './types';
+
+/**
+ * An Edge Functions output
+ */
+export class EdgeFunction {
+  type: 'EdgeFunction';
+
+  /**
+   * A display name for the edge function.
+   */
+  name: string;
+
+  /**
+   * The deployment target.
+   * Only v8-worker is currently supported.
+   */
+  deploymentTarget: 'v8-worker';
+
+  /**
+   * The entrypoint for the edge function.
+   */
+  entrypoint: string;
+
+  /**
+   * The list of files to be included in the edge function bundle.
+   */
+  files: Files;
+
+  /**
+   * Extra environment variables in use for the user code, to be
+   * assigned to the edge function.
+   */
+  envVarsInUse?: string[];
+
+  constructor(params: Omit<EdgeFunction, 'type'>) {
+    this.type = 'EdgeFunction';
+    this.name = params.name;
+    this.deploymentTarget = params.deploymentTarget;
+    this.entrypoint = params.entrypoint;
+    this.files = params.files;
+    this.envVarsInUse = params.envVarsInUse;
+  }
+}
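As a usage illustration (not part of this diff), a builder written against @vercel/build-utils could construct the new EdgeFunction output roughly as below; the route name, file contents, and env var are hypothetical.

import { EdgeFunction, FileBlob } from '@vercel/build-utils';

// Minimal sketch: one edge function bundling a single in-memory file.
const output = new EdgeFunction({
  name: 'api/hello',               // display name for the function
  deploymentTarget: 'v8-worker',   // only supported target, per the class docs
  entrypoint: 'api/hello.js',
  files: {
    'api/hello.js': new FileBlob({
      data: "addEventListener('fetch', (e) => e.respondWith(new Response('hello')));",
    }),
  },
  envVarsInUse: ['MY_TOKEN'],      // hypothetical env var referenced by user code
});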
@@ -1,6 +1,6 @@
 import assert from 'assert';
 import intoStream from 'into-stream';
-import { File } from './types';
+import { FileBase } from './types';

 interface FileBlobOptions {
   mode?: number;
@@ -14,7 +14,7 @@ interface FromStreamOptions {
   stream: NodeJS.ReadableStream;
 }

-export default class FileBlob implements File {
+export default class FileBlob implements FileBase {
   public type: 'FileBlob';
   public mode: number;
   public data: string | Buffer;
@@ -48,6 +48,10 @@ export default class FileBlob implements File {
     return new FileBlob({ mode, contentType, data });
   }

+  async toStreamAsync(): Promise<NodeJS.ReadableStream> {
+    return this.toStream();
+  }
+
   toStream(): NodeJS.ReadableStream {
     return intoStream(this.data);
   }
|||||||
@@ -3,7 +3,7 @@ import fs from 'fs-extra';
|
|||||||
import multiStream from 'multistream';
|
import multiStream from 'multistream';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import Sema from 'async-sema';
|
import Sema from 'async-sema';
|
||||||
import { File } from './types';
|
import { FileBase } from './types';
|
||||||
|
|
||||||
const semaToPreventEMFILE = new Sema(20);
|
const semaToPreventEMFILE = new Sema(20);
|
||||||
|
|
||||||
@@ -20,7 +20,7 @@ interface FromStreamOptions {
|
|||||||
fsPath: string;
|
fsPath: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
class FileFsRef implements File {
|
class FileFsRef implements FileBase {
|
||||||
public type: 'FileFsRef';
|
public type: 'FileFsRef';
|
||||||
public mode: number;
|
public mode: number;
|
||||||
public fsPath: string;
|
public fsPath: string;
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import fetch from 'node-fetch';
|
|||||||
import multiStream from 'multistream';
|
import multiStream from 'multistream';
|
||||||
import retry from 'async-retry';
|
import retry from 'async-retry';
|
||||||
import Sema from 'async-sema';
|
import Sema from 'async-sema';
|
||||||
import { File } from './types';
|
import { FileBase } from './types';
|
||||||
|
|
||||||
interface FileRefOptions {
|
interface FileRefOptions {
|
||||||
mode?: number;
|
mode?: number;
|
||||||
@@ -23,7 +23,7 @@ class BailableError extends Error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export default class FileRef implements File {
|
export default class FileRef implements FileBase {
|
||||||
public type: 'FileRef';
|
public type: 'FileRef';
|
||||||
public mode: number;
|
public mode: number;
|
||||||
public digest: string;
|
public digest: string;
|
||||||
|
|||||||
@@ -3,6 +3,7 @@ import debug from '../debug';
|
|||||||
import FileFsRef from '../file-fs-ref';
|
import FileFsRef from '../file-fs-ref';
|
||||||
import { File, Files, Meta } from '../types';
|
import { File, Files, Meta } from '../types';
|
||||||
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
|
import { remove, mkdirp, readlink, symlink } from 'fs-extra';
|
||||||
|
import streamToBuffer from './stream-to-buffer';
|
||||||
|
|
||||||
export interface DownloadedFiles {
|
export interface DownloadedFiles {
|
||||||
[filePath: string]: FileFsRef;
|
[filePath: string]: FileFsRef;
|
||||||
@@ -15,20 +16,45 @@ export function isSymbolicLink(mode: number): boolean {
|
|||||||
return (mode & S_IFMT) === S_IFLNK;
|
return (mode & S_IFMT) === S_IFLNK;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
async function prepareSymlinkTarget(
|
||||||
|
file: File,
|
||||||
|
fsPath: string
|
||||||
|
): Promise<string> {
|
||||||
|
const mkdirPromise = mkdirp(path.dirname(fsPath));
|
||||||
|
if (file.type === 'FileFsRef') {
|
||||||
|
const [target] = await Promise.all([readlink(file.fsPath), mkdirPromise]);
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (file.type === 'FileRef' || file.type === 'FileBlob') {
|
||||||
|
const targetPathBufferPromise = await streamToBuffer(
|
||||||
|
await file.toStreamAsync()
|
||||||
|
);
|
||||||
|
const [targetPathBuffer] = await Promise.all([
|
||||||
|
targetPathBufferPromise,
|
||||||
|
mkdirPromise,
|
||||||
|
]);
|
||||||
|
return targetPathBuffer.toString('utf8');
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error(
|
||||||
|
`file.type "${(file as any).type}" not supported for symlink`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
|
async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
|
||||||
const { mode } = file;
|
const { mode } = file;
|
||||||
if (mode && isSymbolicLink(mode) && file.type === 'FileFsRef') {
|
|
||||||
const [target] = await Promise.all([
|
if (isSymbolicLink(mode)) {
|
||||||
readlink((file as FileFsRef).fsPath),
|
const target = await prepareSymlinkTarget(file, fsPath);
|
||||||
mkdirp(path.dirname(fsPath)),
|
|
||||||
]);
|
|
||||||
await symlink(target, fsPath);
|
await symlink(target, fsPath);
|
||||||
return FileFsRef.fromFsPath({ mode, fsPath });
|
return FileFsRef.fromFsPath({ mode, fsPath });
|
||||||
} else {
|
}
|
||||||
|
|
||||||
const stream = file.toStream();
|
const stream = file.toStream();
|
||||||
return FileFsRef.fromStream({ mode, stream, fsPath });
|
return FileFsRef.fromStream({ mode, stream, fsPath });
|
||||||
}
|
}
|
||||||
}
|
|
||||||
|
|
||||||
async function removeFile(basePath: string, fileMatched: string) {
|
async function removeFile(basePath: string, fileMatched: string) {
|
||||||
const file = path.join(basePath, fileMatched);
|
const file = path.join(basePath, fileMatched);
|
||||||
|
|||||||
@@ -46,8 +46,8 @@ export default async function glob(
|
|||||||
const files = await vanillaGlob(pattern, options);
|
const files = await vanillaGlob(pattern, options);
|
||||||
|
|
||||||
for (const relativePath of files) {
|
for (const relativePath of files) {
|
||||||
const fsPath = normalizePath(path.join(options.cwd!, relativePath));
|
const fsPath = normalizePath(path.join(options.cwd, relativePath));
|
||||||
let stat: Stats = options.statCache![fsPath] as Stats;
|
let stat: Stats = options.statCache[fsPath] as Stats;
|
||||||
assert(
|
assert(
|
||||||
stat,
|
stat,
|
||||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
|
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`
|
||||||
|
|||||||
@@ -1,21 +1,31 @@
|
|||||||
import assert from 'assert';
|
import assert from 'assert';
|
||||||
import fs from 'fs-extra';
|
import fs from 'fs-extra';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import debug from '../debug';
|
import Sema from 'async-sema';
|
||||||
import spawn from 'cross-spawn';
|
import spawn from 'cross-spawn';
|
||||||
import { SpawnOptions } from 'child_process';
|
import { SpawnOptions } from 'child_process';
|
||||||
import { deprecate } from 'util';
|
import { deprecate } from 'util';
|
||||||
|
import debug from '../debug';
|
||||||
import { NowBuildError } from '../errors';
|
import { NowBuildError } from '../errors';
|
||||||
import { Meta, PackageJson, NodeVersion, Config } from '../types';
|
import { Meta, PackageJson, NodeVersion, Config } from '../types';
|
||||||
import { getSupportedNodeVersion, getLatestNodeVersion } from './node-version';
|
import { getSupportedNodeVersion, getLatestNodeVersion } from './node-version';
|
||||||
|
import { readConfigFile } from './read-config-file';
|
||||||
|
|
||||||
export type CliType = 'yarn' | 'npm';
|
// Only allow one `runNpmInstall()` invocation to run concurrently
|
||||||
|
const runNpmInstallSema = new Sema(1);
|
||||||
|
|
||||||
|
export type CliType = 'yarn' | 'npm' | 'pnpm';
|
||||||
|
|
||||||
export interface ScanParentDirsResult {
|
export interface ScanParentDirsResult {
|
||||||
/**
|
/**
|
||||||
* "yarn" or "npm", depending on the presence of lockfiles.
|
* "yarn", "npm", or "pnpm" depending on the presence of lockfiles.
|
||||||
*/
|
*/
|
||||||
cliType: CliType;
|
cliType: CliType;
|
||||||
|
/**
|
||||||
|
* The file path of found `package.json` file, or `undefined` if none was
|
||||||
|
* found.
|
||||||
|
*/
|
||||||
|
packageJsonPath?: string;
|
||||||
/**
|
/**
|
||||||
* The contents of found `package.json` file, when the `readPackageJson`
|
* The contents of found `package.json` file, when the `readPackageJson`
|
||||||
* option is enabled.
|
* option is enabled.
|
||||||
@@ -212,6 +222,12 @@ export async function getNodeVersion(
|
|||||||
const latest = getLatestNodeVersion();
|
const latest = getLatestNodeVersion();
|
||||||
return { ...latest, runtime: 'nodejs' };
|
return { ...latest, runtime: 'nodejs' };
|
||||||
}
|
}
|
||||||
|
if (process.env.ENABLE_EXPERIMENTAL_NODE16 === '1') {
|
||||||
|
console.warn(
|
||||||
|
'Warning: Using experimental Node.js 16.x due to ENABLE_EXPERIMENTAL_NODE16=1'
|
||||||
|
);
|
||||||
|
return { major: 16, range: '16.x', runtime: 'nodejs16.x' };
|
||||||
|
}
|
||||||
const { packageJson } = await scanParentDirs(destPath, true);
|
const { packageJson } = await scanParentDirs(destPath, true);
|
||||||
let { nodeVersion } = config;
|
let { nodeVersion } = config;
|
||||||
let isAuto = true;
|
let isAuto = true;
|
||||||
@@ -236,12 +252,13 @@ export async function scanParentDirs(
|
|||||||
|
|
||||||
let cliType: CliType = 'yarn';
|
let cliType: CliType = 'yarn';
|
||||||
let packageJson: PackageJson | undefined;
|
let packageJson: PackageJson | undefined;
|
||||||
|
let packageJsonPath: string | undefined;
|
||||||
let currentDestPath = destPath;
|
let currentDestPath = destPath;
|
||||||
let lockfileVersion: number | undefined;
|
let lockfileVersion: number | undefined;
|
||||||
|
|
||||||
// eslint-disable-next-line no-constant-condition
|
// eslint-disable-next-line no-constant-condition
|
||||||
while (true) {
|
while (true) {
|
||||||
const packageJsonPath = path.join(currentDestPath, 'package.json');
|
packageJsonPath = path.join(currentDestPath, 'package.json');
|
||||||
// eslint-disable-next-line no-await-in-loop
|
// eslint-disable-next-line no-await-in-loop
|
||||||
if (await fs.pathExists(packageJsonPath)) {
|
if (await fs.pathExists(packageJsonPath)) {
|
||||||
// Only read the contents of the *first* `package.json` file found,
|
// Only read the contents of the *first* `package.json` file found,
|
||||||
@@ -252,7 +269,7 @@ export async function scanParentDirs(
|
|||||||
}
|
}
|
||||||
|
|
||||||
// eslint-disable-next-line no-await-in-loop
|
// eslint-disable-next-line no-await-in-loop
|
||||||
const [packageLockJson, hasYarnLock] = await Promise.all([
|
const [packageLockJson, hasYarnLock, pnpmLockYaml] = await Promise.all([
|
||||||
fs
|
fs
|
||||||
.readJson(path.join(currentDestPath, 'package-lock.json'))
|
.readJson(path.join(currentDestPath, 'package-lock.json'))
|
||||||
.catch(error => {
|
.catch(error => {
|
||||||
@@ -263,17 +280,26 @@ export async function scanParentDirs(
|
|||||||
throw error;
|
throw error;
|
||||||
}),
|
}),
|
||||||
fs.pathExists(path.join(currentDestPath, 'yarn.lock')),
|
fs.pathExists(path.join(currentDestPath, 'yarn.lock')),
|
||||||
|
readConfigFile<{ lockfileVersion: number }>(
|
||||||
|
path.join(currentDestPath, 'pnpm-lock.yaml')
|
||||||
|
),
|
||||||
]);
|
]);
|
||||||
|
|
||||||
if (packageLockJson && !hasYarnLock) {
|
if (packageLockJson && !hasYarnLock && !pnpmLockYaml) {
|
||||||
cliType = 'npm';
|
cliType = 'npm';
|
||||||
lockfileVersion = packageLockJson.lockfileVersion;
|
lockfileVersion = packageLockJson.lockfileVersion;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!packageLockJson && !hasYarnLock && pnpmLockYaml) {
|
||||||
|
cliType = 'pnpm';
|
||||||
|
// just ensure that it is read as a number and not a string
|
||||||
|
lockfileVersion = Number(pnpmLockYaml.lockfileVersion);
|
||||||
|
}
|
||||||
|
|
||||||
// Only stop iterating if a lockfile was found, because it's possible
|
// Only stop iterating if a lockfile was found, because it's possible
|
||||||
// that the lockfile is in a higher path than where the `package.json`
|
// that the lockfile is in a higher path than where the `package.json`
|
||||||
// file was found.
|
// file was found.
|
||||||
if (packageLockJson || hasYarnLock) {
|
if (packageLockJson || hasYarnLock || pnpmLockYaml) {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -283,7 +309,7 @@ export async function scanParentDirs(
|
|||||||
currentDestPath = newDestPath;
|
currentDestPath = newDestPath;
|
||||||
}
|
}
|
||||||
|
|
||||||
return { cliType, packageJson, lockfileVersion };
|
return { cliType, packageJson, lockfileVersion, packageJsonPath };
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function walkParentDirs({
|
export async function walkParentDirs({
|
||||||
@@ -309,22 +335,49 @@ export async function walkParentDirs({
|
|||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isSet<T>(v: any): v is Set<T> {
|
||||||
|
return v?.constructor?.name === 'Set';
|
||||||
|
}
|
||||||
|
|
||||||
export async function runNpmInstall(
|
export async function runNpmInstall(
|
||||||
destPath: string,
|
destPath: string,
|
||||||
args: string[] = [],
|
args: string[] = [],
|
||||||
spawnOpts?: SpawnOptions,
|
spawnOpts?: SpawnOptions,
|
||||||
meta?: Meta,
|
meta?: Meta,
|
||||||
nodeVersion?: NodeVersion
|
nodeVersion?: NodeVersion
|
||||||
) {
|
): Promise<boolean> {
|
||||||
if (meta?.isDev) {
|
if (meta?.isDev) {
|
||||||
debug('Skipping dependency installation because dev mode is enabled');
|
debug('Skipping dependency installation because dev mode is enabled');
|
||||||
return;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
assert(path.isAbsolute(destPath));
|
assert(path.isAbsolute(destPath));
|
||||||
|
|
||||||
|
try {
|
||||||
|
await runNpmInstallSema.acquire();
|
||||||
|
const { cliType, packageJsonPath, lockfileVersion } = await scanParentDirs(
|
||||||
|
destPath
|
||||||
|
);
|
||||||
|
|
||||||
|
// Only allow `runNpmInstall()` to run once per `package.json`
|
||||||
|
// when doing a default install (no additional args)
|
||||||
|
if (meta && packageJsonPath && args.length === 0) {
|
||||||
|
if (!isSet<string>(meta.runNpmInstallSet)) {
|
||||||
|
meta.runNpmInstallSet = new Set<string>();
|
||||||
|
}
|
||||||
|
if (isSet<string>(meta.runNpmInstallSet)) {
|
||||||
|
if (meta.runNpmInstallSet.has(packageJsonPath)) {
|
||||||
|
return false;
|
||||||
|
} else {
|
||||||
|
meta.runNpmInstallSet.add(packageJsonPath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const installTime = Date.now();
|
||||||
|
console.log('Installing dependencies...');
|
||||||
debug(`Installing to ${destPath}`);
|
debug(`Installing to ${destPath}`);
|
||||||
|
|
||||||
const { cliType, lockfileVersion } = await scanParentDirs(destPath);
|
|
||||||
const opts: SpawnOptionsExtended = { cwd: destPath, ...spawnOpts };
|
const opts: SpawnOptionsExtended = { cwd: destPath, ...spawnOpts };
|
||||||
const env = opts.env ? { ...opts.env } : { ...process.env };
|
const env = opts.env ? { ...opts.env } : { ...process.env };
|
||||||
delete env.NODE_ENV;
|
delete env.NODE_ENV;
|
||||||
@@ -341,6 +394,13 @@ export async function runNpmInstall(
|
|||||||
commandArgs = args
|
commandArgs = args
|
||||||
.filter(a => a !== '--prefer-offline')
|
.filter(a => a !== '--prefer-offline')
|
||||||
.concat(['install', '--no-audit', '--unsafe-perm']);
|
.concat(['install', '--no-audit', '--unsafe-perm']);
|
||||||
|
} else if (cliType === 'pnpm') {
|
||||||
|
// PNPM's install command is similar to NPM's but without the audit nonsense
|
||||||
|
// @see options https://pnpm.io/cli/install
|
||||||
|
opts.prettyCommand = 'pnpm install';
|
||||||
|
commandArgs = args
|
||||||
|
.filter(a => a !== '--prefer-offline')
|
||||||
|
.concat(['install', '--unsafe-perm']);
|
||||||
} else {
|
} else {
|
||||||
opts.prettyCommand = 'yarn install';
|
opts.prettyCommand = 'yarn install';
|
||||||
commandArgs = ['install', ...args];
|
commandArgs = ['install', ...args];
|
||||||
@@ -350,7 +410,12 @@ export async function runNpmInstall(
|
|||||||
commandArgs.push('--production');
|
commandArgs.push('--production');
|
||||||
}
|
}
|
||||||
|
|
||||||
return spawnAsync(cliType, commandArgs, opts);
|
await spawnAsync(cliType, commandArgs, opts);
|
||||||
|
debug(`Install complete [${Date.now() - installTime}ms]`);
|
||||||
|
return true;
|
||||||
|
} finally {
|
||||||
|
runNpmInstallSema.release();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export function getEnvForPackageManager({
|
export function getEnvForPackageManager({
|
||||||
@@ -448,6 +513,8 @@ export async function runPackageJsonScript(
|
|||||||
|
|
||||||
if (cliType === 'npm') {
|
if (cliType === 'npm') {
|
||||||
opts.prettyCommand = `npm run ${scriptName}`;
|
opts.prettyCommand = `npm run ${scriptName}`;
|
||||||
|
} else if (cliType === 'pnpm') {
|
||||||
|
opts.prettyCommand = `pnpm run ${scriptName}`;
|
||||||
} else {
|
} else {
|
||||||
opts.prettyCommand = `yarn run ${scriptName}`;
|
opts.prettyCommand = `yarn run ${scriptName}`;
|
||||||
}
|
}
|
||||||
@@ -483,7 +550,7 @@ export async function runPipInstall(
|
|||||||
meta?: Meta
|
meta?: Meta
|
||||||
) {
|
) {
|
||||||
if (meta && meta.isDev) {
|
if (meta && meta.isDev) {
|
||||||
debug('Skipping dependency installation because dev mode is enabled');
|
debug('Skipping dependency installation because dev mode is enabled');
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
packages/build-utils/src/get-platform-env.ts (new file)
@@ -0,0 +1,23 @@
+import { NowBuildError } from './errors';
+
+/**
+ * Helper function to support both `VERCEL_` and legacy `NOW_` env vars.
+ * Throws an error if *both* env vars are defined.
+ */
+export const getPlatformEnv = (name: string): string | undefined => {
+  const vName = `VERCEL_${name}`;
+  const nName = `NOW_${name}`;
+  const v = process.env[vName];
+  const n = process.env[nName];
+  if (typeof v === 'string') {
+    if (typeof n === 'string') {
+      throw new NowBuildError({
+        code: 'CONFLICTING_ENV_VAR_NAMES',
+        message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`,
+        link: 'https://vercel.link/combining-old-and-new-config',
+      });
+    }
+    return v;
+  }
+  return n;
+};
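For context (not part of this diff), a short sketch of how the extracted helper resolves the two prefixes, matching the behaviour shown above:

import { getPlatformEnv } from '@vercel/build-utils';

// VERCEL_-prefixed variables win; NOW_ is the legacy fallback.
process.env.VERCEL_BUILDER_DEBUG = '1';
console.log(getPlatformEnv('BUILDER_DEBUG')); // '1'

delete process.env.VERCEL_BUILDER_DEBUG;
process.env.NOW_BUILDER_DEBUG = '1';
console.log(getPlatformEnv('BUILDER_DEBUG')); // '1', via the legacy NOW_ name

// Defining both VERCEL_BUILDER_DEBUG and NOW_BUILDER_DEBUG throws a
// NowBuildError with code CONFLICTING_ENV_VAR_NAMES.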
@@ -2,6 +2,7 @@ import FileBlob from './file-blob';
|
|||||||
import FileFsRef from './file-fs-ref';
|
import FileFsRef from './file-fs-ref';
|
||||||
import FileRef from './file-ref';
|
import FileRef from './file-ref';
|
||||||
import { Lambda, createLambda, getLambdaOptionsFromFunction } from './lambda';
|
import { Lambda, createLambda, getLambdaOptionsFromFunction } from './lambda';
|
||||||
|
import { NodejsLambda } from './nodejs-lambda';
|
||||||
import { Prerender } from './prerender';
|
import { Prerender } from './prerender';
|
||||||
import download, { DownloadedFiles, isSymbolicLink } from './fs/download';
|
import download, { DownloadedFiles, isSymbolicLink } from './fs/download';
|
||||||
import getWriteableDirectory from './fs/get-writable-directory';
|
import getWriteableDirectory from './fs/get-writable-directory';
|
||||||
@@ -31,17 +32,18 @@ import {
|
|||||||
getLatestNodeVersion,
|
getLatestNodeVersion,
|
||||||
getDiscontinuedNodeVersions,
|
getDiscontinuedNodeVersions,
|
||||||
} from './fs/node-version';
|
} from './fs/node-version';
|
||||||
import { NowBuildError } from './errors';
|
|
||||||
import streamToBuffer from './fs/stream-to-buffer';
|
import streamToBuffer from './fs/stream-to-buffer';
|
||||||
import shouldServe from './should-serve';
|
import shouldServe from './should-serve';
|
||||||
import debug from './debug';
|
import debug from './debug';
|
||||||
import getIgnoreFilter from './get-ignore-filter';
|
import getIgnoreFilter from './get-ignore-filter';
|
||||||
|
import { getPlatformEnv } from './get-platform-env';
|
||||||
|
|
||||||
export {
|
export {
|
||||||
FileBlob,
|
FileBlob,
|
||||||
FileFsRef,
|
FileFsRef,
|
||||||
FileRef,
|
FileRef,
|
||||||
Lambda,
|
Lambda,
|
||||||
|
NodejsLambda,
|
||||||
createLambda,
|
createLambda,
|
||||||
Prerender,
|
Prerender,
|
||||||
download,
|
download,
|
||||||
@@ -69,6 +71,7 @@ export {
|
|||||||
getLatestNodeVersion,
|
getLatestNodeVersion,
|
||||||
getDiscontinuedNodeVersions,
|
getDiscontinuedNodeVersions,
|
||||||
getSpawnOptions,
|
getSpawnOptions,
|
||||||
|
getPlatformEnv,
|
||||||
streamToBuffer,
|
streamToBuffer,
|
||||||
shouldServe,
|
shouldServe,
|
||||||
debug,
|
debug,
|
||||||
@@ -78,6 +81,7 @@ export {
|
|||||||
getIgnoreFilter,
|
getIgnoreFilter,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export { EdgeFunction } from './edge-function';
|
||||||
export {
|
export {
|
||||||
detectBuilders,
|
detectBuilders,
|
||||||
detectOutputDirectory,
|
detectOutputDirectory,
|
||||||
@@ -89,11 +93,6 @@ export { detectFramework } from './detect-framework';
|
|||||||
export { DetectorFilesystem } from './detectors/filesystem';
|
export { DetectorFilesystem } from './detectors/filesystem';
|
||||||
export { readConfigFile } from './fs/read-config-file';
|
export { readConfigFile } from './fs/read-config-file';
|
||||||
export { normalizePath } from './fs/normalize-path';
|
export { normalizePath } from './fs/normalize-path';
|
||||||
export {
|
|
||||||
_experimental_convertRuntimeToPlugin,
|
|
||||||
_experimental_updateFunctionsManifest,
|
|
||||||
_experimental_updateRoutesManifest,
|
|
||||||
} from './convert-runtime-to-plugin';
|
|
||||||
|
|
||||||
export * from './schemas';
|
export * from './schemas';
|
||||||
export * from './types';
|
export * from './types';
|
||||||
@@ -117,25 +116,3 @@ export const isOfficialRuntime = (desired: string, name?: string): boolean => {
|
|||||||
export const isStaticRuntime = (name?: string): boolean => {
|
export const isStaticRuntime = (name?: string): boolean => {
|
||||||
return isOfficialRuntime('static', name);
|
return isOfficialRuntime('static', name);
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
|
||||||
* Helper function to support both `VERCEL_` and legacy `NOW_` env vars.
|
|
||||||
* Throws an error if *both* env vars are defined.
|
|
||||||
*/
|
|
||||||
export const getPlatformEnv = (name: string): string | undefined => {
|
|
||||||
const vName = `VERCEL_${name}`;
|
|
||||||
const nName = `NOW_${name}`;
|
|
||||||
const v = process.env[vName];
|
|
||||||
const n = process.env[nName];
|
|
||||||
if (typeof v === 'string') {
|
|
||||||
if (typeof n === 'string') {
|
|
||||||
throw new NowBuildError({
|
|
||||||
code: 'CONFLICTING_ENV_VAR_NAMES',
|
|
||||||
message: `Both "${vName}" and "${nName}" env vars are defined. Please only define the "${vName}" env var.`,
|
|
||||||
link: 'https://vercel.link/combining-old-and-new-config',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return v;
|
|
||||||
}
|
|
||||||
return n;
|
|
||||||
};
|
|
||||||
|
|||||||
@@ -3,17 +3,17 @@ import Sema from 'async-sema';
|
|||||||
import { ZipFile } from 'yazl';
|
import { ZipFile } from 'yazl';
|
||||||
import minimatch from 'minimatch';
|
import minimatch from 'minimatch';
|
||||||
import { readlink } from 'fs-extra';
|
import { readlink } from 'fs-extra';
|
||||||
import { Files, Config } from './types';
|
|
||||||
import FileFsRef from './file-fs-ref';
|
|
||||||
import { isSymbolicLink } from './fs/download';
|
import { isSymbolicLink } from './fs/download';
|
||||||
import streamToBuffer from './fs/stream-to-buffer';
|
import streamToBuffer from './fs/stream-to-buffer';
|
||||||
|
import type { Files, Config } from './types';
|
||||||
|
|
||||||
interface Environment {
|
interface Environment {
|
||||||
[key: string]: string;
|
[key: string]: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface LambdaOptions {
|
export type LambdaOptions = LambdaOptionsWithFiles | LambdaOptionsWithZipBuffer;
|
||||||
files: Files;
|
|
||||||
|
export interface LambdaOptionsBase {
|
||||||
handler: string;
|
handler: string;
|
||||||
runtime: string;
|
runtime: string;
|
||||||
memory?: number;
|
memory?: number;
|
||||||
@@ -21,6 +21,21 @@ interface LambdaOptions {
|
|||||||
environment?: Environment;
|
environment?: Environment;
|
||||||
allowQuery?: string[];
|
allowQuery?: string[];
|
||||||
regions?: string[];
|
regions?: string[];
|
||||||
|
supportsMultiPayloads?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface LambdaOptionsWithFiles extends LambdaOptionsBase {
|
||||||
|
files: Files;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* @deprecated Use `LambdaOptionsWithFiles` instead.
|
||||||
|
*/
|
||||||
|
export interface LambdaOptionsWithZipBuffer extends LambdaOptionsBase {
|
||||||
|
/**
|
||||||
|
* @deprecated Use `files` property instead.
|
||||||
|
*/
|
||||||
|
zipBuffer: Buffer;
|
||||||
}
|
}
|
||||||
|
|
||||||
interface GetLambdaOptionsFromFunctionOptions {
|
interface GetLambdaOptionsFromFunctionOptions {
|
||||||
@@ -29,22 +44,23 @@ interface GetLambdaOptionsFromFunctionOptions {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export class Lambda {
|
export class Lambda {
|
||||||
public type: 'Lambda';
|
type: 'Lambda';
|
||||||
public files: Files;
|
files?: Files;
|
||||||
public handler: string;
|
handler: string;
|
||||||
public runtime: string;
|
runtime: string;
|
||||||
public memory?: number;
|
memory?: number;
|
||||||
public maxDuration?: number;
|
maxDuration?: number;
|
||||||
public environment: Environment;
|
environment: Environment;
|
||||||
public allowQuery?: string[];
|
allowQuery?: string[];
|
||||||
public regions?: string[];
|
regions?: string[];
|
||||||
/**
|
/**
|
||||||
* @deprecated Use `await lambda.createZip()` instead.
|
* @deprecated Use `await lambda.createZip()` instead.
|
||||||
*/
|
*/
|
||||||
public zipBuffer?: Buffer;
|
zipBuffer?: Buffer;
|
||||||
|
supportsMultiPayloads?: boolean;
|
||||||
|
|
||||||
constructor({
|
constructor(opts: LambdaOptions) {
|
||||||
files,
|
const {
|
||||||
handler,
|
handler,
|
||||||
runtime,
|
runtime,
|
||||||
maxDuration,
|
maxDuration,
|
||||||
@@ -52,8 +68,14 @@ export class Lambda {
|
|||||||
environment = {},
|
environment = {},
|
||||||
allowQuery,
|
allowQuery,
|
||||||
regions,
|
regions,
|
||||||
}: LambdaOptions) {
|
supportsMultiPayloads,
|
||||||
assert(typeof files === 'object', '"files" must be an object');
|
} = opts;
|
||||||
|
if ('files' in opts) {
|
||||||
|
assert(typeof opts.files === 'object', '"files" must be an object');
|
||||||
|
}
|
||||||
|
if ('zipBuffer' in opts) {
|
||||||
|
assert(Buffer.isBuffer(opts.zipBuffer), '"zipBuffer" must be a Buffer');
|
||||||
|
}
|
||||||
assert(typeof handler === 'string', '"handler" is not a string');
|
assert(typeof handler === 'string', '"handler" is not a string');
|
||||||
assert(typeof runtime === 'string', '"runtime" is not a string');
|
assert(typeof runtime === 'string', '"runtime" is not a string');
|
||||||
assert(typeof environment === 'object', '"environment" is not an object');
|
assert(typeof environment === 'object', '"environment" is not an object');
|
||||||
@@ -74,6 +96,13 @@ export class Lambda {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (supportsMultiPayloads !== undefined) {
|
||||||
|
assert(
|
||||||
|
typeof supportsMultiPayloads === 'boolean',
|
||||||
|
'"supportsMultiPayloads" is not a boolean'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
if (regions !== undefined) {
|
if (regions !== undefined) {
|
||||||
assert(Array.isArray(regions), '"regions" is not an Array');
|
assert(Array.isArray(regions), '"regions" is not an Array');
|
||||||
assert(
|
assert(
|
||||||
@@ -82,7 +111,7 @@ export class Lambda {
|
|||||||
);
|
);
|
||||||
}
|
}
|
||||||
this.type = 'Lambda';
|
this.type = 'Lambda';
|
||||||
this.files = files;
|
this.files = 'files' in opts ? opts.files : undefined;
|
||||||
this.handler = handler;
|
this.handler = handler;
|
||||||
this.runtime = runtime;
|
this.runtime = runtime;
|
||||||
this.memory = memory;
|
this.memory = memory;
|
||||||
@@ -90,11 +119,16 @@ export class Lambda {
|
|||||||
this.environment = environment;
|
this.environment = environment;
|
||||||
this.allowQuery = allowQuery;
|
this.allowQuery = allowQuery;
|
||||||
this.regions = regions;
|
this.regions = regions;
|
||||||
|
this.zipBuffer = 'zipBuffer' in opts ? opts.zipBuffer : undefined;
|
||||||
|
this.supportsMultiPayloads = supportsMultiPayloads;
|
||||||
}
|
}
|
||||||
|
|
||||||
async createZip(): Promise<Buffer> {
|
async createZip(): Promise<Buffer> {
|
||||||
let { zipBuffer } = this;
|
let { zipBuffer } = this;
|
||||||
if (!zipBuffer) {
|
if (!zipBuffer) {
|
||||||
|
if (!this.files) {
|
||||||
|
throw new Error('`files` is not defined');
|
||||||
|
}
|
||||||
await sema.acquire();
|
await sema.acquire();
|
||||||
try {
|
try {
|
||||||
zipBuffer = await createZip(this.files);
|
zipBuffer = await createZip(this.files);
|
||||||
@@ -128,7 +162,7 @@ export async function createZip(files: Files): Promise<Buffer> {
|
|||||||
for (const name of names) {
|
for (const name of names) {
|
||||||
const file = files[name];
|
const file = files[name];
|
||||||
if (file.mode && isSymbolicLink(file.mode) && file.type === 'FileFsRef') {
|
if (file.mode && isSymbolicLink(file.mode) && file.type === 'FileFsRef') {
|
||||||
const symlinkTarget = await readlink((file as FileFsRef).fsPath);
|
const symlinkTarget = await readlink(file.fsPath);
|
||||||
symlinkTargets.set(name, symlinkTarget);
|
symlinkTargets.set(name, symlinkTarget);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -142,7 +176,7 @@ export async function createZip(files: Files): Promise<Buffer> {
|
|||||||
if (typeof symlinkTarget === 'string') {
|
if (typeof symlinkTarget === 'string') {
|
||||||
zipFile.addBuffer(Buffer.from(symlinkTarget, 'utf8'), name, opts);
|
zipFile.addBuffer(Buffer.from(symlinkTarget, 'utf8'), name, opts);
|
||||||
} else {
|
} else {
|
||||||
const stream = file.toStream() as import('stream').Readable;
|
const stream = file.toStream();
|
||||||
stream.on('error', reject);
|
stream.on('error', reject);
|
||||||
zipFile.addReadStream(stream, name, opts);
|
zipFile.addReadStream(stream, name, opts);
|
||||||
}
|
}
|
||||||
|
|||||||
packages/build-utils/src/nodejs-lambda.ts (Normal file, 27 lines)
@@ -0,0 +1,27 @@
+import { Lambda, LambdaOptionsWithFiles } from './lambda';
+
+interface NodejsLambdaOptions extends LambdaOptionsWithFiles {
+  shouldAddHelpers: boolean;
+  shouldAddSourcemapSupport: boolean;
+  awsLambdaHandler?: string;
+}
+
+export class NodejsLambda extends Lambda {
+  launcherType: 'Nodejs';
+  shouldAddHelpers: boolean;
+  shouldAddSourcemapSupport: boolean;
+  awsLambdaHandler?: string;
+
+  constructor({
+    shouldAddHelpers,
+    shouldAddSourcemapSupport,
+    awsLambdaHandler,
+    ...opts
+  }: NodejsLambdaOptions) {
+    super(opts);
+    this.launcherType = 'Nodejs';
+    this.shouldAddHelpers = shouldAddHelpers;
+    this.shouldAddSourcemapSupport = shouldAddSourcemapSupport;
+    this.awsLambdaHandler = awsLambdaHandler;
+  }
+}
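`NodejsLambda` forwards the shared options to `super()` and adds the Node-specific launcher metadata. A construction sketch under assumed option values (the runtime string and file contents are illustrative, and this assumes the class is re-exported from the package root):

import { FileBlob, NodejsLambda } from '@vercel/build-utils';

const lambda = new NodejsLambda({
  files: {
    'index.js': new FileBlob({ data: 'module.exports = (req, res) => res.end("hi");' }),
  },
  handler: 'index.js',
  runtime: 'nodejs16.x', // assumed runtime identifier
  environment: {},
  shouldAddHelpers: true, // wrap req/res with the Vercel helper methods
  shouldAddSourcemapSupport: false,
});

// launcherType is fixed to 'Nodejs' by the constructor.
console.log(lambda.launcherType, lambda.shouldAddHelpers);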
@@ -1,12 +1,10 @@
-import FileBlob from './file-blob';
-import FileFsRef from './file-fs-ref';
-import FileRef from './file-ref';
+import { File } from './types';
 import { Lambda } from './lambda';

 interface PrerenderOptions {
   expiration: number | false;
   lambda: Lambda;
-  fallback: FileBlob | FileFsRef | FileRef | null;
+  fallback: File | null;
   group?: number;
   bypassToken?: string | null /* optional to be non-breaking change */;
   allowQuery?: string[];
@@ -16,7 +14,7 @@ export class Prerender {
   public type: 'Prerender';
   public expiration: number | false;
   public lambda: Lambda;
-  public fallback: FileBlob | FileFsRef | FileRef | null;
+  public fallback: File | null;
   public group?: number;
   public bypassToken: string | null;
   public allowQuery?: string[];
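Since `fallback` is now typed as the shared `File` union, any file implementation (`FileBlob`, `FileFsRef`, or `FileRef`) can serve as the prerender fallback. A small sketch with assumed values:

import { FileBlob, Lambda, Prerender } from '@vercel/build-utils';

// Assumed: this Lambda renders the page on demand.
const lambda = new Lambda({
  files: { 'index.js': new FileBlob({ data: 'module.exports = () => {};' }) },
  handler: 'index.js',
  runtime: 'nodejs16.x', // assumed runtime identifier
  environment: {},
});

const page = new Prerender({
  expiration: 60, // revalidate after 60 seconds
  lambda,
  fallback: new FileBlob({ data: '<!doctype html><p>loading</p>' }),
});

console.log(page.type, page.fallback !== null);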
@@ -1,20 +1,21 @@
-import FileRef from './file-ref';
-import FileFsRef from './file-fs-ref';
+import type FileRef from './file-ref';
+import type FileFsRef from './file-fs-ref';
+import type FileBlob from './file-blob';
+import type { Lambda } from './lambda';
+import type { Prerender } from './prerender';
+import type { EdgeFunction } from './edge-function';

 export interface Env {
   [name: string]: string | undefined;
 }

-export interface File {
+export type File = FileRef | FileFsRef | FileBlob;
+export interface FileBase {
   type: string;
   mode: number;
   contentType?: string;
   toStream: () => NodeJS.ReadableStream;
   toStreamAsync?: () => Promise<NodeJS.ReadableStream>;
-  /**
-   * The absolute path to the file in the filesystem
-   */
-  fsPath?: string;
 }

 export interface Files {
@@ -22,16 +23,6 @@ export interface Files {
 }

 export interface Config {
-  [key: string]:
-    | string
-    | string[]
-    | boolean
-    | number
-    | { [key: string]: string }
-    | BuilderFunctions
-    | ProjectSettings
-    | undefined
-    | null;
   maxLambdaSize?: string;
   includeFiles?: string | string[];
   excludeFiles?: string | string[];
@@ -50,6 +41,7 @@ export interface Config {
   devCommand?: string;
   framework?: string | null;
   nodeVersion?: string;
+  [key: string]: unknown;
 }

 export interface Meta {
@@ -62,35 +54,7 @@ export interface Meta {
   env?: Env;
   buildEnv?: Env;
   avoidTopLevelInstall?: boolean;
-}
-
-export interface AnalyzeOptions {
-  /**
-   * All source files of the project
-   */
-  files: {
-    [filePath: string]: FileRef;
-  };
-
-  /**
-   * Name of entrypoint file for this particular build job. Value
-   * `files[entrypoint]` is guaranteed to exist and be a valid File reference.
-   * `entrypoint` is always a discrete file and never a glob, since globs are
-   * expanded into separate builds at deployment time.
-   */
-  entrypoint: string;
-
-  /**
-   * A writable temporary directory where you are encouraged to perform your
-   * build process. This directory will be populated with the restored cache.
-   */
-  workPath: string;
-
-  /**
-   * An arbitrary object passed by the user in the build definition defined
-   * in `vercel.json`.
-   */
-  config: Config;
+  [key: string]: unknown;
 }

 export interface BuildOptions {
@@ -155,10 +119,11 @@ export interface PrepareCacheOptions {
   workPath: string;

   /**
-   * A writable temporary directory where you can build a cache to use for
-   * the next run.
+   * The "Root Directory" is assigned to the `workPath` so the `repoRootPath`
+   * is the Git Repository Root. This is only relevant for Monorepos.
+   * See https://vercel.com/blog/monorepos
    */
-  cachePath: string;
+  repoRootPath?: string;

   /**
    * An arbitrary object passed by the user in the build definition defined
@@ -367,4 +332,75 @@ export interface ProjectSettings {
   sourceFilesOutsideRootDirectory?: boolean;
   directoryListing?: boolean;
   gitForkProtection?: boolean;
+  commandForIgnoringBuildStep?: string | null;
 }
+
+export interface BuilderV2 {
+  version: 2;
+  build: BuildV2;
+  prepareCache?: PrepareCache;
+}
+
+export interface BuilderV3 {
+  version: 3;
+  build: BuildV3;
+  prepareCache?: PrepareCache;
+  startDevServer?: StartDevServer;
+}
+
+type ImageFormat = 'image/avif' | 'image/webp';
+
+export interface Images {
+  domains: string[];
+  sizes: number[];
+  minimumCacheTTL?: number;
+  formats?: ImageFormat[];
+}
+
+/**
+ * If a Builder ends up creating filesystem outputs conforming to
+ * the Build Output API, then the Builder should return this type.
+ */
+export interface BuildResultBuildOutput {
+  /**
+   * Version number of the Build Output API that was created.
+   * Currently only `3` is a valid value.
+   * @example 3
+   */
+  buildOutputVersion: 3;
+  /**
+   * Filesystem path to the Build Output directory.
+   * @example "/path/to/.vercel/output"
+   */
+  buildOutputPath: string;
+}
+
+/**
+ * When a Builder implements `version: 2`, the `build()` function is expected
+ * to return this type.
+ */
+export interface BuildResultV2Typical {
+  // TODO: use proper `Route` type from `routing-utils` (perhaps move types to a common package)
+  routes?: any[];
+  images?: Images;
+  output: {
+    [key: string]: File | Lambda | Prerender | EdgeFunction;
+  };
+  wildcard?: Array<{
+    domain: string;
+    value: string;
+  }>;
+}
+
+export type BuildResultV2 = BuildResultV2Typical | BuildResultBuildOutput;
+
+export interface BuildResultV3 {
+  output: Lambda;
+}
+
+export type BuildV2 = (options: BuildOptions) => Promise<BuildResultV2>;
+export type BuildV3 = (options: BuildOptions) => Promise<BuildResultV3>;
+export type PrepareCache = (options: PrepareCacheOptions) => Promise<Files>;
+export type StartDevServer = (
+  options: StartDevServerOptions
+) => Promise<StartDevServerResult>;
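The new `BuilderV2`, `BuildV2`, and `PrepareCache` types describe the module shape a version 2 Builder exports. A hypothetical minimal Builder against these types might look like the sketch below; the output file, route, and cache glob are invented for illustration, and it assumes the new types are re-exported from the package root.

import { FileBlob, glob } from '@vercel/build-utils';
import type { BuildV2, PrepareCache } from '@vercel/build-utils';

export const version = 2;

// Emit a single static page, keyed by its output path.
export const build: BuildV2 = async ({ entrypoint }) => ({
  output: {
    'index.html': new FileBlob({
      data: `<!doctype html><p>built from ${entrypoint}</p>`,
    }),
  },
  routes: [{ src: '/(.*)', dest: '/index.html' }],
});

// Persist node_modules between builds (assumed cache layout).
export const prepareCache: PrepareCache = ({ workPath }) =>
  glob('node_modules/**', workPath);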
packages/build-utils/test/fixtures/22-pnpm/package.json (vendored, Normal file, 15 lines)
@@ -0,0 +1,15 @@
+{
+  "name": "22-pnpm",
+  "version": "1.0.0",
+  "description": "",
+  "main": "index.js",
+  "scripts": {
+    "build": "mkdir -p public && (printf \"pnpm version: \" && pnpm -v) > public/index.txt"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "once": "^1.4.0"
+  }
+}

packages/build-utils/test/fixtures/22-pnpm/pnpm-lock.yaml (generated, vendored, Normal file, 19 lines)
@@ -0,0 +1,19 @@
+lockfileVersion: 5.3
+
+specifiers:
+  once: ^1.4.0
+
+dependencies:
+  once: 1.4.0
+
+packages:
+
+  /once/1.4.0:
+    resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
+    dependencies:
+      wrappy: 1.0.2
+    dev: false
+
+  /wrappy/1.0.2:
+    resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
+    dev: false

packages/build-utils/test/fixtures/22-pnpm/vercel.json (vendored, Normal file, 11 lines)
@@ -0,0 +1,11 @@
+{
+  "version": 2,
+  "builds": [{ "src": "package.json", "use": "@vercel/static-build" }],
+  "probes": [
+    {
+      "path": "/",
+      "mustContain": "pnpm version: 6",
+      "logMustContain": "pnpm run build"
+    }
+  ]
+}
packages/build-utils/test/fixtures/23-pnpm-workspaces/c/package.json (vendored, Normal file, 5 lines)
@@ -0,0 +1,5 @@
+{
+  "name": "c",
+  "license": "MIT",
+  "version": "0.1.0"
+}

packages/build-utils/test/fixtures/23-pnpm-workspaces/d/package.json (vendored, Normal file, 8 lines)
@@ -0,0 +1,8 @@
+{
+  "name": "d",
+  "license": "MIT",
+  "version": "0.1.0",
+  "devDependencies": {
+    "once": "1.4.0"
+  }
+}

packages/build-utils/test/fixtures/23-pnpm-workspaces/package.json (vendored, Normal file, 6 lines)
@@ -0,0 +1,6 @@
+{
+  "private": true,
+  "name": "23-pnpm-workspaces",
+  "license": "MIT",
+  "version": "1.0.0"
+}

packages/build-utils/test/fixtures/23-pnpm-workspaces/pnpm-lock.yaml (generated, vendored, Normal file, 27 lines)
@@ -0,0 +1,27 @@
+lockfileVersion: 5.3
+
+importers:
+
+  .:
+    specifiers: {}
+
+  c:
+    specifiers: {}
+
+  d:
+    specifiers:
+      once: 1.4.0
+    devDependencies:
+      once: 1.4.0
+
+packages:
+
+  /once/1.4.0:
+    resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
+    dependencies:
+      wrappy: 1.0.2
+    dev: true
+
+  /wrappy/1.0.2:
+    resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
+    dev: true

packages/build-utils/test/fixtures/23-pnpm-workspaces/pnpm-workspace.yaml (vendored, Normal file, 3 lines)
@@ -0,0 +1,3 @@
+packages:
+  - 'c'
+  - 'd'
packages/build-utils/test/fixtures/24-pnpm-hoisted/.gitignore (vendored, Normal file, 1 line)
@@ -0,0 +1 @@
+.vercel

packages/build-utils/test/fixtures/24-pnpm-hoisted/.npmrc (vendored, Normal file, 1 line)
@@ -0,0 +1 @@
+node-linker=hoisted

packages/build-utils/test/fixtures/24-pnpm-hoisted/a/index.js (vendored, Normal file, 5 lines)
@@ -0,0 +1,5 @@
+const once = require('once');
+
+module.exports = () => {
+  once(() => {});
+};

packages/build-utils/test/fixtures/24-pnpm-hoisted/b/index.js (vendored, Normal file, 5 lines)
@@ -0,0 +1,5 @@
+const once = require('once');
+
+module.exports = () => {
+  once(() => {});
+};

packages/build-utils/test/fixtures/24-pnpm-hoisted/index.js (vendored, Normal file, 10 lines)
@@ -0,0 +1,10 @@
+const { exec } = require('exeggcute');
+
+const a = require('./a');
+const b = require('./b');
+
+a();
+b();
+
+exec('mkdir public', __dirname);
+exec('echo "Hello, World!" > public/index.html', __dirname);

packages/build-utils/test/fixtures/24-pnpm-hoisted/package.json (vendored, Normal file, 11 lines)
@@ -0,0 +1,11 @@
+{
+  "private": "true",
+  "name": "24-pnpm-hoisted",
+  "scripts": {
+    "build": "ls -Al node_modules && node index.js"
+  },
+  "dependencies": {
+    "exeggcute": "^1.0.0",
+    "once": "^1.4.0"
+  }
+}

packages/build-utils/test/fixtures/24-pnpm-hoisted/pnpm-lock.yaml (generated, vendored, Normal file, 25 lines)
@@ -0,0 +1,25 @@
+lockfileVersion: 5.3
+
+specifiers:
+  exeggcute: ^1.0.0
+  once: ^1.4.0
+
+dependencies:
+  exeggcute: 1.0.0
+  once: 1.4.0
+
+packages:
+
+  /exeggcute/1.0.0:
+    resolution: {integrity: sha1-qLXakIhjGCDm9ggx4CvEc5mnbBU=}
+    dev: false
+
+  /once/1.4.0:
+    resolution: {integrity: sha1-WDsap3WWHUsROsF9nFC6753Xa9E=}
+    dependencies:
+      wrappy: 1.0.2
+    dev: false
+
+  /wrappy/1.0.2:
+    resolution: {integrity: sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=}
+    dev: false
Some files were not shown because too many files have changed in this diff.