Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-11 04:22:13 +00:00)

Compare commits: update/ver ... @vercel/cl

109 Commits
Commits (SHA1):

6f4a1b527b, 1b95576dd2, 9227471aca, bf060296eb, 9b3aa41f2e, ae36585cdb, e4c636ddd2, ae3b25be4b,
a64ed13a40, 6c1c0e6676, 82fdd5d121, 8b40f4435e, 38c87602bb, 7aef3013e7, c18676ab4d, df450c815d,
792ab38760, 0bba3e76c1, 3d961ffbb9, a3039f57bb, 5499fa9a04, b9fd64faff, 1202ff7b2b, abd9f019f1,
edb5eead81, 6b865ff753, 4fd0734c48, f815421acb, 5da926fee1, 3559531e4c, 449a3b3648, 7bd338618c,
9048a6f584, 0cacb1bdac, 318bf35f82, cc7b2691c1, 3a75c37adc, acd0e84e73, 49087d5aac, 7d7f3df980,
5cf0c316e9, f4501433c8, 19831593ce, 5d85bb1426, f194d54b0c, 6542086843, 2721b3449d, adb284519a,
b2d91f3121, 32664cd13b, db468c489a, edd9bb506c, a72549a290, 4aa6a13912, 81ea0082f1, 6dff0875f5,
30aa392c0a, c4fc060030, 3fa08bf64f, 43056bde1f, a49966b9b4, 7f55de71bb, db8e36e04c, 82924bb5c4,
18b5fac93e, a6012e600b, c3abf73f58, 4873b8b379, 6248139281, 507a5de3cd, be1c78e72f, c277c649c6,
ed1dacd276, 144e890bfa, af097c2c06, 873a582986, 986b4c0b1a, 14071819ac, 2a8588a0c5, 0f7e89f76c,
e68ed33a88, d3e98cdb73, bf4e77110f, 5b5197d2c5, a6ccf6c180, 8d848ebe8b, 6ef2c16d63, 6c71ceaaeb,
1dcb6dfc6f, 4fd24575e5, 8714f1905e, 2e69f2513d, 979e4b674a, 07fa47bcfb, 307c4fc377, 44868d79b6,
df9a4afa5c, 8a6869bae2, a3fc3c1ca7, 44037c58be, 1a9419b690, 93d0e5966c, 306f3a1312, 9c67e8115e,
b890ac1e44, 28e71ff109, 2bf060c708, c8ef1d71d1, 3d2efc7dcd
@@ -1,11 +1,10 @@
node_modules
dist
examples
packages/build-utils/test/fixtures
packages/*/test/fixtures
packages/cli/@types
packages/cli/download
packages/cli/dist
packages/cli/test/fixtures
packages/cli/test/dev/fixtures
packages/cli/bin
packages/cli/link
@@ -13,6 +12,6 @@ packages/cli/src/util/dev/templates/*.ts
packages/client/tests/fixtures
packages/client/lib
packages/node/src/bridge.ts
packages/node/test/fixtures
packages/node-bridge/bridge.js
packages/node-bridge/launcher.js
packages/middleware/src/entries.js
2  .github/workflows/cancel.yml  vendored
@@ -11,7 +11,7 @@ jobs:
    runs-on: ubuntu-latest
    timeout-minutes: 2
    steps:
      - uses: styfle/cancel-workflow-action@0.4.1
      - uses: styfle/cancel-workflow-action@0.9.1
        with:
          workflow_id: 849295, 849296, 849297, 849298
          access_token: ${{ github.token }}
2  .github/workflows/test-integration-dev.yml  vendored
@@ -11,7 +11,7 @@ on:
jobs:
  test:
    name: Dev
    timeout-minutes: 60
    timeout-minutes: 75
    strategy:
      fail-fast: false
      matrix:
1  .gitignore  vendored
@@ -27,3 +27,4 @@ test/lib/deployment/failed-page.txt
/public
__pycache__
.vercel
.output
@@ -332,7 +332,7 @@ This is an abstract enumeration type that is implemented by one of the following
- `nodejs10.x`
- `go1.x`
- `java11`
- `python3.8`
- `python3.9`
- `python3.6`
- `dotnetcore2.1`
- `ruby2.5`
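These runtime identifiers end up on the `runtime` field of the `Lambda` output that a Runtime's `build()` returns. As a minimal sketch of that output shape: the field names mirror what `convert-runtime-to-plugin.ts` later in this compare reads (`handler`, `runtime`, `memory`, `maxDuration`, `environment`, `allowQuery`), while the concrete values and the interface name are illustrative assumptions.

```ts
// Sketch only: an output object a legacy Runtime's build() might return.
interface LambdaLikeOutput {
  handler: string;                      // e.g. "index.handler" -> file "index.py", method "handler"
  runtime: string;                      // one of the identifiers listed above, e.g. "python3.9"
  memory?: number;                      // in MB
  maxDuration?: number;                 // in seconds
  environment?: Record<string, string>;
  allowQuery?: string[];
}

const output: LambdaLikeOutput = {
  handler: 'index.handler',
  runtime: 'python3.9',
  memory: 1024,
  maxDuration: 10,
  environment: {},
};
```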
@@ -398,12 +398,12 @@ This utility allows you to _scan_ the filesystem and return a [`Files`](#files)
The following trivial example downloads everything to the filesystem, only to return it back (therefore just re-creating the passed-in [`Files`](#files)):

```js
const { glob, download } = require('@vercel/build-utils')
const { glob, download } = require('@vercel/build-utils');

exports.build = ({ files, workPath }) => {
  await download(files, workPath)
  return glob('**', workPath)
}
  await download(files, workPath);
  return glob('**', workPath);
};
```
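The snippet above uses `await` inside the exported function, so the function itself has to be `async` for the example to actually run. A runnable variant of the same example with that one keyword added (the parameter types are assumptions for illustration):

```ts
const { glob, download } = require('@vercel/build-utils');

exports.build = async ({ files, workPath }: { files: any; workPath: string }) => {
  // Write the passed-in `Files` to disk under `workPath`...
  await download(files, workPath);
  // ...then scan the filesystem and return an equivalent `Files` map.
  return glob('**', workPath);
};
```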
### `getWritableDirectory()`
@@ -14,8 +14,6 @@ const frameworks = (_frameworks as Framework[])
    sort: undefined,
    dependency: undefined,
    defaultRoutes: undefined,
    devCommand: undefined,
    buildCommand: undefined,
  };

  if (framework.logo) {
@@ -5,7 +5,7 @@
  "description": "API for the vercel/vercel repo",
  "main": "index.js",
  "scripts": {
    "vercel-build": "yarn --cwd .. && node ../utils/run.js build all"
    "vercel-build": "node ../utils/run.js build all"
  },
  "dependencies": {
    "@sentry/node": "5.11.1",
15787  examples/nextjs/package-lock.json  generated  Normal file
File diff suppressed because it is too large

@@ -9,7 +9,7 @@
    "lint": "next lint"
  },
  "dependencies": {
    "next": "11.1.2",
    "next": "12.0.1",
    "react": "17.0.2",
    "react-dom": "17.0.2"
  },

File diff suppressed because it is too large
8  examples/remix/.gitignore  vendored  Normal file
@@ -0,0 +1,8 @@
node_modules

.cache
.vercel
.output

public/build
api/_build
34  examples/remix/README.md  Normal file
@@ -0,0 +1,34 @@
# Welcome to Remix!

- [Remix Docs](https://remix.run/docs)

## Deployment

After having run the `create-remix` command and selected "Vercel" as a deployment target, you only need to [import your Git repository](https://vercel.com/new) into Vercel, and it will be deployed.

If you'd like to avoid using a Git repository, you can also deploy the directory by running [Vercel CLI](https://vercel.com/cli):

```sh
npm i -g vercel
vercel
```

It is generally recommended to use a Git repository, because future commits will then automatically be deployed by Vercel, through its [Git Integration](https://vercel.com/docs/concepts/git).

## Development

To run your Remix app locally, make sure your project's local dependencies are installed:

```sh
npm install
```

Afterwards, start the Remix development server like so:

```sh
npm run dev
```

Open up [http://localhost:3000](http://localhost:3000) and you should be ready to go!

If you're used to using the `vercel dev` command provided by [Vercel CLI](https://vercel.com/cli) instead, you can also use that, but it's not needed.
5  examples/remix/api/index.js  Normal file
@@ -0,0 +1,5 @@
const { createRequestHandler } = require("@remix-run/vercel");

module.exports = createRequestHandler({
  build: require("./_build")
});
4  examples/remix/app/entry.client.tsx  Normal file
@@ -0,0 +1,4 @@
import { hydrate } from "react-dom";
import { RemixBrowser } from "remix";

hydrate(<RemixBrowser />, document);
21  examples/remix/app/entry.server.tsx  Normal file
@@ -0,0 +1,21 @@
import { renderToString } from "react-dom/server";
import { RemixServer } from "remix";
import type { EntryContext } from "remix";

export default function handleRequest(
  request: Request,
  responseStatusCode: number,
  responseHeaders: Headers,
  remixContext: EntryContext
) {
  let markup = renderToString(
    <RemixServer context={remixContext} url={request.url} />
  );

  responseHeaders.set("Content-Type", "text/html");

  return new Response("<!DOCTYPE html>" + markup, {
    status: responseStatusCode,
    headers: responseHeaders
  });
}
178
examples/remix/app/root.tsx
Normal file
178
examples/remix/app/root.tsx
Normal file
@@ -0,0 +1,178 @@
|
||||
import {
|
||||
Link,
|
||||
Links,
|
||||
LiveReload,
|
||||
Meta,
|
||||
Outlet,
|
||||
Scripts,
|
||||
ScrollRestoration,
|
||||
useCatch
|
||||
} from "remix";
|
||||
import type { LinksFunction } from "remix";
|
||||
|
||||
import globalStylesUrl from "~/styles/global.css";
|
||||
import darkStylesUrl from "~/styles/dark.css";
|
||||
|
||||
// https://remix.run/api/app#links
|
||||
export let links: LinksFunction = () => {
|
||||
return [
|
||||
{ rel: "stylesheet", href: globalStylesUrl },
|
||||
{
|
||||
rel: "stylesheet",
|
||||
href: darkStylesUrl,
|
||||
media: "(prefers-color-scheme: dark)"
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
// https://remix.run/api/conventions#default-export
|
||||
// https://remix.run/api/conventions#route-filenames
|
||||
export default function App() {
|
||||
return (
|
||||
<Document>
|
||||
<Layout>
|
||||
<Outlet />
|
||||
</Layout>
|
||||
</Document>
|
||||
);
|
||||
}
|
||||
|
||||
// https://remix.run/docs/en/v1/api/conventions#errorboundary
|
||||
export function ErrorBoundary({ error }: { error: Error }) {
|
||||
console.error(error);
|
||||
return (
|
||||
<Document title="Error!">
|
||||
<Layout>
|
||||
<div>
|
||||
<h1>There was an error</h1>
|
||||
<p>{error.message}</p>
|
||||
<hr />
|
||||
<p>
|
||||
Hey, developer, you should replace this with what you want your
|
||||
users to see.
|
||||
</p>
|
||||
</div>
|
||||
</Layout>
|
||||
</Document>
|
||||
);
|
||||
}
|
||||
|
||||
// https://remix.run/docs/en/v1/api/conventions#catchboundary
|
||||
export function CatchBoundary() {
|
||||
let caught = useCatch();
|
||||
|
||||
let message;
|
||||
switch (caught.status) {
|
||||
case 401:
|
||||
message = (
|
||||
<p>
|
||||
Oops! Looks like you tried to visit a page that you do not have access
|
||||
to.
|
||||
</p>
|
||||
);
|
||||
break;
|
||||
case 404:
|
||||
message = (
|
||||
<p>Oops! Looks like you tried to visit a page that does not exist.</p>
|
||||
);
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(caught.data || caught.statusText);
|
||||
}
|
||||
|
||||
return (
|
||||
<Document title={`${caught.status} ${caught.statusText}`}>
|
||||
<Layout>
|
||||
<h1>
|
||||
{caught.status}: {caught.statusText}
|
||||
</h1>
|
||||
{message}
|
||||
</Layout>
|
||||
</Document>
|
||||
);
|
||||
}
|
||||
|
||||
function Document({
|
||||
children,
|
||||
title
|
||||
}: {
|
||||
children: React.ReactNode;
|
||||
title?: string;
|
||||
}) {
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charSet="utf-8" />
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1" />
|
||||
{title ? <title>{title}</title> : null}
|
||||
<Meta />
|
||||
<Links />
|
||||
</head>
|
||||
<body>
|
||||
{children}
|
||||
<ScrollRestoration />
|
||||
<Scripts />
|
||||
{process.env.NODE_ENV === "development" && <LiveReload />}
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
}
|
||||
|
||||
function Layout({ children }: { children: React.ReactNode }) {
|
||||
return (
|
||||
<div className="remix-app">
|
||||
<header className="remix-app__header">
|
||||
<div className="container remix-app__header-content">
|
||||
<Link to="/" title="Remix" className="remix-app__header-home-link">
|
||||
<RemixLogo />
|
||||
</Link>
|
||||
<nav aria-label="Main navigation" className="remix-app__header-nav">
|
||||
<ul>
|
||||
<li>
|
||||
<Link to="/">Home</Link>
|
||||
</li>
|
||||
<li>
|
||||
<a href="https://remix.run/docs">Remix Docs</a>
|
||||
</li>
|
||||
<li>
|
||||
<a href="https://github.com/remix-run/remix">GitHub</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
</div>
|
||||
</header>
|
||||
<div className="remix-app__main">
|
||||
<div className="container remix-app__main-content">{children}</div>
|
||||
</div>
|
||||
<footer className="remix-app__footer">
|
||||
<div className="container remix-app__footer-content">
|
||||
<p>© You!</p>
|
||||
</div>
|
||||
</footer>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
function RemixLogo() {
|
||||
return (
|
||||
<svg
|
||||
viewBox="0 0 659 165"
|
||||
version="1.1"
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
xmlnsXlink="http://www.w3.org/1999/xlink"
|
||||
aria-labelledby="remix-run-logo-title"
|
||||
role="img"
|
||||
width="106"
|
||||
height="30"
|
||||
fill="currentColor"
|
||||
>
|
||||
<title id="remix-run-logo-title">Remix Logo</title>
|
||||
<path d="M0 161V136H45.5416C53.1486 136 54.8003 141.638 54.8003 145V161H0Z M133.85 124.16C135.3 142.762 135.3 151.482 135.3 161H92.2283C92.2283 158.927 92.2653 157.03 92.3028 155.107C92.4195 149.128 92.5411 142.894 91.5717 130.304C90.2905 111.872 82.3473 107.776 67.7419 107.776H54.8021H0V74.24H69.7918C88.2407 74.24 97.4651 68.632 97.4651 53.784C97.4651 40.728 88.2407 32.816 69.7918 32.816H0V0H77.4788C119.245 0 140 19.712 140 51.2C140 74.752 125.395 90.112 105.665 92.672C122.32 96 132.057 105.472 133.85 124.16Z" />
|
||||
<path d="M229.43 120.576C225.59 129.536 218.422 133.376 207.158 133.376C194.614 133.376 184.374 126.72 183.35 112.64H263.478V101.12C263.478 70.1437 243.254 44.0317 205.11 44.0317C169.526 44.0317 142.902 69.8877 142.902 105.984C142.902 142.336 169.014 164.352 205.622 164.352C235.83 164.352 256.822 149.76 262.71 123.648L229.43 120.576ZM183.862 92.6717C185.398 81.9197 191.286 73.7277 204.598 73.7277C216.886 73.7277 223.542 82.4317 224.054 92.6717H183.862Z" />
|
||||
<path d="M385.256 66.5597C380.392 53.2477 369.896 44.0317 349.672 44.0317C332.52 44.0317 320.232 51.7117 314.088 64.2557V47.1037H272.616V161.28H314.088V105.216C314.088 88.0638 318.952 76.7997 332.52 76.7997C345.064 76.7997 348.136 84.9917 348.136 100.608V161.28H389.608V105.216C389.608 88.0638 394.216 76.7997 408.04 76.7997C420.584 76.7997 423.4 84.9917 423.4 100.608V161.28H464.872V89.5997C464.872 65.7917 455.656 44.0317 424.168 44.0317C404.968 44.0317 391.4 53.7597 385.256 66.5597Z" />
|
||||
<path d="M478.436 47.104V161.28H519.908V47.104H478.436ZM478.18 36.352H520.164V0H478.18V36.352Z" />
|
||||
<path d="M654.54 47.1035H611.788L592.332 74.2395L573.388 47.1035H527.564L568.78 103.168L523.98 161.28H566.732L589.516 130.304L612.3 161.28H658.124L613.068 101.376L654.54 47.1035Z" />
|
||||
</svg>
|
||||
);
|
||||
}
|
||||
44
examples/remix/app/routes/demos/about.tsx
Normal file
44
examples/remix/app/routes/demos/about.tsx
Normal file
@@ -0,0 +1,44 @@
|
||||
import { Outlet } from "remix";
|
||||
import type { MetaFunction, LinksFunction } from "remix";
|
||||
|
||||
import stylesUrl from "~/styles/demos/about.css";
|
||||
|
||||
export let meta: MetaFunction = () => {
|
||||
return {
|
||||
title: "About Remix"
|
||||
};
|
||||
};
|
||||
|
||||
export let links: LinksFunction = () => {
|
||||
return [{ rel: "stylesheet", href: stylesUrl }];
|
||||
};
|
||||
|
||||
export default function Index() {
|
||||
return (
|
||||
<div className="about">
|
||||
<div className="about__intro">
|
||||
<h2>About Us</h2>
|
||||
<p>
|
||||
Ok, so this page isn't really <em>about us</em>, but we did want to
|
||||
show you a few more things Remix can do.
|
||||
</p>
|
||||
<p>
|
||||
Did you notice that things look a little different on this page? The
|
||||
CSS that we import in the route file and include in its{" "}
|
||||
<code>links</code> export is only included on this route and its
|
||||
children.
|
||||
</p>
|
||||
<p>
|
||||
Wait a sec...<em>its children</em>? To understand what we mean by
|
||||
this,{" "}
|
||||
<a href="https://remix.run/tutorial/4-nested-routes-params">
|
||||
read all about nested routes in the docs
|
||||
</a>
|
||||
.
|
||||
</p>
|
||||
<hr />
|
||||
<Outlet />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
17
examples/remix/app/routes/demos/about/index.tsx
Normal file
17
examples/remix/app/routes/demos/about/index.tsx
Normal file
@@ -0,0 +1,17 @@
|
||||
import { Link } from "remix";
|
||||
|
||||
export default function AboutIndex() {
|
||||
return (
|
||||
<div>
|
||||
<p>
|
||||
You are looking at the index route for the <code>/about</code> URL
|
||||
segment, but there are nested routes as well!
|
||||
</p>
|
||||
<p>
|
||||
<strong>
|
||||
<Link to="whoa">Check out one of them here.</Link>
|
||||
</strong>
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
20
examples/remix/app/routes/demos/about/whoa.tsx
Normal file
20
examples/remix/app/routes/demos/about/whoa.tsx
Normal file
@@ -0,0 +1,20 @@
|
||||
import { Link } from "remix";
|
||||
|
||||
export default function AboutIndex() {
|
||||
return (
|
||||
<div>
|
||||
<p>
|
||||
Whoa, this is a nested route! We render the <code>/about</code> layout
|
||||
route component, and its <code>Outlet</code> renders our route
|
||||
component. 🤯
|
||||
</p>
|
||||
<p>
|
||||
<strong>
|
||||
<Link to="..">
|
||||
Go back to the <code>/about</code> index.
|
||||
</Link>
|
||||
</strong>
|
||||
</p>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
101
examples/remix/app/routes/demos/actions.tsx
Normal file
101
examples/remix/app/routes/demos/actions.tsx
Normal file
@@ -0,0 +1,101 @@
|
||||
import { useEffect, useRef } from "react";
|
||||
import type { ActionFunction } from "remix";
|
||||
import { Form, json, useActionData, redirect } from "remix";
|
||||
|
||||
export function meta() {
|
||||
return { title: "Actions Demo" };
|
||||
}
|
||||
|
||||
// When your form sends a POST, the action is called on the server.
|
||||
// - https://remix.run/api/conventions#action
|
||||
// - https://remix.run/guides/data-updates
|
||||
export let action: ActionFunction = async ({ request }) => {
|
||||
let formData = await request.formData();
|
||||
let answer = formData.get("answer");
|
||||
|
||||
// Typical action workflows start with validating the form data that just came
|
||||
// over the network. Clientside validation is fine, but you definitely need it
|
||||
// server side. If there's a problem, return the data and the component
|
||||
// can render it.
|
||||
if (typeof answer !== "string") {
|
||||
return json("Come on, at least try!", { status: 400 });
|
||||
}
|
||||
|
||||
if (answer !== "egg") {
|
||||
return json(`Sorry, ${answer} is not right.`, { status: 400 });
|
||||
}
|
||||
|
||||
// Finally, if the data is valid, you'll typically write to a database, send an
// email, or log the user in, etc. It's recommended to redirect after a
// successful action, even if it's to the same place, so that non-JavaScript
// workflows from the browser don't repost the data if the user clicks back.
|
||||
return redirect("/demos/correct");
|
||||
};
|
||||
|
||||
export default function ActionsDemo() {
|
||||
// https://remix.run/api/remix#useactiondata
|
||||
let actionMessage = useActionData<string>();
|
||||
let answerRef = useRef<HTMLInputElement>(null);
|
||||
|
||||
// This form works without JavaScript, but when we have JavaScript we can make
|
||||
// the experience better by selecting the input on wrong answers! Go ahead, disable
|
||||
// JavaScript in your browser and see what happens.
|
||||
useEffect(() => {
|
||||
if (actionMessage && answerRef.current) {
|
||||
answerRef.current.select();
|
||||
}
|
||||
}, [actionMessage]);
|
||||
|
||||
return (
|
||||
<div className="remix__page">
|
||||
<main>
|
||||
<h2>Actions!</h2>
|
||||
<p>
|
||||
This form submission will send a post request that we handle in our
|
||||
`action` export. Any route can export an action to handle data
|
||||
mutations.
|
||||
</p>
|
||||
<Form method="post" className="remix__form">
|
||||
<h3>Post an Action</h3>
|
||||
<p>
|
||||
<i>What is more useful when it is broken?</i>
|
||||
</p>
|
||||
<label>
|
||||
<div>Answer:</div>
|
||||
<input ref={answerRef} name="answer" type="text" />
|
||||
</label>
|
||||
<div>
|
||||
<button>Answer!</button>
|
||||
</div>
|
||||
{actionMessage ? (
|
||||
<p>
|
||||
<b>{actionMessage}</b>
|
||||
</p>
|
||||
) : null}
|
||||
</Form>
|
||||
</main>
|
||||
|
||||
<aside>
|
||||
<h3>Additional Resources</h3>
|
||||
<ul>
|
||||
<li>
|
||||
Guide:{" "}
|
||||
<a href="https://remix.run/guides/data-writes">Data Writes</a>
|
||||
</li>
|
||||
<li>
|
||||
API:{" "}
|
||||
<a href="https://remix.run/api/conventions#action">
|
||||
Route Action Export
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
API:{" "}
|
||||
<a href="https://remix.run/api/remix#useactiondata">
|
||||
<code>useActionData</code>
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</aside>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
3  examples/remix/app/routes/demos/correct.tsx  Normal file
@@ -0,0 +1,3 @@
export default function NiceWork() {
  return <h1>You got it right!</h1>;
}
43
examples/remix/app/routes/demos/params.tsx
Normal file
43
examples/remix/app/routes/demos/params.tsx
Normal file
@@ -0,0 +1,43 @@
|
||||
import { useCatch, Link, json, useLoaderData, Outlet } from "remix";
|
||||
|
||||
export function meta() {
|
||||
return { title: "Boundaries Demo" };
|
||||
}
|
||||
|
||||
export default function Boundaries() {
|
||||
return (
|
||||
<div className="remix__page">
|
||||
<main>
|
||||
<Outlet />
|
||||
</main>
|
||||
|
||||
<aside>
|
||||
<h2>Click these Links</h2>
|
||||
<ul>
|
||||
<li>
|
||||
<Link to=".">Start over</Link>
|
||||
</li>
|
||||
<li>
|
||||
<Link to="one">
|
||||
Param: <i>one</i>
|
||||
</Link>
|
||||
</li>
|
||||
<li>
|
||||
<Link to="two">
|
||||
Param: <i>two</i>
|
||||
</Link>
|
||||
</li>
|
||||
<li>
|
||||
<Link to="this-record-does-not-exist">This will be a 404</Link>
|
||||
</li>
|
||||
<li>
|
||||
<Link to="shh-its-a-secret">And this will be 401 Unauthorized</Link>
|
||||
</li>
|
||||
<li>
|
||||
<Link to="kaboom">This one will throw an error</Link>
|
||||
</li>
|
||||
</ul>
|
||||
</aside>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
110
examples/remix/app/routes/demos/params/$id.tsx
Normal file
110
examples/remix/app/routes/demos/params/$id.tsx
Normal file
@@ -0,0 +1,110 @@
|
||||
import { useCatch, Link, json, useLoaderData } from "remix";
|
||||
import type { LoaderFunction, MetaFunction } from "remix";
|
||||
|
||||
// The `$` in route filenames becomes a pattern that's parsed from the URL and
|
||||
// passed to your loaders so you can look up data.
|
||||
// - https://remix.run/api/conventions#loader-params
|
||||
export let loader: LoaderFunction = async ({ params }) => {
|
||||
// pretend like we're using params.id to look something up in the db
|
||||
|
||||
if (params.id === "this-record-does-not-exist") {
|
||||
// If the record doesn't exist we can't render the route normally, so
|
||||
// instead we throw a 404 response to stop running code here and show the
|
||||
// user the catch boundary.
|
||||
throw new Response("Not Found", { status: 404 });
|
||||
}
|
||||
|
||||
// now pretend like the record exists but the user just isn't authorized to
|
||||
// see it.
|
||||
if (params.id === "shh-its-a-secret") {
|
||||
// Again, we can't render the component if the user isn't authorized. You
|
||||
// can even put data in the response that might help the user rectify the
|
||||
// issue! Like emailing the webmaster for access to the page. (Oh, right,
|
||||
// `json` is just a Response helper that makes it easier to send JSON
|
||||
// responses).
|
||||
throw json({ webmasterEmail: "hello@remix.run" }, { status: 401 });
|
||||
}
|
||||
|
||||
// Sometimes your code just blows up and you never anticipated it. Remix will
|
||||
// automatically catch it and send the UI to the error boundary.
|
||||
if (params.id === "kaboom") {
|
||||
lol();
|
||||
}
|
||||
|
||||
// but otherwise the record was found, user has access, so we can do whatever
|
||||
// else we needed to in the loader and return the data. (This is boring, we're
|
||||
// just gonna return the params.id).
|
||||
return { param: params.id };
|
||||
};
|
||||
|
||||
export default function ParamDemo() {
|
||||
let data = useLoaderData();
|
||||
return (
|
||||
<h1>
|
||||
The param is <i style={{ color: "red" }}>{data.param}</i>
|
||||
</h1>
|
||||
);
|
||||
}
|
||||
|
||||
// https://remix.run/api/conventions#catchboundary
|
||||
// https://remix.run/api/remix#usecatch
|
||||
// https://remix.run/api/guides/not-found
|
||||
export function CatchBoundary() {
|
||||
let caught = useCatch();
|
||||
|
||||
let message: React.ReactNode;
|
||||
switch (caught.status) {
|
||||
case 401:
|
||||
message = (
|
||||
<p>
|
||||
Looks like you tried to visit a page that you do not have access to.
|
||||
Maybe ask the webmaster ({caught.data.webmasterEmail}) for access.
|
||||
</p>
|
||||
);
break;
|
||||
case 404:
|
||||
message = (
|
||||
<p>Looks like you tried to visit a page that does not exist.</p>
|
||||
);
break;
|
||||
default:
|
||||
message = (
|
||||
<p>
|
||||
There was a problem with your request!
|
||||
<br />
|
||||
{caught.status} {caught.statusText}
|
||||
</p>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
<h2>Oops!</h2>
|
||||
<p>{message}</p>
|
||||
<p>
|
||||
(Isn't it cool that the user gets to stay in context and try a different
|
||||
link in the parts of the UI that didn't blow up?)
|
||||
</p>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
// https://remix.run/api/conventions#errorboundary
|
||||
// https://remix.run/api/guides/not-found
|
||||
export function ErrorBoundary({ error }: { error: Error }) {
|
||||
console.error(error);
|
||||
return (
|
||||
<>
|
||||
<h2>Error!</h2>
|
||||
<p>{error.message}</p>
|
||||
<p>
|
||||
(Isn't it cool that the user gets to stay in context and try a different
|
||||
link in the parts of the UI that didn't blow up?)
|
||||
</p>
|
||||
</>
|
||||
);
|
||||
}
|
||||
|
||||
export let meta: MetaFunction = ({ data }) => {
|
||||
return {
|
||||
title: data ? `Param: ${data.param}` : "Oops...",
|
||||
};
|
||||
};
|
||||
40
examples/remix/app/routes/demos/params/index.tsx
Normal file
40
examples/remix/app/routes/demos/params/index.tsx
Normal file
@@ -0,0 +1,40 @@
|
||||
import { useCatch, Link, json, useLoaderData, Outlet } from "remix";
|
||||
import type { LoaderFunction } from "remix";
|
||||
|
||||
export default function Boundaries() {
|
||||
return (
|
||||
<>
|
||||
<h2>Params</h2>
|
||||
<p>
|
||||
When you name a route segment with $ like{" "}
|
||||
<code>routes/users/$userId.js</code>, the $ segment will be parsed from
|
||||
the URL and sent to your loaders and actions by the same name.
|
||||
</p>
|
||||
<h2>Errors</h2>
|
||||
<p>
|
||||
When a route throws an error in its action, loader, or component,
|
||||
Remix automatically catches it and won't even try to render the component;
it will render the route's ErrorBoundary instead. If the route
|
||||
doesn't have one, it will bubble up to the routes above it until it hits
|
||||
the root.
|
||||
</p>
|
||||
<p>So be as granular as you want with your error handling.</p>
|
||||
<h2>Not Found</h2>
|
||||
<p>
|
||||
(and other{" "}
|
||||
<a href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Status#client_error_responses">
|
||||
client errors
|
||||
</a>
|
||||
)
|
||||
</p>
|
||||
<p>
|
||||
Loaders and Actions can throw a <code>Response</code> instead of an
|
||||
error and Remix will render the CatchBoundary instead of the component.
|
||||
This is great when data you load from a database isn't found. As soon as
|
||||
you know you can't render the component normally, throw a 404 response
|
||||
and send your app into the catch boundary. Just like error boundaries,
|
||||
catch boundaries bubble, too.
|
||||
</p>
|
||||
</>
|
||||
);
|
||||
}
|
||||
100
examples/remix/app/routes/index.tsx
Normal file
100
examples/remix/app/routes/index.tsx
Normal file
@@ -0,0 +1,100 @@
|
||||
import type { MetaFunction, LoaderFunction } from "remix";
|
||||
import { useLoaderData, json, Link } from "remix";
|
||||
|
||||
type IndexData = {
|
||||
resources: Array<{ name: string; url: string }>;
|
||||
demos: Array<{ name: string; to: string }>;
|
||||
};
|
||||
|
||||
// Loaders provide data to components and are only ever called on the server, so
|
||||
// you can connect to a database or run any server side code you want right next
|
||||
// to the component that renders it.
|
||||
// https://remix.run/api/conventions#loader
|
||||
export let loader: LoaderFunction = () => {
|
||||
let data: IndexData = {
|
||||
resources: [
|
||||
{
|
||||
name: "Remix Docs",
|
||||
url: "https://remix.run/docs"
|
||||
},
|
||||
{
|
||||
name: "React Router Docs",
|
||||
url: "https://reactrouter.com/docs"
|
||||
},
|
||||
{
|
||||
name: "Remix Discord",
|
||||
url: "https://discord.gg/VBePs6d"
|
||||
}
|
||||
],
|
||||
demos: [
|
||||
{
|
||||
to: "demos/actions",
|
||||
name: "Actions"
|
||||
},
|
||||
{
|
||||
to: "demos/about",
|
||||
name: "Nested Routes, CSS loading/unloading"
|
||||
},
|
||||
{
|
||||
to: "demos/params",
|
||||
name: "URL Params and Error Boundaries"
|
||||
}
|
||||
]
|
||||
};
|
||||
|
||||
// https://remix.run/api/remix#json
|
||||
return json(data);
|
||||
};
|
||||
|
||||
// https://remix.run/api/conventions#meta
|
||||
export let meta: MetaFunction = () => {
|
||||
return {
|
||||
title: "Remix Starter",
|
||||
description: "Welcome to remix!"
|
||||
};
|
||||
};
|
||||
|
||||
// https://remix.run/guides/routing#index-routes
|
||||
export default function Index() {
|
||||
let data = useLoaderData<IndexData>();
|
||||
|
||||
return (
|
||||
<div className="remix__page">
|
||||
<main>
|
||||
<h2>Welcome to Remix!</h2>
|
||||
<p>We're stoked that you're here. 🥳</p>
|
||||
<p>
|
||||
Feel free to take a look around the code to see how Remix does things,
|
||||
it might be a bit different than what you’re used to. When you're
|
||||
ready to dive deeper, we've got plenty of resources to get you
|
||||
up-and-running quickly.
|
||||
</p>
|
||||
<p>
|
||||
Check out all the demos in this starter, and then just delete the{" "}
|
||||
<code>app/routes/demos</code> and <code>app/styles/demos</code>{" "}
|
||||
folders when you're ready to turn this into your next project.
|
||||
</p>
|
||||
</main>
|
||||
<aside>
|
||||
<h2>Demos In This App</h2>
|
||||
<ul>
|
||||
{data.demos.map(demo => (
|
||||
<li key={demo.to} className="remix__page__resource">
|
||||
<Link to={demo.to} prefetch="intent">
|
||||
{demo.name}
|
||||
</Link>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
<h2>Resources</h2>
|
||||
<ul>
|
||||
{data.resources.map(resource => (
|
||||
<li key={resource.url} className="remix__page__resource">
|
||||
<a href={resource.url}>{resource.name}</a>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
</aside>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
7  examples/remix/app/styles/dark.css  Normal file
@@ -0,0 +1,7 @@
:root {
  --color-foreground: hsl(0, 0%, 100%);
  --color-background: hsl(0, 0%, 7%);
  --color-links: hsl(213, 100%, 73%);
  --color-links-hover: hsl(213, 100%, 80%);
  --color-border: hsl(0, 0%, 25%);
}
26
examples/remix/app/styles/demos/about.css
Normal file
26
examples/remix/app/styles/demos/about.css
Normal file
@@ -0,0 +1,26 @@
|
||||
/*
|
||||
* Whoa whoa whoa, wait a sec...why are we overriding global CSS selectors?
|
||||
* Isn't that kind of scary? How do we know this won't have side effects?
|
||||
*
|
||||
* In Remix, CSS that is included in a route file will *only* show up on that
|
||||
* route (and for nested routes, its children). When the user navigates away
|
||||
* from that route the CSS files linked from those routes will be automatically
|
||||
* unloaded, making your styles much easier to predict and control.
|
||||
*
|
||||
* Read more about styling routes in the docs:
|
||||
* https://remix.run/guides/styling
|
||||
*/
|
||||
|
||||
:root {
|
||||
--color-foreground: hsl(0, 0%, 7%);
|
||||
--color-background: hsl(56, 100%, 50%);
|
||||
--color-links: hsl(345, 56%, 39%);
|
||||
--color-links-hover: hsl(345, 51%, 49%);
|
||||
--color-border: rgb(184, 173, 20);
|
||||
--font-body: ui-monospace, SFMono-Regular, Menlo, Monaco, Consolas,
|
||||
Liberation Mono, Courier New, monospace;
|
||||
}
|
||||
|
||||
.about__intro {
|
||||
max-width: 500px;
|
||||
}
|
||||
216
examples/remix/app/styles/global.css
Normal file
216
examples/remix/app/styles/global.css
Normal file
@@ -0,0 +1,216 @@
|
||||
/*
|
||||
* You can just delete everything here or keep whatever you like, it's just a
|
||||
* quick baseline!
|
||||
*/
|
||||
:root {
|
||||
--color-foreground: hsl(0, 0%, 7%);
|
||||
--color-background: hsl(0, 0%, 100%);
|
||||
--color-links: hsl(213, 100%, 52%);
|
||||
--color-links-hover: hsl(213, 100%, 43%);
|
||||
--color-border: hsl(0, 0%, 82%);
|
||||
--font-body: -apple-system, "Segoe UI", Helvetica Neue, Helvetica, Roboto,
|
||||
Arial, sans-serif, system-ui, "Apple Color Emoji", "Segoe UI Emoji";
|
||||
}
|
||||
|
||||
html {
|
||||
box-sizing: border-box;
|
||||
}
|
||||
|
||||
*,
|
||||
*::before,
|
||||
*::after {
|
||||
box-sizing: inherit;
|
||||
}
|
||||
|
||||
:-moz-focusring {
|
||||
outline: auto;
|
||||
}
|
||||
|
||||
:focus {
|
||||
outline: var(--color-links) solid 2px;
|
||||
outline-offset: 2px;
|
||||
}
|
||||
|
||||
html,
|
||||
body {
|
||||
padding: 0;
|
||||
margin: 0;
|
||||
background-color: var(--color-background);
|
||||
color: var(--color-foreground);
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: var(--font-body);
|
||||
line-height: 1.5;
|
||||
}
|
||||
|
||||
a {
|
||||
color: var(--color-links);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: var(--color-links-hover);
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
hr {
|
||||
display: block;
|
||||
height: 1px;
|
||||
border: 0;
|
||||
background-color: var(--color-border);
|
||||
margin-top: 2rem;
|
||||
margin-bottom: 2rem;
|
||||
}
|
||||
|
||||
input:where([type="text"]),
|
||||
input:where([type="search"]) {
|
||||
display: block;
|
||||
border: 1px solid var(--color-border);
|
||||
width: 100%;
|
||||
font: inherit;
|
||||
line-height: 1;
|
||||
height: calc(1ch + 1.5em);
|
||||
padding-right: 0.5em;
|
||||
padding-left: 0.5em;
|
||||
background-color: hsl(0 0% 100% / 20%);
|
||||
color: var(--color-foreground);
|
||||
}
|
||||
|
||||
.sr-only {
|
||||
position: absolute;
|
||||
width: 1px;
|
||||
height: 1px;
|
||||
padding: 0;
|
||||
margin: -1px;
|
||||
overflow: hidden;
|
||||
clip: rect(0, 0, 0, 0);
|
||||
white-space: nowrap;
|
||||
border-width: 0;
|
||||
}
|
||||
|
||||
.container {
|
||||
--gutter: 16px;
|
||||
width: 1024px;
|
||||
max-width: calc(100% - var(--gutter) * 2);
|
||||
margin-right: auto;
|
||||
margin-left: auto;
|
||||
}
|
||||
|
||||
.remix-app {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
min-height: 100vh;
|
||||
min-height: calc(100vh - env(safe-area-inset-bottom));
|
||||
}
|
||||
|
||||
.remix-app > * {
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.remix-app__header {
|
||||
padding-top: 1rem;
|
||||
padding-bottom: 1rem;
|
||||
border-bottom: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.remix-app__header-content {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.remix-app__header-home-link {
|
||||
width: 106px;
|
||||
height: 30px;
|
||||
color: var(--color-foreground);
|
||||
}
|
||||
|
||||
.remix-app__header-nav ul {
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: 1.5em;
|
||||
}
|
||||
|
||||
.remix-app__header-nav li {
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
.remix-app__main {
|
||||
flex: 1 1 100%;
|
||||
}
|
||||
|
||||
.remix-app__footer {
|
||||
padding-top: 1rem;
|
||||
padding-bottom: 1rem;
|
||||
border-top: 1px solid var(--color-border);
|
||||
}
|
||||
|
||||
.remix-app__footer-content {
|
||||
display: flex;
|
||||
justify-content: center;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.remix__page {
|
||||
--gap: 1rem;
|
||||
--space: 2rem;
|
||||
display: grid;
|
||||
grid-auto-rows: min-content;
|
||||
gap: var(--gap);
|
||||
padding-top: var(--space);
|
||||
padding-bottom: var(--space);
|
||||
}
|
||||
|
||||
@media print, screen and (min-width: 640px) {
|
||||
.remix__page {
|
||||
--gap: 2rem;
|
||||
grid-auto-rows: unset;
|
||||
grid-template-columns: repeat(2, 1fr);
|
||||
}
|
||||
}
|
||||
|
||||
@media screen and (min-width: 1024px) {
|
||||
.remix__page {
|
||||
--gap: 4rem;
|
||||
}
|
||||
}
|
||||
|
||||
.remix__page > main > :first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.remix__page > main > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.remix__page > aside {
|
||||
margin: 0;
|
||||
padding: 1.5ch 2ch;
|
||||
border: solid 1px var(--color-border);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
|
||||
.remix__page > aside > :first-child {
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.remix__page > aside > :last-child {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
.remix__form {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 1rem;
|
||||
padding: 1rem;
|
||||
border: 1px solid var(--color-border);
|
||||
border-radius: 0.5rem;
|
||||
}
|
||||
|
||||
.remix__form > * {
|
||||
margin-top: 0;
|
||||
margin-bottom: 0;
|
||||
}
|
||||
8345  examples/remix/package-lock.json  generated  Normal file
File diff suppressed because it is too large

29  examples/remix/package.json  Normal file
@@ -0,0 +1,29 @@
{
  "private": true,
  "name": "remix-app-template",
  "description": "",
  "license": "",
  "scripts": {
    "build": "remix build",
    "dev": "remix dev",
    "postinstall": "remix setup node"
  },
  "dependencies": {
    "@remix-run/react": "^1.0.6",
    "react": "^17.0.2",
    "react-dom": "^17.0.2",
    "remix": "^1.0.6",
    "@remix-run/serve": "^1.0.6",
    "@remix-run/vercel": "^1.0.6"
  },
  "devDependencies": {
    "@remix-run/dev": "^1.0.6",
    "@types/react": "^17.0.24",
    "@types/react-dom": "^17.0.9",
    "typescript": "^4.1.2"
  },
  "engines": {
    "node": ">=14"
  },
  "sideEffects": false
}
BIN  examples/remix/public/favicon.ico  Normal file
Binary file not shown. (After: Size 17 KiB)
9  examples/remix/remix.config.js  Normal file
@@ -0,0 +1,9 @@
/**
 * @type {import('@remix-run/dev/config').AppConfig}
 */
module.exports = {
  appDirectory: "app",
  browserBuildDirectory: "public/build",
  publicPath: "/build/",
  serverBuildDirectory: "api/_build"
};
2  examples/remix/remix.env.d.ts  vendored  Normal file
@@ -0,0 +1,2 @@
/// <reference types="@remix-run/dev" />
/// <reference types="@remix-run/node/globals" />
18  examples/remix/tsconfig.json  Normal file
@@ -0,0 +1,18 @@
{
  "include": ["remix.env.d.ts", "**/*.ts", "**/*.tsx"],
  "compilerOptions": {
    "lib": ["DOM", "DOM.Iterable", "ES2019"],
    "esModuleInterop": true,
    "jsx": "react-jsx",
    "moduleResolution": "node",
    "resolveJsonModule": true,
    "target": "ES2019",
    "strict": true,
    "paths": {
      "~/*": ["./app/*"]
    },

    // Remix takes care of building everything in `remix build`.
    "noEmit": true
  }
}
7  examples/remix/vercel.json  Normal file
@@ -0,0 +1,7 @@
{
  "build": {
    "env": {
      "ENABLE_FILE_SYSTEM_API": "1"
    }
  }
}
@@ -24,7 +24,7 @@
    "eslint-config-prettier": "8.3.0",
    "eslint-plugin-jest": "24.3.6",
    "husky": "6.0.0",
    "jest": "27.0.6",
    "jest": "27.3.1",
    "json5": "2.1.1",
    "lint-staged": "9.2.5",
    "node-fetch": "2.6.1",
@@ -1,6 +1,6 @@
{
  "name": "@vercel/build-utils",
  "version": "2.12.3-canary.13",
  "version": "2.12.3-canary.31",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.js",
@@ -21,7 +21,7 @@
  "@types/async-retry": "^1.2.1",
  "@types/cross-spawn": "6.0.0",
  "@types/end-of-stream": "^1.4.0",
  "@types/fs-extra": "^5.0.5",
  "@types/fs-extra": "9.0.13",
  "@types/glob": "^7.1.1",
  "@types/jest": "27.0.1",
  "@types/js-yaml": "3.12.1",
@@ -30,7 +30,7 @@
  "@types/node-fetch": "^2.1.6",
  "@types/semver": "6.0.0",
  "@types/yazl": "^2.4.1",
  "@vercel/frameworks": "0.5.1-canary.9",
  "@vercel/frameworks": "0.5.1-canary.16",
  "@vercel/ncc": "0.24.0",
  "aggregate-error": "3.0.1",
  "async-retry": "1.2.3",
@@ -38,7 +38,7 @@
  "boxen": "4.2.0",
  "cross-spawn": "6.0.5",
  "end-of-stream": "1.4.1",
  "fs-extra": "7.0.0",
  "fs-extra": "10.0.0",
  "glob": "7.1.3",
  "into-stream": "5.0.0",
  "js-yaml": "3.13.1",
366
packages/build-utils/src/convert-runtime-to-plugin.ts
Normal file
366
packages/build-utils/src/convert-runtime-to-plugin.ts
Normal file
@@ -0,0 +1,366 @@
|
||||
import fs from 'fs-extra';
|
||||
import { join, dirname, parse, relative } from 'path';
|
||||
import glob from './fs/glob';
|
||||
import { normalizePath } from './fs/normalize-path';
|
||||
import { FILES_SYMBOL, Lambda } from './lambda';
|
||||
import type FileBlob from './file-blob';
|
||||
import type { BuildOptions, Files } from './types';
|
||||
import { getIgnoreFilter } from '.';
|
||||
|
||||
// `.output` was already created by the Build Command, so we have
|
||||
// to ensure its contents don't get bundled into the Lambda. Similarly,
|
||||
// we don't want to bundle anything from `.vercel` either. Lastly,
|
||||
// Builders/Runtimes didn't have `vercel.json` or `now.json`.
|
||||
const ignoredPaths = ['.output', '.vercel', 'vercel.json', 'now.json'];
|
||||
|
||||
const shouldIgnorePath = (
|
||||
file: string,
|
||||
ignoreFilter: any,
|
||||
ignoreFile: boolean
|
||||
) => {
|
||||
const isNative = ignoredPaths.some(item => {
|
||||
return file.startsWith(item);
|
||||
});
|
||||
|
||||
if (!ignoreFile) {
|
||||
return isNative;
|
||||
}
|
||||
|
||||
return isNative || ignoreFilter(file);
|
||||
};
|
||||
|
||||
const getSourceFiles = async (workPath: string, ignoreFilter: any) => {
|
||||
const list = await glob('**', {
|
||||
cwd: workPath,
|
||||
});
|
||||
|
||||
// We're not passing this as an `ignore` filter to the `glob` function above,
|
||||
// so that we can re-use exactly the same `getIgnoreFilter` method that the
|
||||
// Build Step uses (literally the same code). Note that this exclusion only applies
|
||||
// when deploying. Locally, another exclusion is needed, which is handled
|
||||
// further below in the `convertRuntimeToPlugin` function.
|
||||
for (const file in list) {
|
||||
if (shouldIgnorePath(file, ignoreFilter, true)) {
|
||||
delete list[file];
|
||||
}
|
||||
}
|
||||
|
||||
return list;
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert legacy Runtime to a Plugin.
|
||||
* @param buildRuntime - a legacy build() function from a Runtime
|
||||
* @param packageName - the name of the package, for example `vercel-plugin-python`
|
||||
* @param ext - the file extension, for example `.py`
|
||||
*/
|
||||
export function convertRuntimeToPlugin(
|
||||
buildRuntime: (options: BuildOptions) => Promise<{ output: Lambda }>,
|
||||
packageName: string,
|
||||
ext: string
|
||||
) {
|
||||
// This `build()` signature should match `plugin.build()` signature in `vercel build`.
|
||||
return async function build({ workPath }: { workPath: string }) {
|
||||
// We also don't want to provide any files to Runtimes that were ignored
|
||||
// through `.vercelignore` or `.nowignore`, because the Build Step does the same.
|
||||
const ignoreFilter = await getIgnoreFilter(workPath);
|
||||
|
||||
// Retrieve the files that are currently available on the File System,
|
||||
// before the Legacy Runtime has even started to build.
|
||||
const sourceFilesPreBuild = await getSourceFiles(workPath, ignoreFilter);
|
||||
|
||||
// Instead of doing another `glob` to get all the matching source files,
|
||||
// we'll filter the list of existing files down to only the ones
|
||||
// that are matching the entrypoint pattern, so we're first creating
|
||||
// a clean new list to begin.
|
||||
const entrypoints = Object.assign({}, sourceFilesPreBuild);
|
||||
|
||||
const entrypointMatch = new RegExp(`^api/.*${ext}$`);
|
||||
|
||||
// Up next, we'll strip out the files from the list of entrypoints
|
||||
// that aren't actually considered entrypoints.
|
||||
for (const file in entrypoints) {
|
||||
if (!entrypointMatch.test(file)) {
|
||||
delete entrypoints[file];
|
||||
}
|
||||
}
|
||||
|
||||
const pages: { [key: string]: any } = {};
|
||||
const pluginName = packageName.replace('vercel-plugin-', '');
|
||||
|
||||
const traceDir = join(
|
||||
workPath,
|
||||
`.output`,
|
||||
`inputs`,
|
||||
// Legacy Runtimes can only provide API Routes, so that's
|
||||
// why we can use this prefix for all of them. Here, we have to
|
||||
// make sure to not use a cryptic hash name, because people
|
||||
// need to be able to easily inspect the output.
|
||||
`api-routes-${pluginName}`
|
||||
);
|
||||
|
||||
await fs.ensureDir(traceDir);
|
||||
|
||||
for (const entrypoint of Object.keys(entrypoints)) {
|
||||
const { output } = await buildRuntime({
|
||||
files: sourceFilesPreBuild,
|
||||
entrypoint,
|
||||
workPath,
|
||||
config: {
|
||||
zeroConfig: true,
|
||||
},
|
||||
meta: {
|
||||
avoidTopLevelInstall: true,
|
||||
},
|
||||
});
|
||||
|
||||
// Legacy Runtimes tend to pollute the `workPath` with compiled results,
|
||||
// because the `workPath` used to be a place where they could
|
||||
// just put anything, but nowadays it's the working directory of the `vercel build`
|
||||
// command, which is the place where the developer keeps their source files,
|
||||
// so we don't want to pollute this space unnecessarily. That means we have to clean
|
||||
// up files that were created by the build, which is done further below.
|
||||
const sourceFilesAfterBuild = await getSourceFiles(
|
||||
workPath,
|
||||
ignoreFilter
|
||||
);
|
||||
|
||||
// Further down, we will need the filename of the Lambda handler
|
||||
// for placing it inside `server/pages/api`, but because Legacy Runtimes
|
||||
// don't expose the filename directly, we have to construct it
|
||||
// from the handler name, and then find the matching file further below,
|
||||
// because we don't yet know its extension here.
|
||||
const handler = output.handler;
|
||||
const handlerMethod = handler.split('.').reverse()[0];
|
||||
const handlerFileName = handler.replace(`.${handlerMethod}`, '');
|
||||
|
||||
pages[entrypoint] = {
|
||||
handler: handler,
|
||||
runtime: output.runtime,
|
||||
memory: output.memory,
|
||||
maxDuration: output.maxDuration,
|
||||
environment: output.environment,
|
||||
allowQuery: output.allowQuery,
|
||||
};
|
||||
|
||||
// @ts-ignore This symbol is a private API
|
||||
const lambdaFiles: Files = output[FILES_SYMBOL];
|
||||
|
||||
// When deploying, the `files` that are passed to the Legacy Runtimes already
|
||||
// have certain files that are ignored stripped, but locally, that list of
|
||||
// files isn't used by the Legacy Runtimes, so we need to apply the filters
|
||||
// to the outputs that they are returning instead.
|
||||
for (const file in lambdaFiles) {
|
||||
if (shouldIgnorePath(file, ignoreFilter, false)) {
|
||||
delete lambdaFiles[file];
|
||||
}
|
||||
}
|
||||
|
||||
const handlerFilePath = Object.keys(lambdaFiles).find(item => {
|
||||
return parse(item).name === handlerFileName;
|
||||
});
|
||||
|
||||
const handlerFileOrigin = lambdaFiles[handlerFilePath || ''].fsPath;
|
||||
|
||||
if (!handlerFileOrigin) {
|
||||
throw new Error(
|
||||
`Could not find a handler file. Please ensure that the list of \`files\` defined for the returned \`Lambda\` contains a file with the name ${handlerFileName} (+ any extension).`
|
||||
);
|
||||
}
|
||||
|
||||
const entry = join(workPath, '.output', 'server', 'pages', entrypoint);
|
||||
|
||||
await fs.ensureDir(dirname(entry));
|
||||
await linkOrCopy(handlerFileOrigin, entry);
|
||||
|
||||
const toRemove = [];
|
||||
|
||||
// You can find more details about this at the point where the
|
||||
// `sourceFilesAfterBuild` is created originally.
|
||||
for (const file in sourceFilesAfterBuild) {
|
||||
if (!sourceFilesPreBuild[file]) {
|
||||
const path = sourceFilesAfterBuild[file].fsPath;
|
||||
toRemove.push(fs.remove(path));
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(toRemove);
|
||||
|
||||
const tracedFiles: {
|
||||
absolutePath: string;
|
||||
relativePath: string;
|
||||
}[] = [];
|
||||
|
||||
Object.entries(lambdaFiles).forEach(async ([relPath, file]) => {
|
||||
const newPath = join(traceDir, relPath);
|
||||
|
||||
// The handler was already moved into position above.
|
||||
if (relPath === handlerFilePath) {
|
||||
return;
|
||||
}
|
||||
|
||||
tracedFiles.push({ absolutePath: newPath, relativePath: relPath });
|
||||
|
||||
if (file.fsPath) {
|
||||
await linkOrCopy(file.fsPath, newPath);
|
||||
} else if (file.type === 'FileBlob') {
|
||||
const { data, mode } = file as FileBlob;
|
||||
await fs.writeFile(newPath, data, { mode });
|
||||
} else {
|
||||
throw new Error(`Unknown file type: ${file.type}`);
|
||||
}
|
||||
});
|
||||
|
||||
const nft = join(
|
||||
workPath,
|
||||
'.output',
|
||||
'server',
|
||||
'pages',
|
||||
`${entrypoint}.nft.json`
|
||||
);
|
||||
|
||||
const json = JSON.stringify({
|
||||
version: 1,
|
||||
files: tracedFiles.map(file => ({
|
||||
input: normalizePath(relative(nft, file.absolutePath)),
|
||||
output: normalizePath(file.relativePath),
|
||||
})),
|
||||
});
|
||||
|
||||
await fs.ensureDir(dirname(nft));
|
||||
await fs.writeFile(nft, json);
|
||||
}
|
||||
|
||||
await updateFunctionsManifest({ workPath, pages });
|
||||
};
|
||||
}
|
||||
|
||||
async function linkOrCopy(existingPath: string, newPath: string) {
|
||||
try {
|
||||
await fs.createLink(existingPath, newPath);
|
||||
} catch (err: any) {
|
||||
if (err.code !== 'EEXIST') {
|
||||
await fs.copyFile(existingPath, newPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function readJson(filePath: string): Promise<{ [key: string]: any }> {
|
||||
try {
|
||||
const str = await fs.readFile(filePath, 'utf8');
|
||||
return JSON.parse(str);
|
||||
} catch (err) {
|
||||
if (err.code === 'ENOENT') {
|
||||
return {};
|
||||
}
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* If `.output/functions-manifest.json` exists, append to the pages
|
||||
* property. Otherwise write a new file.
|
||||
*/
|
||||
export async function updateFunctionsManifest({
|
||||
workPath,
|
||||
pages,
|
||||
}: {
|
||||
workPath: string;
|
||||
pages: { [key: string]: any };
|
||||
}) {
|
||||
const functionsManifestPath = join(
|
||||
workPath,
|
||||
'.output',
|
||||
'functions-manifest.json'
|
||||
);
|
||||
const functionsManifest = await readJson(functionsManifestPath);
|
||||
|
||||
if (!functionsManifest.version) functionsManifest.version = 1;
|
||||
if (!functionsManifest.pages) functionsManifest.pages = {};
|
||||
|
||||
for (const [pageKey, pageConfig] of Object.entries(pages)) {
|
||||
functionsManifest.pages[pageKey] = { ...pageConfig };
|
||||
}
|
||||
|
||||
await fs.writeFile(functionsManifestPath, JSON.stringify(functionsManifest));
|
||||
}
|
||||
|
||||
/**
|
||||
* Append routes to the `routes-manifest.json` file.
|
||||
* If the file does not exist, it will be created.
|
||||
*/
|
||||
export async function updateRoutesManifest({
|
||||
workPath,
|
||||
redirects,
|
||||
rewrites,
|
||||
headers,
|
||||
dynamicRoutes,
|
||||
staticRoutes,
|
||||
}: {
|
||||
workPath: string;
|
||||
redirects?: {
|
||||
source: string;
|
||||
destination: string;
|
||||
statusCode: number;
|
||||
regex: string;
|
||||
}[];
|
||||
rewrites?: {
|
||||
source: string;
|
||||
destination: string;
|
||||
regex: string;
|
||||
}[];
|
||||
headers?: {
|
||||
source: string;
|
||||
headers: {
|
||||
key: string;
|
||||
value: string;
|
||||
}[];
|
||||
regex: string;
|
||||
}[];
|
||||
dynamicRoutes?: {
|
||||
page: string;
|
||||
regex: string;
|
||||
namedRegex?: string;
|
||||
routeKeys?: { [named: string]: string };
|
||||
}[];
|
||||
staticRoutes?: {
|
||||
page: string;
|
||||
regex: string;
|
||||
namedRegex?: string;
|
||||
routeKeys?: { [named: string]: string };
|
||||
}[];
|
||||
}) {
|
||||
const routesManifestPath = join(workPath, '.output', 'routes-manifest.json');
|
||||
|
||||
const routesManifest = await readJson(routesManifestPath);
|
||||
|
||||
if (!routesManifest.version) routesManifest.version = 3;
|
||||
if (routesManifest.pages404 === undefined) routesManifest.pages404 = true;
|
||||
|
||||
if (redirects) {
|
||||
if (!routesManifest.redirects) routesManifest.redirects = [];
|
||||
routesManifest.redirects.push(...redirects);
|
||||
}
|
||||
|
||||
if (rewrites) {
|
||||
if (!routesManifest.rewrites) routesManifest.rewrites = [];
|
||||
routesManifest.rewrites.push(...rewrites);
|
||||
}
|
||||
|
||||
if (headers) {
|
||||
if (!routesManifest.headers) routesManifest.headers = [];
|
||||
routesManifest.headers.push(...headers);
|
||||
}
|
||||
|
||||
if (dynamicRoutes) {
|
||||
if (!routesManifest.dynamicRoutes) routesManifest.dynamicRoutes = [];
|
||||
routesManifest.dynamicRoutes.push(...dynamicRoutes);
|
||||
}
|
||||
|
||||
if (staticRoutes) {
|
||||
if (!routesManifest.staticRoutes) routesManifest.staticRoutes = [];
|
||||
routesManifest.staticRoutes.push(...staticRoutes);
|
||||
}
|
||||
|
||||
await fs.writeFile(routesManifestPath, JSON.stringify(routesManifest));
|
||||
}
|
||||
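For orientation, the new `convert-runtime-to-plugin.ts` above exposes `convertRuntimeToPlugin(buildRuntime, packageName, ext)` together with the `updateFunctionsManifest` and `updateRoutesManifest` helpers. A hedged sketch of how a plugin package might wrap a legacy Runtime with it; the import paths, the plugin name, and the re-export from `@vercel/build-utils` are illustrative assumptions rather than facts taken from this diff:

```ts
// Hypothetical entry point of a package such as `vercel-plugin-python`.
// Assumes the legacy Runtime exports a build() that resolves to { output: Lambda },
// which is the signature convertRuntimeToPlugin expects.
import { convertRuntimeToPlugin } from '@vercel/build-utils';
import { build as legacyBuild } from '@vercel/python';

// The returned function has the `({ workPath }) => Promise<void>` shape that
// `vercel build` calls for plugins: it globs `api/**/*.py` entrypoints, runs the
// legacy Runtime once per entrypoint, and writes the results into `.output/`
// (server/pages, inputs/api-routes-python, functions-manifest.json), as
// implemented in the file above.
export const build = convertRuntimeToPlugin(
  legacyBuild as any,
  'vercel-plugin-python',
  '.py'
);
```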
@@ -96,6 +96,7 @@ export async function detectBuilders(
|
||||
redirectRoutes: Route[] | null;
|
||||
rewriteRoutes: Route[] | null;
|
||||
errorRoutes: Route[] | null;
|
||||
limitedRoutes: LimitedRoutes | null;
|
||||
}> {
|
||||
const errors: ErrorResponse[] = [];
|
||||
const warnings: ErrorResponse[] = [];
|
||||
@@ -114,6 +115,7 @@ export async function detectBuilders(
|
||||
redirectRoutes: null,
|
||||
rewriteRoutes: null,
|
||||
errorRoutes: null,
|
||||
limitedRoutes: null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -179,6 +181,7 @@ export async function detectBuilders(
|
||||
redirectRoutes: null,
|
||||
rewriteRoutes: null,
|
||||
errorRoutes: null,
|
||||
limitedRoutes: null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -257,6 +260,7 @@ export async function detectBuilders(
|
||||
defaultRoutes: null,
|
||||
rewriteRoutes: null,
|
||||
errorRoutes: null,
|
||||
limitedRoutes: null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -299,6 +303,7 @@ export async function detectBuilders(
|
||||
defaultRoutes: null,
|
||||
rewriteRoutes: null,
|
||||
errorRoutes: null,
|
||||
limitedRoutes: null,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -326,6 +331,7 @@ export async function detectBuilders(
|
||||
}
|
||||
|
||||
const routesResult = getRouteResult(
|
||||
pkg,
|
||||
apiRoutes,
|
||||
dynamicRoutes,
|
||||
usedOutputDirectory,
|
||||
@@ -342,6 +348,7 @@ export async function detectBuilders(
|
||||
defaultRoutes: routesResult.defaultRoutes,
|
||||
rewriteRoutes: routesResult.rewriteRoutes,
|
||||
errorRoutes: routesResult.errorRoutes,
|
||||
limitedRoutes: routesResult.limitedRoutes,
|
||||
};
|
||||
}
|
||||
|
||||
@@ -932,7 +939,14 @@ function createRouteFromPath(
|
||||
return { route, isDynamic };
|
||||
}
|
||||
|
||||
interface LimitedRoutes {
|
||||
defaultRoutes: Route[];
|
||||
redirectRoutes: Route[];
|
||||
rewriteRoutes: Route[];
|
||||
}
|
||||
|
||||
function getRouteResult(
|
||||
pkg: PackageJson | undefined | null,
|
||||
apiRoutes: Source[],
|
||||
dynamicRoutes: Source[],
|
||||
outputDirectory: string,
|
||||
@@ -944,11 +958,18 @@ function getRouteResult(
|
||||
redirectRoutes: Route[];
|
||||
rewriteRoutes: Route[];
|
||||
errorRoutes: Route[];
|
||||
limitedRoutes: LimitedRoutes;
|
||||
} {
|
||||
const deps = Object.assign({}, pkg?.dependencies, pkg?.devDependencies);
|
||||
const defaultRoutes: Route[] = [];
|
||||
const redirectRoutes: Route[] = [];
|
||||
const rewriteRoutes: Route[] = [];
|
||||
const errorRoutes: Route[] = [];
|
||||
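// limitedRoutes mirrors the route lists above, but its extension-based entries only cover
// runtimes whose CLI plugin is actually listed in package.json (see the checks below).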
const limitedRoutes: LimitedRoutes = {
|
||||
defaultRoutes: [],
|
||||
redirectRoutes: [],
|
||||
rewriteRoutes: [],
|
||||
};
|
||||
const framework = frontendBuilder?.config?.framework || '';
|
||||
const isNextjs =
|
||||
framework === 'nextjs' || isOfficialRuntime('next', frontendBuilder?.use);
|
||||
@@ -956,14 +977,43 @@ function getRouteResult(
|
||||
|
||||
if (apiRoutes && apiRoutes.length > 0) {
|
||||
if (options.featHandleMiss) {
// Exclude extension names if the corresponding plugin is not found in package.json
// detectBuilders({ignoreRoutesForBuilders: ['@vercel/python']})
// return a copy of routes.
// We should exclude errorRoutes and
const extSet = detectApiExtensions(apiBuilders);
|
||||
const withTag = options.tag ? `@${options.tag}` : '';
|
||||
const extSetLimited = detectApiExtensions(
|
||||
apiBuilders.filter(b => {
|
||||
if (
|
||||
b.use === `@vercel/python${withTag}` &&
|
||||
!('vercel-plugin-python' in deps)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
b.use === `@vercel/go${withTag}` &&
|
||||
!('vercel-plugin-go' in deps)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
if (
|
||||
b.use === `@vercel/ruby${withTag}` &&
|
||||
!('vercel-plugin-ruby' in deps)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
})
|
||||
);
|
||||
|
||||
if (extSet.size > 0) {
|
||||
const exts = Array.from(extSet)
|
||||
const extGroup = `(?:\\.(?:${Array.from(extSet)
|
||||
.map(ext => ext.slice(1))
|
||||
.join('|');
|
||||
|
||||
const extGroup = `(?:\\.(?:${exts}))`;
|
||||
.join('|')}))`;
|
||||
const extGroupLimited = `(?:\\.(?:${Array.from(extSetLimited)
|
||||
.map(ext => ext.slice(1))
|
||||
.join('|')}))`;
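// e.g. a detected extension set of ['.js', '.py'] produces the group `(?:\.(?:js|py))`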
|
||||
|
||||
if (options.cleanUrls) {
|
||||
redirectRoutes.push({
|
||||
@@ -979,6 +1029,20 @@ function getRouteResult(
|
||||
},
|
||||
status: 308,
|
||||
});
|
||||
|
||||
limitedRoutes.redirectRoutes.push({
|
||||
src: `^/(api(?:.+)?)/index${extGroupLimited}?/?$`,
|
||||
headers: { Location: options.trailingSlash ? '/$1/' : '/$1' },
|
||||
status: 308,
|
||||
});
|
||||
|
||||
limitedRoutes.redirectRoutes.push({
|
||||
src: `^/api/(.+)${extGroupLimited}/?$`,
|
||||
headers: {
|
||||
Location: options.trailingSlash ? '/api/$1/' : '/api/$1',
|
||||
},
|
||||
status: 308,
|
||||
});
|
||||
} else {
|
||||
defaultRoutes.push({ handle: 'miss' });
|
||||
defaultRoutes.push({
|
||||
@@ -986,10 +1050,18 @@ function getRouteResult(
|
||||
dest: '/api/$1',
|
||||
check: true,
|
||||
});
|
||||
|
||||
limitedRoutes.defaultRoutes.push({ handle: 'miss' });
|
||||
limitedRoutes.defaultRoutes.push({
|
||||
src: `^/api/(.+)${extGroupLimited}$`,
|
||||
dest: '/api/$1',
|
||||
check: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
rewriteRoutes.push(...dynamicRoutes);
|
||||
limitedRoutes.rewriteRoutes.push(...dynamicRoutes);
|
||||
|
||||
if (typeof ignoreRuntimes === 'undefined') {
|
||||
// This route is only necessary to hide the directory listing
|
||||
@@ -1040,6 +1112,7 @@ function getRouteResult(
|
||||
redirectRoutes,
|
||||
rewriteRoutes,
|
||||
errorRoutes,
|
||||
limitedRoutes,
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ import assert from 'assert';
import vanillaGlob_ from 'glob';
import { promisify } from 'util';
import { lstat, Stats } from 'fs-extra';
import { normalizePath } from './normalize-path';
import FileFsRef from '../file-fs-ref';

export type GlobOptions = vanillaGlob_.IOptions;
@@ -45,7 +46,7 @@ export default async function glob(
const files = await vanillaGlob(pattern, options);

for (const relativePath of files) {
const fsPath = path.join(options.cwd!, relativePath).replace(/\\/g, '/');
const fsPath = normalizePath(path.join(options.cwd!, relativePath));
let stat: Stats = options.statCache![fsPath] as Stats;
assert(
stat,
8
packages/build-utils/src/fs/normalize-path.ts
Normal file
@@ -0,0 +1,8 @@
const isWin = process.platform === 'win32';

/**
* Convert Windows separators to Unix separators.
*/
export function normalizePath(p: string): string {
return isWin ? p.replace(/\\/g, '/') : p;
}
84
packages/build-utils/src/get-ignore-filter.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import path from 'path';
|
||||
import fs from 'fs-extra';
|
||||
import ignore from 'ignore';
|
||||
|
||||
interface CodedError extends Error {
|
||||
code: string;
|
||||
}
|
||||
|
||||
function isCodedError(error: unknown): error is CodedError {
|
||||
return (
|
||||
error !== null &&
|
||||
error !== undefined &&
|
||||
(error as CodedError).code !== undefined
|
||||
);
|
||||
}
|
||||
|
||||
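// Strips a leading "./" from each line of the ignore file so the patterns are
// plain relative paths for the `ignore` matcher.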
function clearRelative(s: string) {
|
||||
return s.replace(/(\n|^)\.\//g, '$1');
|
||||
}
|
||||
|
||||
export default async function (
|
||||
downloadPath: string,
|
||||
rootDirectory?: string | undefined
|
||||
) {
|
||||
const readFile = async (p: string) => {
|
||||
try {
|
||||
return await fs.readFile(p, 'utf8');
|
||||
} catch (error: any) {
|
||||
if (
|
||||
error.code === 'ENOENT' ||
|
||||
(error instanceof Error && error.message.includes('ENOENT'))
|
||||
) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
const vercelIgnorePath = path.join(
|
||||
downloadPath,
|
||||
rootDirectory || '',
|
||||
'.vercelignore'
|
||||
);
|
||||
const nowIgnorePath = path.join(
|
||||
downloadPath,
|
||||
rootDirectory || '',
|
||||
'.nowignore'
|
||||
);
|
||||
const ignoreContents = [];
|
||||
|
||||
try {
|
||||
ignoreContents.push(
|
||||
...(
|
||||
await Promise.all([readFile(vercelIgnorePath), readFile(nowIgnorePath)])
|
||||
).filter(Boolean)
|
||||
);
|
||||
} catch (error) {
|
||||
if (isCodedError(error) && error.code === 'ENOTDIR') {
|
||||
console.log(`Warning: Cannot read ignore file from ${vercelIgnorePath}`);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
if (ignoreContents.length === 2) {
|
||||
throw new Error(
|
||||
'Cannot use both a `.vercelignore` and `.nowignore` file. Please delete the `.nowignore` file.'
|
||||
);
|
||||
}
|
||||
|
||||
if (ignoreContents.length === 0) {
|
||||
return () => false;
|
||||
}
|
||||
|
||||
const ignoreFilter: any = ignore().add(clearRelative(ignoreContents[0]!));
|
||||
|
||||
return function (p: string) {
// Never ignore `now.json` or `vercel.json`, even if the ignore file asks us to:
// we depend on these files for building the app with sourceless deployments.
if (p === 'now.json' || p === 'vercel.json') return false;
|
||||
return ignoreFilter.test(p).ignored;
|
||||
};
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
import { createHash } from 'crypto';
|
||||
import FileBlob from './file-blob';
|
||||
import FileFsRef from './file-fs-ref';
|
||||
import FileRef from './file-ref';
|
||||
@@ -33,6 +34,7 @@ import { NowBuildError } from './errors';
|
||||
import streamToBuffer from './fs/stream-to-buffer';
|
||||
import shouldServe from './should-serve';
|
||||
import debug from './debug';
|
||||
import getIgnoreFilter from './get-ignore-filter';
|
||||
|
||||
export {
|
||||
FileBlob,
|
||||
@@ -70,6 +72,7 @@ export {
|
||||
isSymbolicLink,
|
||||
getLambdaOptionsFromFunction,
|
||||
scanParentDirs,
|
||||
getIgnoreFilter,
|
||||
};
|
||||
|
||||
export {
|
||||
@@ -81,6 +84,12 @@ export {
|
||||
export { detectFramework } from './detect-framework';
|
||||
export { DetectorFilesystem } from './detectors/filesystem';
|
||||
export { readConfigFile } from './fs/read-config-file';
|
||||
export { normalizePath } from './fs/normalize-path';
|
||||
export {
|
||||
convertRuntimeToPlugin,
|
||||
updateFunctionsManifest,
|
||||
updateRoutesManifest,
|
||||
} from './convert-runtime-to-plugin';
|
||||
|
||||
export * from './schemas';
|
||||
export * from './types';
|
||||
@@ -126,3 +135,11 @@ export const getPlatformEnv = (name: string): string | undefined => {
|
||||
}
|
||||
return n;
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper function for generating file or directories names in `.output/inputs`
|
||||
* for dependencies of files provided to the File System API.
|
||||
*/
|
||||
export const getInputHash = (source: Buffer | string): string => {
|
||||
return createHash('sha1').update(source).digest('hex');
|
||||
};
|
||||
|
||||
@@ -36,9 +36,11 @@ interface CreateLambdaOptions {
|
||||
|
||||
interface GetLambdaOptionsFromFunctionOptions {
|
||||
sourceFile: string;
|
||||
config?: Config;
|
||||
config?: Pick<Config, 'functions'>;
|
||||
}
|
||||
|
||||
export const FILES_SYMBOL = Symbol('files');
|
||||
|
||||
export class Lambda {
|
||||
public type: 'Lambda';
|
||||
public zipBuffer: Buffer;
|
||||
@@ -118,7 +120,7 @@ export async function createLambda({
|
||||
|
||||
try {
|
||||
const zipBuffer = await createZip(files);
|
||||
return new Lambda({
|
||||
const lambda = new Lambda({
|
||||
zipBuffer,
|
||||
handler,
|
||||
runtime,
|
||||
@@ -127,6 +129,9 @@ export async function createLambda({
|
||||
environment,
|
||||
regions,
|
||||
});
|
||||
// @ts-ignore This symbol is a private API
|
||||
lambda[FILES_SYMBOL] = files;
|
||||
return lambda;
|
||||
} finally {
|
||||
sema.release();
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ export interface File {
|
||||
mode: number;
|
||||
contentType?: string;
|
||||
toStream: () => NodeJS.ReadableStream;
|
||||
toStreamAsync?: () => Promise<NodeJS.ReadableStream>;
|
||||
/**
|
||||
* The absolute path to the file in the filesystem
|
||||
*/
|
||||
@@ -57,6 +58,7 @@ export interface Meta {
|
||||
filesRemoved?: string[];
|
||||
env?: Env;
|
||||
buildEnv?: Env;
|
||||
avoidTopLevelInstall?: boolean;
|
||||
}
|
||||
|
||||
export interface AnalyzeOptions {
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
# users.rb
|
||||
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"functions": {
|
||||
"api/users.rb": {
|
||||
"memory": 3008
|
||||
},
|
||||
"api/doesnt-exist.rb": {
|
||||
"memory": 768
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
# [id].py
|
||||
@@ -0,0 +1 @@
|
||||
# index
|
||||
@@ -0,0 +1 @@
|
||||
# project/[aid]/[bid]/index.py
|
||||
@@ -0,0 +1 @@
|
||||
# get
|
||||
@@ -0,0 +1 @@
|
||||
# post
|
||||
@@ -0,0 +1 @@
|
||||
This file should also be included
|
||||
@@ -0,0 +1 @@
|
||||
# date
|
||||
@@ -0,0 +1 @@
|
||||
# math
|
||||
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"functions": {
|
||||
"api/users/post.py": {
|
||||
"memory": 3008
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [{ "src": "package.json", "use": "@vercel/static-build" }],
|
||||
"probes": [{ "path": "/", "mustContain": "npm version: 7" }]
|
||||
"probes": [{ "path": "/", "mustContain": "npm version: 8" }]
|
||||
}
|
||||
|
||||
@@ -2385,13 +2385,10 @@ it('Test `detectRoutes` with `featHandleMiss=true`', async () => {
|
||||
{
|
||||
const files = ['api/user.go', 'api/team.js', 'api/package.json'];
|
||||
|
||||
const { defaultRoutes, rewriteRoutes, errorRoutes } = await detectBuilders(
|
||||
files,
|
||||
null,
|
||||
{
|
||||
const { defaultRoutes, rewriteRoutes, errorRoutes, limitedRoutes } =
|
||||
await detectBuilders(files, null, {
|
||||
featHandleMiss,
|
||||
}
|
||||
);
|
||||
});
|
||||
expect(defaultRoutes).toStrictEqual([
|
||||
{ handle: 'miss' },
|
||||
{
|
||||
@@ -2414,6 +2411,22 @@ it('Test `detectRoutes` with `featHandleMiss=true`', async () => {
|
||||
},
|
||||
]);
|
||||
|
||||
// Limited routes should have js but not go since the go plugin is not installed
|
||||
expect(limitedRoutes).toStrictEqual({
|
||||
redirectRoutes: [],
|
||||
rewriteRoutes: [],
|
||||
defaultRoutes: [
|
||||
{
|
||||
handle: 'miss',
|
||||
},
|
||||
{
|
||||
src: '^/api/(.+)(?:\\.(?:js))$',
|
||||
dest: '/api/$1',
|
||||
check: true,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const pattern = new RegExp(errorRoutes![0].src!);
|
||||
|
||||
[
|
||||
@@ -2816,8 +2829,13 @@ it('Test `detectRoutes` with `featHandleMiss=true`, `cleanUrls=true`', async ()
|
||||
{
|
||||
const files = ['api/user.go', 'api/team.js', 'api/package.json'];
|
||||
|
||||
const { defaultRoutes, redirectRoutes, rewriteRoutes, errorRoutes } =
|
||||
await detectBuilders(files, null, options);
|
||||
const {
|
||||
defaultRoutes,
|
||||
redirectRoutes,
|
||||
rewriteRoutes,
|
||||
errorRoutes,
|
||||
limitedRoutes,
|
||||
} = await detectBuilders(files, null, options);
|
||||
testHeaders(redirectRoutes);
|
||||
expect(defaultRoutes).toStrictEqual([]);
|
||||
expect(rewriteRoutes).toStrictEqual([
|
||||
@@ -2834,6 +2852,28 @@ it('Test `detectRoutes` with `featHandleMiss=true`, `cleanUrls=true`', async ()
|
||||
},
|
||||
]);
|
||||
|
||||
// Limited routes should have js but not go since the go plugin is not installed
|
||||
expect(limitedRoutes).toStrictEqual({
|
||||
redirectRoutes: [
|
||||
{
|
||||
src: '^/(api(?:.+)?)/index(?:\\.(?:js))?/?$',
|
||||
headers: {
|
||||
Location: '/$1',
|
||||
},
|
||||
status: 308,
|
||||
},
|
||||
{
|
||||
src: '^/api/(.+)(?:\\.(?:js))/?$',
|
||||
headers: {
|
||||
Location: '/api/$1',
|
||||
},
|
||||
status: 308,
|
||||
},
|
||||
],
|
||||
rewriteRoutes: [],
|
||||
defaultRoutes: [],
|
||||
});
|
||||
|
||||
// expected redirect should match inputs
|
||||
const getLocation = createReplaceLocation(redirectRoutes);
|
||||
|
||||
@@ -3077,7 +3117,7 @@ it('Test `detectRoutes` with `featHandleMiss=true`, `cleanUrls=true`, `trailingS
|
||||
{
|
||||
const files = ['api/user.go', 'api/team.js', 'api/package.json'];
|
||||
|
||||
const { defaultRoutes, redirectRoutes, rewriteRoutes } =
|
||||
const { defaultRoutes, redirectRoutes, rewriteRoutes, limitedRoutes } =
|
||||
await detectBuilders(files, null, options);
|
||||
testHeaders(redirectRoutes);
|
||||
expect(defaultRoutes).toStrictEqual([]);
|
||||
@@ -3088,6 +3128,28 @@ it('Test `detectRoutes` with `featHandleMiss=true`, `cleanUrls=true`, `trailingS
|
||||
},
|
||||
]);
|
||||
|
||||
// Limited routes should have js but not go since the go plugin is not installed
|
||||
expect(limitedRoutes).toStrictEqual({
|
||||
redirectRoutes: [
|
||||
{
|
||||
src: '^/(api(?:.+)?)/index(?:\\.(?:js))?/?$',
|
||||
headers: {
|
||||
Location: '/$1/',
|
||||
},
|
||||
status: 308,
|
||||
},
|
||||
{
|
||||
src: '^/api/(.+)(?:\\.(?:js))/?$',
|
||||
headers: {
|
||||
Location: '/api/$1/',
|
||||
},
|
||||
status: 308,
|
||||
},
|
||||
],
|
||||
rewriteRoutes: [],
|
||||
defaultRoutes: [],
|
||||
});
|
||||
|
||||
// expected redirect should match inputs
|
||||
const getLocation = createReplaceLocation(redirectRoutes);
|
||||
|
||||
|
||||
231
packages/build-utils/test/unit.convert-runtime-to-plugin.test.ts
vendored
Normal file
@@ -0,0 +1,231 @@
|
||||
import { join } from 'path';
|
||||
import fs from 'fs-extra';
|
||||
import { BuildOptions, createLambda, FileFsRef } from '../src';
|
||||
import { convertRuntimeToPlugin } from '../src/convert-runtime-to-plugin';
|
||||
|
||||
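// Recursively reads a directory into a nested plain object ({ name: contents }),
// which makes it easy to assert on the produced file tree below.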
async function fsToJson(dir: string, output: Record<string, any> = {}) {
|
||||
const files = await fs.readdir(dir);
|
||||
for (const file of files) {
|
||||
const fsPath = join(dir, file);
|
||||
const stat = await fs.stat(fsPath);
|
||||
if (stat.isDirectory()) {
|
||||
output[file] = {};
|
||||
await fsToJson(fsPath, output[file]);
|
||||
} else {
|
||||
output[file] = await fs.readFile(fsPath, 'utf8');
|
||||
}
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
||||
const invalidFuncWorkpath = join(
|
||||
__dirname,
|
||||
'convert-runtime',
|
||||
'invalid-functions'
|
||||
);
|
||||
const pythonApiWorkpath = join(__dirname, 'convert-runtime', 'python-api');
|
||||
|
||||
describe('convert-runtime-to-plugin', () => {
|
||||
afterEach(async () => {
|
||||
await fs.remove(join(invalidFuncWorkpath, '.output'));
|
||||
await fs.remove(join(pythonApiWorkpath, '.output'));
|
||||
});
|
||||
|
||||
it('should create correct filesystem for python', async () => {
|
||||
const ext = '.py';
|
||||
const workPath = pythonApiWorkpath;
|
||||
const handlerName = 'vc__handler__python';
|
||||
const handlerFileName = handlerName + ext;
|
||||
|
||||
const lambdaOptions = {
|
||||
handler: `${handlerName}.vc_handler`,
|
||||
runtime: 'python3.9',
|
||||
memory: 512,
|
||||
maxDuration: 5,
|
||||
environment: {},
|
||||
};
|
||||
|
||||
const buildRuntime = async (opts: BuildOptions) => {
|
||||
const handlerPath = join(workPath, handlerFileName);
|
||||
|
||||
// This is the usual time at which a Legacy Runtime writes its Lambda launcher.
|
||||
await fs.writeFile(handlerPath, '# handler');
|
||||
|
||||
opts.files[handlerFileName] = new FileFsRef({
|
||||
fsPath: handlerPath,
|
||||
});
|
||||
|
||||
const lambda = await createLambda({
|
||||
files: opts.files,
|
||||
...lambdaOptions,
|
||||
});
|
||||
return { output: lambda };
|
||||
};
|
||||
|
||||
const lambdaFiles = await fsToJson(workPath);
|
||||
const packageName = 'vercel-plugin-python';
|
||||
const build = await convertRuntimeToPlugin(buildRuntime, packageName, ext);
|
||||
|
||||
await build({ workPath });
|
||||
|
||||
const output = await fsToJson(join(workPath, '.output'));
|
||||
|
||||
delete lambdaFiles['vercel.json'];
|
||||
delete lambdaFiles['vc__handler__python.py'];
|
||||
|
||||
expect(output).toMatchObject({
|
||||
'functions-manifest.json': expect.stringContaining('{'),
|
||||
inputs: {
|
||||
'api-routes-python': lambdaFiles,
|
||||
},
|
||||
server: {
|
||||
pages: {
|
||||
api: {
|
||||
'index.py': expect.stringContaining('handler'),
|
||||
'index.py.nft.json': expect.stringContaining('{'),
|
||||
users: {
|
||||
'get.py': expect.stringContaining('handler'),
|
||||
'get.py.nft.json': expect.stringContaining('{'),
|
||||
'post.py': expect.stringContaining('handler'),
|
||||
'post.py.nft.json': expect.stringContaining('{'),
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
const funcManifest = JSON.parse(output['functions-manifest.json']);
|
||||
expect(funcManifest).toMatchObject({
|
||||
version: 1,
|
||||
pages: {
|
||||
'api/index.py': lambdaOptions,
|
||||
'api/users/get.py': lambdaOptions,
|
||||
'api/users/post.py': { ...lambdaOptions, memory: 512 },
|
||||
},
|
||||
});
|
||||
|
||||
const indexJson = JSON.parse(output.server.pages.api['index.py.nft.json']);
|
||||
expect(indexJson).toMatchObject({
|
||||
version: 1,
|
||||
files: [
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/api/db/[id].py`,
|
||||
output: 'api/db/[id].py',
|
||||
},
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/api/index.py`,
|
||||
output: 'api/index.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
|
||||
output: 'api/project/[aid]/[bid]/index.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/api/users/get.py`,
|
||||
output: 'api/users/get.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/api/users/post.py`,
|
||||
output: 'api/users/post.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/file.txt`,
|
||||
output: 'file.txt',
|
||||
},
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/util/date.py`,
|
||||
output: 'util/date.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../inputs/api-routes-python/util/math.py`,
|
||||
output: 'util/math.py',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const getJson = JSON.parse(
|
||||
output.server.pages.api.users['get.py.nft.json']
|
||||
);
|
||||
expect(getJson).toMatchObject({
|
||||
version: 1,
|
||||
files: [
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/db/[id].py`,
|
||||
output: 'api/db/[id].py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/index.py`,
|
||||
output: 'api/index.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
|
||||
output: 'api/project/[aid]/[bid]/index.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/users/get.py`,
|
||||
output: 'api/users/get.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/users/post.py`,
|
||||
output: 'api/users/post.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/file.txt`,
|
||||
output: 'file.txt',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/util/date.py`,
|
||||
output: 'util/date.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/util/math.py`,
|
||||
output: 'util/math.py',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
const postJson = JSON.parse(
|
||||
output.server.pages.api.users['post.py.nft.json']
|
||||
);
|
||||
expect(postJson).toMatchObject({
|
||||
version: 1,
|
||||
files: [
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/db/[id].py`,
|
||||
output: 'api/db/[id].py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/index.py`,
|
||||
output: 'api/index.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/project/[aid]/[bid]/index.py`,
|
||||
output: 'api/project/[aid]/[bid]/index.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/users/get.py`,
|
||||
output: 'api/users/get.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/api/users/post.py`,
|
||||
output: 'api/users/post.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/file.txt`,
|
||||
output: 'file.txt',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/util/date.py`,
|
||||
output: 'util/date.py',
|
||||
},
|
||||
{
|
||||
input: `../../../../../inputs/api-routes-python/util/math.py`,
|
||||
output: 'util/math.py',
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
expect(output.server.pages['file.txt']).toBeUndefined();
|
||||
expect(output.server.pages.api['file.txt']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "23.1.3-canary.15",
|
||||
"version": "23.1.3-canary.52",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -43,14 +43,17 @@
|
||||
"node": ">= 12"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "2.12.3-canary.13",
|
||||
"@vercel/go": "1.2.4-canary.3",
|
||||
"@vercel/node": "1.12.2-canary.4",
|
||||
"@vercel/python": "2.0.6-canary.4",
|
||||
"@vercel/ruby": "1.2.8-canary.3",
|
||||
"update-notifier": "4.1.0"
|
||||
"@vercel/build-utils": "2.12.3-canary.31",
|
||||
"@vercel/go": "1.2.4-canary.4",
|
||||
"@vercel/node": "1.12.2-canary.7",
|
||||
"@vercel/python": "2.1.2-canary.1",
|
||||
"@vercel/ruby": "1.2.8-canary.6",
|
||||
"update-notifier": "4.1.0",
|
||||
"vercel-plugin-middleware": "0.0.0-canary.7",
|
||||
"vercel-plugin-node": "1.12.2-canary.23"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@next/env": "11.1.2",
|
||||
"@sentry/node": "5.5.0",
|
||||
"@sindresorhus/slugify": "0.11.0",
|
||||
"@tootallnate/once": "1.1.2",
|
||||
@@ -63,7 +66,7 @@
|
||||
"@types/dotenv": "6.1.1",
|
||||
"@types/escape-html": "0.0.20",
|
||||
"@types/express": "4.17.13",
|
||||
"@types/fs-extra": "5.0.5",
|
||||
"@types/fs-extra": "9.0.13",
|
||||
"@types/glob": "7.1.1",
|
||||
"@types/http-proxy": "1.16.2",
|
||||
"@types/inquirer": "7.3.1",
|
||||
@@ -87,8 +90,9 @@
|
||||
"@types/update-notifier": "5.1.0",
|
||||
"@types/which": "1.3.2",
|
||||
"@types/write-json-file": "2.2.1",
|
||||
"@vercel/frameworks": "0.5.1-canary.9",
|
||||
"@vercel/frameworks": "0.5.1-canary.16",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"@vercel/nft": "0.17.0",
|
||||
"@zeit/fun": "0.11.2",
|
||||
"@zeit/source-map-support": "0.6.2",
|
||||
"ajv": "6.12.2",
|
||||
@@ -120,7 +124,7 @@
|
||||
"execa": "3.2.0",
|
||||
"express": "4.17.1",
|
||||
"fast-deep-equal": "3.1.3",
|
||||
"fs-extra": "7.0.1",
|
||||
"fs-extra": "10.0.0",
|
||||
"get-port": "5.1.1",
|
||||
"glob": "7.1.2",
|
||||
"http-proxy": "1.18.1",
|
||||
|
||||
879
packages/cli/src/commands/build.ts
Normal file
@@ -0,0 +1,879 @@
|
||||
import { loadEnvConfig, processEnv } from '@next/env';
|
||||
import {
|
||||
execCommand,
|
||||
getScriptName,
|
||||
GlobOptions,
|
||||
scanParentDirs,
|
||||
spawnAsync,
|
||||
} from '@vercel/build-utils';
|
||||
import { nodeFileTrace } from '@vercel/nft';
|
||||
import Sema from 'async-sema';
|
||||
import chalk from 'chalk';
|
||||
import { SpawnOptions } from 'child_process';
|
||||
import { assert } from 'console';
|
||||
import { createHash } from 'crypto';
|
||||
import fs from 'fs-extra';
|
||||
import ogGlob from 'glob';
|
||||
import { dirname, isAbsolute, join, parse, relative, resolve } from 'path';
|
||||
import pluralize from 'pluralize';
|
||||
import Client from '../util/client';
|
||||
import { VercelConfig } from '../util/dev/types';
|
||||
import { emoji, prependEmoji } from '../util/emoji';
|
||||
import getArgs from '../util/get-args';
|
||||
import handleError from '../util/handle-error';
|
||||
import confirm from '../util/input/confirm';
|
||||
import { isSettingValue } from '../util/is-setting-value';
|
||||
import cmd from '../util/output/cmd';
|
||||
import logo from '../util/output/logo';
|
||||
import param from '../util/output/param';
|
||||
import stamp from '../util/output/stamp';
|
||||
import { getCommandName, getPkgName } from '../util/pkg-name';
|
||||
import { loadCliPlugins } from '../util/plugins';
|
||||
import { findFramework } from '../util/projects/find-framework';
|
||||
import { VERCEL_DIR } from '../util/projects/link';
|
||||
import { readProjectSettings } from '../util/projects/project-settings';
|
||||
import pull from './pull';
|
||||
|
||||
const sema = new Sema(16, {
|
||||
capacity: 100,
|
||||
});
|
||||
|
||||
const help = () => {
|
||||
return console.log(`
|
||||
${chalk.bold(`${logo} ${getPkgName()} build`)}
|
||||
|
||||
${chalk.dim('Options:')}
|
||||
|
||||
-h, --help Output usage information
|
||||
-A ${chalk.bold.underline('FILE')}, --local-config=${chalk.bold.underline(
|
||||
'FILE'
|
||||
)} Path to the local ${'`vercel.json`'} file
|
||||
-Q ${chalk.bold.underline('DIR')}, --global-config=${chalk.bold.underline(
|
||||
'DIR'
|
||||
)} Path to the global ${'`.vercel`'} directory
|
||||
--cwd [path] The current working directory
|
||||
-d, --debug Debug mode [off]
|
||||
-y, --yes Skip the confirmation prompt
|
||||
|
||||
${chalk.dim('Examples:')}
|
||||
|
||||
${chalk.gray('–')} Build the project
|
||||
|
||||
${chalk.cyan(`$ ${getPkgName()} build`)}
|
||||
${chalk.cyan(`$ ${getPkgName()} build --cwd ./path-to-project`)}
|
||||
`);
|
||||
};
|
||||
|
||||
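// All build artifacts produced by this command are written below this directory (relative to `cwd`).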
const OUTPUT_DIR = '.output';
|
||||
|
||||
export default async function main(client: Client) {
|
||||
if (process.env.__VERCEL_BUILD_RUNNING) {
|
||||
client.output.error(
|
||||
`${cmd(
|
||||
`${getPkgName()} build`
|
||||
)} must not recursively invoke itself. Check the Build Command in the Project Settings or the ${cmd(
|
||||
'build'
|
||||
)} script in ${cmd('package.json')}`
|
||||
);
|
||||
client.output.error(
|
||||
`Learn More: https://vercel.link/recursive-invocation-of-commands`
|
||||
);
|
||||
return 1;
|
||||
} else {
|
||||
process.env.__VERCEL_BUILD_RUNNING = '1';
|
||||
}
|
||||
|
||||
let argv;
|
||||
const buildStamp = stamp();
|
||||
try {
|
||||
argv = getArgs(client.argv.slice(2), {
|
||||
'--debug': Boolean,
|
||||
'--cwd': String,
|
||||
});
|
||||
} catch (err) {
|
||||
handleError(err);
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (argv['--help']) {
|
||||
help();
|
||||
return 2;
|
||||
}
|
||||
|
||||
let cwd = argv['--cwd'] || process.cwd();
|
||||
|
||||
let project = await readProjectSettings(join(cwd, VERCEL_DIR));
|
||||
// If there are no project settings, only then do we pull them down
|
||||
while (!project?.settings) {
|
||||
const confirmed = await confirm(
|
||||
`No Project Settings found locally. Run ${getCommandName(
|
||||
'pull'
|
||||
)} for retrieving them?`,
|
||||
true
|
||||
);
|
||||
if (!confirmed) {
|
||||
client.output.print(`Aborted. No Project Settings retrieved.\n`);
|
||||
return 0;
|
||||
}
|
||||
const result = await pull(client);
|
||||
if (result !== 0) {
|
||||
return result;
|
||||
}
|
||||
project = await readProjectSettings(join(cwd, VERCEL_DIR));
|
||||
}
|
||||
|
||||
// If `rootDirectory` exists, then `baseDir` will be the repo's root directory.
|
||||
const baseDir = cwd;
|
||||
|
||||
cwd = project.settings.rootDirectory
|
||||
? join(cwd, project.settings.rootDirectory)
|
||||
: cwd;
|
||||
|
||||
// Load the environment
|
||||
const { combinedEnv, loadedEnvFiles } = loadEnvConfig(cwd, false, {
|
||||
info: () => ({}), // we don't want to log this yet.
|
||||
error: (...args: any[]) => client.output.error(args.join(' ')),
|
||||
});
|
||||
|
||||
// Set process.env with loaded environment variables
|
||||
processEnv(loadedEnvFiles);
|
||||
|
||||
const spawnOpts: {
|
||||
env: Record<string, string | undefined>;
|
||||
} = {
|
||||
env: { ...combinedEnv, VERCEL: '1' },
|
||||
};
|
||||
|
||||
process.chdir(cwd);
|
||||
|
||||
const framework = findFramework(project.settings.framework);
|
||||
// If this is undefined, we bail. If it is null, then findFramework should return "Other",
|
||||
// so this should really never happen, but just in case....
|
||||
if (framework === undefined) {
|
||||
client.output.error(
|
||||
`Framework detection failed or is malformed. Please run ${getCommandName(
|
||||
'pull'
|
||||
)} again.`
|
||||
);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const buildState = { ...project.settings };
|
||||
const formatSetting = (
|
||||
name: string,
|
||||
override: string | null | undefined,
|
||||
defaults: typeof framework.settings.outputDirectory
|
||||
) =>
|
||||
` - ${chalk.bold(`${name}:`)} ${`${
|
||||
override
|
||||
? override + ` (override)`
|
||||
: 'placeholder' in defaults
|
||||
? chalk.italic(`${defaults.placeholder}`)
|
||||
: defaults.value
|
||||
}`}`;
|
||||
console.log(`Retrieved Project Settings:`);
|
||||
console.log(
|
||||
chalk.dim(` - ${chalk.bold(`Framework Preset:`)} ${framework.name}`)
|
||||
);
|
||||
console.log(
|
||||
chalk.dim(
|
||||
formatSetting(
|
||||
'Build Command',
|
||||
project.settings.buildCommand,
|
||||
framework.settings.buildCommand
|
||||
)
|
||||
)
|
||||
);
|
||||
console.log(
|
||||
chalk.dim(
|
||||
formatSetting(
|
||||
'Output Directory',
|
||||
project.settings.outputDirectory,
|
||||
framework.settings.outputDirectory
|
||||
)
|
||||
)
|
||||
);
|
||||
|
||||
buildState.outputDirectory =
|
||||
project.settings.outputDirectory ||
|
||||
(isSettingValue(framework.settings.outputDirectory)
|
||||
? framework.settings.outputDirectory.value
|
||||
: null);
|
||||
buildState.rootDirectory = project.settings.rootDirectory;
|
||||
|
||||
if (loadedEnvFiles.length > 0) {
|
||||
console.log(
|
||||
`Loaded Environment Variables from ${loadedEnvFiles.length} ${pluralize(
|
||||
'file',
|
||||
loadedEnvFiles.length
|
||||
)}:`
|
||||
);
|
||||
for (let envFile of loadedEnvFiles) {
|
||||
console.log(chalk.dim(` - ${envFile.path}`));
|
||||
}
|
||||
}
|
||||
|
||||
// Load plugins
|
||||
const debug = argv['--debug'];
|
||||
let plugins;
|
||||
try {
|
||||
plugins = await loadCliPlugins(cwd, client.output);
|
||||
} catch (error) {
|
||||
client.output.error('Failed to load CLI Plugins');
|
||||
handleError(error, { debug });
|
||||
return 1;
|
||||
}
|
||||
|
||||
const origLog = console.log;
|
||||
const origErr = console.error;
|
||||
const prefixedLog = (
|
||||
prefix: string,
|
||||
args: any[],
|
||||
logger: (...args: any[]) => void
|
||||
) => {
|
||||
if (typeof args[0] === 'string') {
|
||||
args[0] = `${prefix} ${args[0]}`;
|
||||
} else {
|
||||
args.unshift(prefix);
|
||||
}
|
||||
return logger(...args);
|
||||
};
|
||||
|
||||
if (plugins?.pluginCount && plugins?.pluginCount > 0) {
|
||||
console.log(
|
||||
`Loaded ${plugins.pluginCount} CLI ${pluralize(
|
||||
'Plugin',
|
||||
plugins.pluginCount
|
||||
)}`
|
||||
);
|
||||
// preBuild Plugins
|
||||
if (plugins.preBuildPlugins.length > 0) {
|
||||
console.log(
|
||||
`Running ${plugins.pluginCount} CLI ${pluralize(
|
||||
'Plugin',
|
||||
plugins.pluginCount
|
||||
)} before Build Command:`
|
||||
);
|
||||
for (let item of plugins.preBuildPlugins) {
|
||||
const { name, plugin, color } = item;
|
||||
if (typeof plugin.preBuild === 'function') {
|
||||
const pluginStamp = stamp();
|
||||
const fullName = name + '.preBuild';
|
||||
const prefix = chalk.gray(' > ') + color(fullName + ':');
|
||||
client.output.debug(`Running ${fullName}:`);
|
||||
try {
|
||||
console.log = (...args: any[]) =>
|
||||
prefixedLog(prefix, args, origLog);
|
||||
console.error = (...args: any[]) =>
|
||||
prefixedLog(prefix, args, origErr);
|
||||
await plugin.preBuild();
|
||||
client.output.debug(
|
||||
`Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
|
||||
);
|
||||
} catch (error) {
|
||||
client.output.error(`${prefix} failed`);
|
||||
handleError(error, { debug });
|
||||
return 1;
|
||||
} finally {
|
||||
console.log = origLog;
|
||||
console.error = origErr;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clean the output directory
|
||||
fs.removeSync(join(cwd, OUTPUT_DIR));
|
||||
|
||||
if (framework && process.env.VERCEL_URL && 'envPrefix' in framework) {
|
||||
for (const key of Object.keys(process.env)) {
|
||||
if (key.startsWith('VERCEL_')) {
|
||||
const newKey = `${framework.envPrefix}${key}`;
|
||||
// Set `process.env` and `spawnOpts.env` to make sure the variables are
|
||||
// available to the `build` step and the CLI Plugins.
|
||||
process.env[newKey] = process.env[newKey] || process.env[key];
|
||||
spawnOpts.env[newKey] = process.env[newKey];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Required for Next.js to produce the correct `.nft.json` files.
|
||||
spawnOpts.env.NEXT_PRIVATE_OUTPUT_TRACE_ROOT = baseDir;
|
||||
|
||||
// Yarn v2 PnP mode may be activated, so force
|
||||
// "node-modules" linker style
|
||||
const env = {
|
||||
YARN_NODE_LINKER: 'node-modules',
|
||||
...spawnOpts.env,
|
||||
};
|
||||
|
||||
if (typeof buildState.buildCommand === 'string') {
|
||||
console.log(`Running Build Command: ${cmd(buildState.buildCommand)}`);
|
||||
await execCommand(buildState.buildCommand, {
|
||||
...spawnOpts,
|
||||
env,
|
||||
cwd,
|
||||
});
|
||||
} else if (fs.existsSync(join(cwd, 'package.json'))) {
|
||||
await runPackageJsonScript(
|
||||
client,
|
||||
cwd,
|
||||
['vercel-build', 'now-build', 'build'],
|
||||
spawnOpts
|
||||
);
|
||||
} else if (typeof framework.settings.buildCommand.value === 'string') {
|
||||
console.log(
|
||||
`Running Build Command: ${cmd(framework.settings.buildCommand.value)}`
|
||||
);
|
||||
await execCommand(framework.settings.buildCommand.value, {
|
||||
...spawnOpts,
|
||||
env,
|
||||
cwd,
|
||||
});
|
||||
}
|
||||
|
||||
if (!fs.existsSync(join(cwd, OUTPUT_DIR))) {
|
||||
let dotNextDir: string | null = null;
|
||||
|
||||
// If a custom `outputDirectory` was set, we'll need to verify
|
||||
// if it's `.next` output, or just static output.
|
||||
const userOutputDirectory = project.settings.outputDirectory;
|
||||
|
||||
if (typeof userOutputDirectory === 'string') {
|
||||
if (fs.existsSync(join(cwd, userOutputDirectory, 'BUILD_ID'))) {
|
||||
dotNextDir = join(cwd, userOutputDirectory);
|
||||
client.output.debug(
|
||||
`Consider ${param(userOutputDirectory)} as ${param('.next')} output.`
|
||||
);
|
||||
}
|
||||
} else if (fs.existsSync(join(cwd, '.next'))) {
|
||||
dotNextDir = join(cwd, '.next');
|
||||
client.output.debug(`Found ${param('.next')} directory.`);
|
||||
}
|
||||
|
||||
// We cannot rely on the `framework` alone, as it might be a static export,
// and the current build might use a different project that's not in the settings.
|
||||
const isNextOutput = Boolean(dotNextDir);
|
||||
const outputDir = isNextOutput ? OUTPUT_DIR : join(OUTPUT_DIR, 'static');
|
||||
const distDir =
|
||||
dotNextDir ||
|
||||
userOutputDirectory ||
|
||||
(await framework.getFsOutputDir(cwd));
|
||||
|
||||
await fs.ensureDir(join(cwd, outputDir));
|
||||
|
||||
const copyStamp = stamp();
|
||||
client.output.spinner(
|
||||
`Copying files from ${param(distDir)} to ${param(outputDir)}`
|
||||
);
|
||||
const files = await glob(join(relative(cwd, distDir), '**'), {
|
||||
ignore: [
|
||||
'node_modules/**',
|
||||
'.vercel/**',
|
||||
'.env',
|
||||
'.env.*',
|
||||
'.*ignore',
|
||||
'_middleware.ts',
|
||||
'_middleware.mts',
|
||||
'_middleware.cts',
|
||||
'_middleware.mjs',
|
||||
'_middleware.cjs',
|
||||
'_middleware.js',
|
||||
'api/**',
|
||||
'.git/**',
|
||||
'.next/cache/**',
|
||||
],
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd,
|
||||
absolute: true,
|
||||
});
|
||||
await Promise.all(
|
||||
files.map(f =>
|
||||
smartCopy(
|
||||
client,
|
||||
f,
|
||||
distDir === '.'
|
||||
? join(cwd, outputDir, relative(cwd, f))
|
||||
: f.replace(distDir, outputDir)
|
||||
)
|
||||
)
|
||||
);
|
||||
client.output.stopSpinner();
|
||||
console.log(
|
||||
`Copied ${files.length.toLocaleString()} files from ${param(
|
||||
distDir
|
||||
)} to ${param(outputDir)} ${copyStamp()}`
|
||||
);
|
||||
|
||||
const buildManifestPath = join(cwd, OUTPUT_DIR, 'build-manifest.json');
|
||||
const routesManifestPath = join(cwd, OUTPUT_DIR, 'routes-manifest.json');
|
||||
|
||||
if (!fs.existsSync(buildManifestPath)) {
|
||||
client.output.debug(
|
||||
`Generating build manifest: ${param(buildManifestPath)}`
|
||||
);
|
||||
const buildManifest = {
|
||||
version: 1,
|
||||
cache: framework.cachePattern ? [framework.cachePattern] : [],
|
||||
};
|
||||
await fs.writeJSON(buildManifestPath, buildManifest, { spaces: 2 });
|
||||
}
|
||||
|
||||
if (!fs.existsSync(routesManifestPath)) {
|
||||
client.output.debug(
|
||||
`Generating routes manifest: ${param(routesManifestPath)}`
|
||||
);
|
||||
const routesManifest = {
|
||||
version: 3,
|
||||
pages404: true,
|
||||
basePath: '',
|
||||
redirects: framework.defaultRedirects ?? [],
|
||||
headers: framework.defaultHeaders ?? [],
|
||||
dynamicRoutes: [],
|
||||
dataRoutes: [],
|
||||
rewrites: framework.defaultRewrites ?? [],
|
||||
};
|
||||
await fs.writeJSON(
|
||||
join(cwd, OUTPUT_DIR, 'routes-manifest.json'),
|
||||
routesManifest,
|
||||
{ spaces: 2 }
|
||||
);
|
||||
}
|
||||
|
||||
// Special Next.js processing.
|
||||
if (isNextOutput) {
|
||||
// The contents of `.output/static` should be placed inside of `.output/static/_next/static`
|
||||
const tempStatic = '___static';
|
||||
await fs.rename(
|
||||
join(cwd, OUTPUT_DIR, 'static'),
|
||||
join(cwd, OUTPUT_DIR, tempStatic)
|
||||
);
|
||||
await fs.mkdirp(join(cwd, OUTPUT_DIR, 'static', '_next', 'static'));
|
||||
await fs.rename(
|
||||
join(cwd, OUTPUT_DIR, tempStatic),
|
||||
join(cwd, OUTPUT_DIR, 'static', '_next', 'static')
|
||||
);
|
||||
|
||||
// Next.js might reference files from the `static` directory in `middleware-manifest.json`.
|
||||
// Since we move all files from `static` to `static/_next/static`, we'll need to change
|
||||
// those references as well and update the manifest file.
|
||||
const middlewareManifest = join(
|
||||
cwd,
|
||||
OUTPUT_DIR,
|
||||
'server',
|
||||
'middleware-manifest.json'
|
||||
);
|
||||
if (fs.existsSync(middlewareManifest)) {
|
||||
const manifest = await fs.readJSON(middlewareManifest);
|
||||
Object.keys(manifest.middleware).forEach(key => {
|
||||
const files = manifest.middleware[key].files.map((f: string) => {
|
||||
if (f.startsWith('static/')) {
|
||||
const next = f.replace(/^static\//gm, 'static/_next/static/');
|
||||
client.output.debug(
|
||||
`Replacing file in \`middleware-manifest.json\`: ${f} => ${next}`
|
||||
);
|
||||
return next;
|
||||
}
|
||||
|
||||
return f;
|
||||
});
|
||||
|
||||
manifest.middleware[key].files = files;
|
||||
});
|
||||
|
||||
await fs.writeJSON(middlewareManifest, manifest);
|
||||
}
|
||||
|
||||
// We want to pick up the directories for user-provided static files into `.output/static`.
// More specifically, the `static` directory contents would then be mounted to `.output/static/static`,
// and the `public` directory contents would be mounted to `.output/static`. Old Next.js versions
// only allow `static`, and newer ones allow both, but since there's nobody that actually uses both,
// we can check for the existence of both and pick the first match that we find (first
// `public`, then `static`). We can't read both at the same time because that would mean we'd
// read `public` for old Next.js versions that don't support it, which might be breaking (and
// we don't want to make `vercel build` specific to framework versions).
|
||||
const nextSrcDirectory = dirname(distDir);
|
||||
|
||||
const publicFiles = await glob('public/**', {
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd: nextSrcDirectory,
|
||||
absolute: true,
|
||||
});
|
||||
if (publicFiles.length > 0) {
|
||||
await Promise.all(
|
||||
publicFiles.map(f =>
|
||||
smartCopy(
|
||||
client,
|
||||
f,
|
||||
join(
|
||||
OUTPUT_DIR,
|
||||
'static',
|
||||
relative(join(dirname(distDir), 'public'), f)
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
} else {
|
||||
const staticFiles = await glob('static/**', {
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd: nextSrcDirectory,
|
||||
absolute: true,
|
||||
});
|
||||
await Promise.all(
|
||||
staticFiles.map(f =>
|
||||
smartCopy(
|
||||
client,
|
||||
f,
|
||||
join(
|
||||
OUTPUT_DIR,
|
||||
'static',
|
||||
'static',
|
||||
relative(join(dirname(distDir), 'static'), f)
|
||||
)
|
||||
)
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
// Regardless of the Next.js version, we make sure that it is compatible with
// the Filesystem API. We get there by moving all the files needed
// into the output directory's `inputs` folder. If Next.js is > 12, we can
// read the .nft.json files directly. If there aren't any .nft.json files,
// we trace and create them. We then resolve the files in each nft file list
// and move them into the "inputs" directory. We rename them with hashes to
// prevent collisions and then update the related .nft files accordingly
// to point to the newly named input files. Again, all of this is so that Next.js
// works with the Filesystem API (and so .output contains all inputs
// needed to run Next.js) and `vc --prebuilt`.
|
||||
const nftFiles = await glob(join(OUTPUT_DIR, '**', '*.nft.json'), {
|
||||
nodir: true,
|
||||
dot: true,
|
||||
ignore: ['cache/**'],
|
||||
cwd,
|
||||
absolute: true,
|
||||
});
|
||||
|
||||
// If there are no .nft.json files, we know that Next.js is < 12. We then
// execute the tracing on our own.
|
||||
if (nftFiles.length === 0) {
|
||||
const serverFiles = await glob(
|
||||
join(OUTPUT_DIR, 'server', 'pages', '**', '*.js'),
|
||||
{
|
||||
nodir: true,
|
||||
dot: true,
|
||||
cwd,
|
||||
ignore: ['webpack-runtime.js'],
|
||||
absolute: true,
|
||||
}
|
||||
);
|
||||
for (let f of serverFiles) {
|
||||
const { ext, dir } = parse(f);
|
||||
const { fileList } = await nodeFileTrace([f], {
|
||||
ignore: [
|
||||
relative(cwd, f),
|
||||
'node_modules/next/dist/pages/**/*',
|
||||
'node_modules/next/dist/compiled/webpack/(bundle4|bundle5).js',
|
||||
'node_modules/react/**/*.development.js',
|
||||
'node_modules/react-dom/**/*.development.js',
|
||||
'node_modules/use-subscription/**/*.development.js',
|
||||
'node_modules/sharp/**/*',
|
||||
],
|
||||
});
|
||||
fileList.delete(relative(cwd, f));
|
||||
await resolveNftToOutput({
|
||||
client,
|
||||
baseDir,
|
||||
outputDir: OUTPUT_DIR,
|
||||
nftFileName: f.replace(ext, '.js.nft.json'),
|
||||
distDir,
|
||||
nft: {
|
||||
version: 1,
|
||||
files: Array.from(fileList).map(fileListEntry =>
|
||||
relative(dir, fileListEntry)
|
||||
),
|
||||
},
|
||||
});
|
||||
}
|
||||
} else {
|
||||
for (let f of nftFiles) {
|
||||
const json = await fs.readJson(f);
|
||||
await resolveNftToOutput({
|
||||
client,
|
||||
baseDir,
|
||||
outputDir: OUTPUT_DIR,
|
||||
nftFileName: f,
|
||||
nft: json,
|
||||
distDir,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const requiredServerFilesPath = join(
|
||||
OUTPUT_DIR,
|
||||
'required-server-files.json'
|
||||
);
|
||||
|
||||
if (fs.existsSync(requiredServerFilesPath)) {
|
||||
client.output.debug(`Resolve ${param('required-server-files.json')}.`);
|
||||
|
||||
const requiredServerFilesJson = await fs.readJSON(
|
||||
requiredServerFilesPath
|
||||
);
|
||||
|
||||
await fs.writeJSON(requiredServerFilesPath, {
|
||||
...requiredServerFilesJson,
|
||||
appDir: '.',
|
||||
files: requiredServerFilesJson.files.map((i: string) => {
|
||||
const originalPath = join(requiredServerFilesJson.appDir, i);
|
||||
const relPath = join(OUTPUT_DIR, relative(distDir, originalPath));
|
||||
|
||||
const absolutePath = join(cwd, relPath);
|
||||
const output = relative(baseDir, absolutePath);
|
||||
|
||||
return relPath === output
|
||||
? relPath
|
||||
: {
|
||||
input: relPath,
|
||||
output,
|
||||
};
|
||||
}),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Build Plugins
|
||||
if (plugins?.buildPlugins && plugins.buildPlugins.length > 0) {
|
||||
console.log(
|
||||
`Running ${plugins.pluginCount} CLI ${pluralize(
|
||||
'Plugin',
|
||||
plugins.pluginCount
|
||||
)} after Build Command:`
|
||||
);
|
||||
let vercelConfig: VercelConfig = {};
|
||||
try {
|
||||
vercelConfig = await fs.readJSON(join(cwd, 'vercel.json'));
|
||||
} catch (error) {
|
||||
if (error.code !== 'ENOENT') {
|
||||
throw new Error(`Failed to read vercel.json: ${error.message}`);
|
||||
}
|
||||
}
|
||||
for (let item of plugins.buildPlugins) {
|
||||
const { name, plugin, color } = item;
|
||||
if (typeof plugin.build === 'function') {
|
||||
const pluginStamp = stamp();
|
||||
const fullName = name + '.build';
|
||||
const prefix = chalk.gray(' > ') + color(fullName + ':');
|
||||
client.output.debug(`Running ${fullName}:`);
|
||||
try {
|
||||
console.log = (...args: any[]) => prefixedLog(prefix, args, origLog);
|
||||
console.error = (...args: any[]) =>
|
||||
prefixedLog(prefix, args, origErr);
|
||||
await plugin.build({
|
||||
vercelConfig,
|
||||
workPath: cwd,
|
||||
});
|
||||
client.output.debug(
|
||||
`Completed ${fullName} ${chalk.dim(`${pluginStamp()}`)}`
|
||||
);
|
||||
} catch (error) {
|
||||
client.output.error(`${prefix} failed`);
|
||||
handleError(error, { debug });
|
||||
return 1;
|
||||
} finally {
|
||||
console.log = origLog;
|
||||
console.error = origErr;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log(
|
||||
`${prependEmoji(
|
||||
`Build Completed in ${chalk.bold(OUTPUT_DIR)} ${chalk.gray(
|
||||
buildStamp()
|
||||
)}`,
|
||||
emoji('success')
|
||||
)}`
|
||||
);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
export async function runPackageJsonScript(
|
||||
client: Client,
|
||||
destPath: string,
|
||||
scriptNames: string | Iterable<string>,
|
||||
spawnOpts?: SpawnOptions
|
||||
) {
|
||||
assert(isAbsolute(destPath));
|
||||
|
||||
const { packageJson, cliType, lockfileVersion } = await scanParentDirs(
|
||||
destPath,
|
||||
true
|
||||
);
|
||||
const scriptName = getScriptName(
|
||||
packageJson,
|
||||
typeof scriptNames === 'string' ? [scriptNames] : scriptNames
|
||||
);
|
||||
if (!scriptName) return false;
|
||||
|
||||
client.output.debug('Running user script...');
|
||||
const runScriptTime = Date.now();
|
||||
|
||||
const opts: any = { cwd: destPath, ...spawnOpts };
|
||||
const env = (opts.env = { ...process.env, ...opts.env });
|
||||
|
||||
if (cliType === 'npm') {
|
||||
opts.prettyCommand = `npm run ${scriptName}`;
|
||||
|
||||
if (typeof lockfileVersion === 'number' && lockfileVersion >= 2) {
|
||||
// Ensure that npm 7 is at the beginning of the `$PATH`
|
||||
env.PATH = `/node16/bin-npm7:${env.PATH}`;
|
||||
}
|
||||
} else {
|
||||
opts.prettyCommand = `yarn run ${scriptName}`;
|
||||
|
||||
// Yarn v2 PnP mode may be activated, so force "node-modules" linker style
|
||||
if (!env.YARN_NODE_LINKER) {
|
||||
env.YARN_NODE_LINKER = 'node-modules';
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Running Build Command: ${cmd(opts.prettyCommand)}\n`);
|
||||
await spawnAsync(cliType, ['run', scriptName], opts);
|
||||
console.log(); // give it some room
|
||||
client.output.debug(`Script complete [${Date.now() - runScriptTime}ms]`);
|
||||
return true;
|
||||
}
|
||||
|
||||
async function linkOrCopy(existingPath: string, newPath: string) {
|
||||
try {
|
||||
if (
|
||||
newPath.endsWith('.nft.json') ||
|
||||
newPath.endsWith('middleware-manifest.json') ||
|
||||
newPath.endsWith('required-server-files.json')
|
||||
) {
|
||||
await fs.copy(existingPath, newPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
} else {
|
||||
await fs.createLink(existingPath, newPath);
|
||||
}
|
||||
} catch (err: any) {
|
||||
// eslint-disable-line
|
||||
// If a symlink to the same file already exists
|
||||
// then trying to copy it will make an empty file from it.
|
||||
if (err['code'] === 'EEXIST') return;
|
||||
// In some VERY rare cases (1 in a thousand), symlink creation fails on Windows.
|
||||
// In that case, we just fall back to copying.
|
||||
// This issue is reproducible with "pnpm add @material-ui/icons@4.9.1"
|
||||
await fs.copy(existingPath, newPath, {
|
||||
overwrite: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
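// Copies (or hard-links) a single file while the shared semaphore above caps filesystem concurrency.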
async function smartCopy(client: Client, from: string, to: string) {
|
||||
await sema.acquire();
|
||||
try {
|
||||
client.output.debug(`Copying from ${from} to ${to}`);
|
||||
await linkOrCopy(from, to);
|
||||
} finally {
|
||||
sema.release();
|
||||
}
|
||||
}
|
||||
|
||||
async function glob(pattern: string, options: GlobOptions): Promise<string[]> {
|
||||
return new Promise((resolve, reject) => {
|
||||
ogGlob(pattern, options, (err, files) => {
|
||||
err ? reject(err) : resolve(files);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Computes a hash for the given buf.
|
||||
*
|
||||
* @param {Buffer} buf file data
|
||||
* @return {String} hex digest
|
||||
*/
|
||||
function hash(buf: Buffer): string {
|
||||
return createHash('sha1').update(buf).digest('hex');
|
||||
}
|
||||
|
||||
interface NftFile {
|
||||
version: number;
|
||||
files: (string | { input: string; output: string })[];
|
||||
}
|
||||
|
||||
// resolveNftToOutput takes nft file and moves all of its trace files
|
||||
// into the specified directory + `inputs`, (renaming them to their hash + ext) and
|
||||
// subsequently updating the original nft file accordingly. This is done
|
||||
// to make the `.output` directory be self-contained, so that it works
|
||||
// properly with `vc --prebuilt`.
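// Illustrative example (hypothetical paths and hash): a traced file outside `distDir`, such as
// "../../node_modules/chalk/index.js", gets copied to ".output/inputs/<sha1>.js" and its nft
// entry is rewritten to { input: "<relative path to that inputs file>", output: "node_modules/chalk/index.js" }.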
|
||||
async function resolveNftToOutput({
|
||||
client,
|
||||
baseDir,
|
||||
outputDir,
|
||||
nftFileName,
|
||||
distDir,
|
||||
nft,
|
||||
}: {
|
||||
client: Client;
|
||||
baseDir: string;
|
||||
outputDir: string;
|
||||
nftFileName: string;
|
||||
distDir: string;
|
||||
nft: NftFile;
|
||||
}) {
|
||||
client.output.debug(`Processing and resolving ${nftFileName}`);
|
||||
await fs.ensureDir(join(outputDir, 'inputs'));
|
||||
const newFilesList: NftFile['files'] = [];
|
||||
|
||||
// If `distDir` is a subdirectory, then the input has to be resolved to where the `.output` directory will be.
|
||||
const relNftFileName = relative(outputDir, nftFileName);
|
||||
const origNftFilename = join(distDir, relNftFileName);
|
||||
|
||||
if (relNftFileName.startsWith('cache/')) {
|
||||
// No need to process the `cache/` directory.
|
||||
// Paths in it might also not be relative to `cache` itself.
|
||||
return;
|
||||
}
|
||||
|
||||
for (let fileEntity of nft.files) {
|
||||
const relativeInput =
|
||||
typeof fileEntity === 'string' ? fileEntity : fileEntity.input;
|
||||
const fullInput = resolve(join(parse(origNftFilename).dir, relativeInput));
|
||||
|
||||
// if the resolved path is NOT in the .output directory, we move it in there
|
||||
if (!fullInput.includes(distDir)) {
|
||||
const { ext } = parse(fullInput);
|
||||
const raw = await fs.readFile(fullInput);
|
||||
const newFilePath = join(outputDir, 'inputs', hash(raw) + ext);
|
||||
smartCopy(client, fullInput, newFilePath);
|
||||
|
||||
// We have to use `baseDir` instead of `cwd`, because we want to
|
||||
// mount everything from there (especially `node_modules`).
|
||||
// This is important for NPM Workspaces where `node_modules` is not
|
||||
// in the directory of the workspace.
|
||||
const output = relative(baseDir, fullInput).replace('.output', '.next');
|
||||
|
||||
newFilesList.push({
|
||||
input: relative(parse(nftFileName).dir, newFilePath),
|
||||
output,
|
||||
});
|
||||
} else {
|
||||
newFilesList.push(relativeInput);
|
||||
}
|
||||
}
|
||||
// Update the .nft.json with new input and output mapping
|
||||
await fs.writeJSON(nftFileName, {
|
||||
...nft,
|
||||
files: newFilesList,
|
||||
});
|
||||
}
|
||||
@@ -60,6 +60,7 @@ import { getCommandName } from '../../util/pkg-name';
|
||||
import { getPreferredPreviewURL } from '../../util/deploy/get-preferred-preview-url';
|
||||
import { Output } from '../../util/output';
|
||||
import { help } from './args';
|
||||
import { getDeploymentChecks } from '../../util/deploy/get-deployment-checks';
|
||||
|
||||
export default async (client: Client) => {
|
||||
const { output } = client;
|
||||
@@ -78,6 +79,7 @@ export default async (client: Client) => {
|
||||
// This is not an array in favor of matching
|
||||
// the config property name.
|
||||
'--regions': String,
|
||||
'--prebuilt': Boolean,
|
||||
'--prod': Boolean,
|
||||
'--confirm': Boolean,
|
||||
'-f': '--force',
|
||||
@@ -444,6 +446,7 @@ export default async (client: Client) => {
|
||||
build: { env: deploymentBuildEnv },
|
||||
forceNew: argv['--force'],
|
||||
withCache: argv['--with-cache'],
|
||||
prebuilt: argv['--prebuilt'],
|
||||
quiet,
|
||||
wantsPublic: argv['--public'] || localConfig.public,
|
||||
isFile,
|
||||
@@ -527,6 +530,20 @@ export default async (client: Client) => {
|
||||
return 1;
|
||||
}
|
||||
|
||||
if (deployment.checksConclusion === 'failed') {
|
||||
const { checks } = await getDeploymentChecks(client, deployment.id);
|
||||
const counters = new Map<string, number>();
|
||||
checks.forEach(c => {
|
||||
counters.set(c.conclusion, (counters.get(c.conclusion) ?? 0) + 1);
|
||||
});
|
||||
|
||||
const counterList = Array.from(counters)
|
||||
.map(([name, no]) => `${no} ${name}`)
|
||||
.join(', ');
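// e.g. "2 failed, 1 skipped" — a summary of check conclusions for the error message below.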
|
||||
output.error(`Running Checks: ${counterList}`);
|
||||
return 1;
|
||||
}
|
||||
|
||||
const deploymentResponse = await getDeploymentByIdOrHost(
|
||||
client,
|
||||
contextName,
|
||||
|
||||
@@ -6,7 +6,6 @@ import { ProjectEnvVariable } from '../../types';
|
||||
import Client from '../../util/client';
|
||||
import { getLinkedProject } from '../../util/projects/link';
|
||||
import { getFrameworks } from '../../util/get-frameworks';
|
||||
import { isSettingValue } from '../../util/is-setting-value';
|
||||
import { ProjectSettings } from '../../types';
|
||||
import getDecryptedEnvRecords from '../../util/get-decrypted-env-records';
|
||||
import setupAndLink from '../../util/link/setup-and-link';
|
||||
@@ -71,9 +70,9 @@ export default async function dev(
|
||||
frameworkSlug = framework.slug;
|
||||
}
|
||||
|
||||
const defaults = framework.settings.devCommand;
|
||||
if (isSettingValue(defaults)) {
|
||||
devCommand = defaults.value;
|
||||
const defaults = framework.settings.devCommand.value;
|
||||
if (defaults) {
|
||||
devCommand = defaults;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,6 +48,22 @@ const help = () => {
};

export default async function main(client: Client) {
if (process.env.__VERCEL_DEV_RUNNING) {
client.output.error(
`${cmd(
`${getPkgName()} dev`
)} must not recursively invoke itself. Check the Development Command in the Project Settings or the ${cmd(
'dev'
)} script in ${cmd('package.json')}`
);
client.output.error(
`Learn More: https://vercel.link/recursive-invocation-of-commands`
);
return 1;
} else {
process.env.__VERCEL_DEV_RUNNING = '1';
}

let argv;
let args;
const { output } = client;
@@ -90,22 +106,21 @@ export default async function main(client: Client) {
if (pkg) {
const { scripts } = pkg as PackageJson;

if (scripts && scripts.dev && /\bnow\b\W+\bdev\b/.test(scripts.dev)) {
output.error(
`The ${cmd('dev')} script in ${cmd(
'package.json'
)} must not contain ${cmd('now dev')}`
if (
scripts &&
scripts.dev &&
/\b(now|vercel)\b\W+\bdev\b/.test(scripts.dev)
) {
client.output.error(
`${cmd(
`${getPkgName()} dev`
)} must not recursively invoke itself. Check the Development Command in the Project Settings or the ${cmd(
'dev'
)} script in ${cmd('package.json')}`
);
output.error(`Learn More: http://err.sh/vercel/now-dev-as-dev-script`);
return 1;
}
if (scripts && scripts.dev && /\bvercel\b\W+\bdev\b/.test(scripts.dev)) {
output.error(
`The ${cmd('dev')} script in ${cmd(
'package.json'
)} must not contain ${cmd('vercel dev')}`
client.output.error(
`Learn More: https://vercel.link/recursive-invocation-of-commands`
);
output.error(`Learn More: http://err.sh/vercel/now-dev-as-dev-script`);
return 1;
}
}

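A quick sketch of the combined pattern this hunk introduces; the sample scripts are invented for illustration:

```ts
// The old code used two separate checks (one for `now dev`, one for `vercel dev`);
// the single regex below catches either CLI name inside a package.json dev script.
const recursiveDevScript = /\b(now|vercel)\b\W+\bdev\b/;

recursiveDevScript.test('vercel dev');       // true  -> would recurse, rejected
recursiveDevScript.test('now dev --listen'); // true  -> would recurse, rejected
recursiveDevScript.test('next dev');         // false -> fine, a framework dev server
```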
@@ -2,6 +2,7 @@ export default new Map([
['alias', 'alias'],
['aliases', 'alias'],
['billing', 'billing'],
['build', 'build'],
['cc', 'billing'],
['cert', 'certs'],
['certs', 'certs'],

@@ -13,7 +13,7 @@ import {
VERCEL_DIR,
VERCEL_DIR_PROJECT,
} from '../util/projects/link';
import { writeProjectSettings } from '../util/projects/write-project-settings';
import { writeProjectSettings } from '../util/projects/project-settings';
import pull from './env/pull';

const help = () => {
@@ -87,6 +87,8 @@ export default async function main(client: Client) {

const { project, org } = link;

client.config.currentTeam = org.type === 'team' ? org.id : undefined;

const result = await pull(
client,
project,

@@ -160,18 +160,26 @@ const main = async () => {
// * a path to deploy (as in: `vercel path/`)
// * a subcommand (as in: `vercel ls`)
const targetOrSubcommand = argv._[2];
const isBuildOrDev =
targetOrSubcommand === 'build' || targetOrSubcommand === 'dev';

output.print(
`${chalk.grey(
`${getTitleName()} CLI ${pkg.version}${
targetOrSubcommand === 'dev' ? ' dev (beta)' : ''
}${
isCanary || targetOrSubcommand === 'dev'
? ' — https://vercel.com/feedback'
: ''
}`
)}\n`
);
if (isBuildOrDev) {
console.log(
`${chalk.grey(
`${getTitleName()} CLI ${
pkg.version
} ${targetOrSubcommand} (beta) — https://vercel.com/feedback`
)}`
);
} else {
output.print(
`${chalk.grey(
`${getTitleName()} CLI ${pkg.version}${
isCanary ? ' — https://vercel.com/feedback' : ''
}`
)}\n`
);
}

// Handle `--version` directly
if (!targetOrSubcommand && argv['--version']) {
@@ -286,7 +294,14 @@ const main = async () => {

let authConfig = null;

const subcommandsWithoutToken = ['login', 'logout', 'help', 'init', 'update'];
const subcommandsWithoutToken = [
'login',
'logout',
'help',
'init',
'update',
'build',
];

if (authConfigExists) {
try {
@@ -393,20 +408,33 @@ const main = async () => {
} else if (commands.has(singular)) {
alternative = singular;
}
console.error(
error(
`The supplied argument ${param(targetOrSubcommand)} is ambiguous.` +
`\nIf you wish to deploy the ${fileType} ${param(
targetOrSubcommand
)}, first run "cd ${targetOrSubcommand}". ` +
if (targetOrSubcommand === 'build') {
output.note(
`If you wish to deploy the ${fileType} ${param(
targetOrSubcommand
)}, run ${getCommandName('deploy build')}.` +
(alternative
? `\nIf you wish to use the subcommand ${param(
targetOrSubcommand
)}, use ${param(alternative)} instead.`
: '')
)
);
return 1;
);
} else {
console.error(
error(
`The supplied argument ${param(targetOrSubcommand)} is ambiguous.` +
`\nIf you wish to deploy the ${fileType} ${param(
targetOrSubcommand
)}, first run "cd ${targetOrSubcommand}". ` +
(alternative
? `\nIf you wish to use the subcommand ${param(
targetOrSubcommand
)}, use ${param(alternative)} instead.`
: '')
)
);
return 1;
}
}

if (subcommandExists) {
@@ -601,6 +629,9 @@ const main = async () => {
case 'billing':
func = await import('./commands/billing');
break;
case 'build':
func = await import('./commands/build');
break;
case 'certs':
func = await import('./commands/certs');
break;

@@ -18,12 +18,8 @@ export const isDirectory = (path: string): boolean => {
const getGlobalPathConfig = (): string => {
let customPath: string | undefined;

try {
const argv = getArgs(process.argv.slice(2), {});
customPath = argv['--global-config'];
} catch (_error) {
// args are optional so consume error
}
const argv = getArgs(process.argv.slice(2), {}, { permissive: true });
customPath = argv['--global-config'];

const vercelDirectories = XDGAppPaths('com.vercel.cli').dataDirs();

@@ -7,12 +7,8 @@ import getArgs from '../../util/get-args';
export default function getLocalPathConfig(prefix: string) {
let customPath: string | undefined;

try {
const argv = getArgs(process.argv.slice(2), {});
customPath = argv['--local-config'];
} catch (_error) {
// args are optional so consume error
}
const argv = getArgs(process.argv.slice(2), {}, { permissive: true });
customPath = argv['--local-config'];

// If `--local-config` flag was specified, then that takes priority
if (customPath) {

packages/cli/src/util/deploy/get-deployment-checks.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
import Client from '../client';

type CheckStatus = 'registered' | 'running' | 'completed';
type CheckConclusion =
| 'canceled'
| 'failed'
| 'neutral'
| 'succeeded'
| 'skipped'
| 'stale';

export interface DeploymentCheck {
id: string;
status: CheckStatus;
conclusion: CheckConclusion;
name: string;
startedAt: number;
completedAt: number;
createdAt: number;
updatedAt: number;
integrationId: string;
rerequestable: boolean;
}

export interface DeploymentChecksResponse {
checks: DeploymentCheck[];
}

export async function getDeploymentChecks(
client: Client,
deploymentId: string
) {
const checksResponse = await client.fetch<DeploymentChecksResponse>(
`/v1/deployments/${encodeURIComponent(deploymentId)}/checks`
);
return checksResponse;
}
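A small sketch of how the deploy command shown earlier consumes this helper; the function name is hypothetical, and `client` stands for an already-authenticated CLI `Client` instance:

```ts
import Client from '../client';
import { getDeploymentChecks } from './get-deployment-checks';

// Summarize check conclusions the same way the deploy command does,
// e.g. "1 failed, 2 succeeded".
async function summarizeChecks(client: Client, deploymentId: string) {
  const { checks } = await getDeploymentChecks(client, deploymentId);

  const counters = new Map<string, number>();
  for (const check of checks) {
    counters.set(check.conclusion, (counters.get(check.conclusion) ?? 0) + 1);
  }

  return Array.from(counters)
    .map(([conclusion, count]) => `${count} ${conclusion}`)
    .join(', ');
}
```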
@@ -47,6 +47,7 @@ export default async function processDeployment({
force?: boolean;
withCache?: boolean;
org: Org;
prebuilt: boolean;
projectName: string;
isSettingUpProject: boolean;
skipAutoDetectionConfirmation?: boolean;
@@ -62,6 +63,7 @@ export default async function processDeployment({
withCache,
nowConfig,
quiet,
prebuilt,
} = args;

const { debug } = output;
@@ -83,6 +85,7 @@ export default async function processDeployment({
path: paths[0],
force,
withCache,
prebuilt,
skipAutoDetectionConfirmation,
};

@@ -179,10 +182,26 @@ export default async function processDeployment({
return event.payload;
}

if (event.type === 'ready') {
// If `checksState` is present, we can only continue to "Completing" if the checks finished,
// otherwise we might show "Completing" before "Running Checks".
if (
event.type === 'ready' &&
(event.payload.checksState
? event.payload.checksState === 'completed'
: true)
) {
output.spinner('Completing', 0);
}

if (event.type === 'checks-running') {
output.spinner('Running Checks', 0);
}

if (event.type === 'checks-conclusion-failed') {
output.stopSpinner();
return event.payload;
}

// Handle error events
if (event.type === 'error') {
output.stopSpinner();

@@ -89,6 +89,7 @@ import {
} from './types';
import { ProjectEnvVariable, ProjectSettings } from '../../types';
import exposeSystemEnvs from './expose-system-envs';
import { loadCliPlugins } from '../plugins';

const frontendRuntimeSet = new Set(
frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
@@ -967,7 +968,7 @@ export default class DevServer {
socket.destroy();
return;
}
const target = `http://localhost:${this.devProcessPort}`;
const target = `http://127.0.0.1:${this.devProcessPort}`;
this.output.debug(`Detected "upgrade" event, proxying to ${target}`);
this.proxy.ws(req, socket, head, { target });
});
@@ -1349,6 +1350,30 @@ export default class DevServer {
return false;
};

runDevMiddleware = async (
req: http.IncomingMessage,
res: http.ServerResponse
) => {
const { devMiddlewarePlugins } = await loadCliPlugins(
this.cwd,
this.output
);
try {
for (let plugin of devMiddlewarePlugins) {
const result = await plugin.plugin.runDevMiddleware(req, res, this.cwd);
if (result.finished) {
return result;
}
}
return { finished: false };
} catch (e) {
return {
finished: true,
error: e,
};
}
};

/**
* Serve project directory as a v2 deployment.
*/
@@ -1416,6 +1441,36 @@ export default class DevServer {
let prevUrl = req.url;
let prevHeaders: HttpHeadersConfig = {};

const middlewareResult = await this.runDevMiddleware(req, res);

if (middlewareResult) {
if (middlewareResult.error) {
this.sendError(
req,
res,
requestId,
'EDGE_FUNCTION_INVOCATION_FAILED',
500
);
return;
}
if (middlewareResult.finished) {
return;
}

if (middlewareResult.pathname) {
const origUrl = url.parse(req.url || '/', true);
origUrl.pathname = middlewareResult.pathname;
prevUrl = url.format(origUrl);
}
if (middlewareResult.query && prevUrl) {
const origUrl = url.parse(req.url || '/', true);
delete origUrl.search;
Object.assign(origUrl.query, middlewareResult.query);
prevUrl = url.format(origUrl);
}
}

for (const phase of phases) {
statusCode = undefined;

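Based on how DevServer reads the result above (`finished`, `error`, `pathname`, `query`), a dev-middleware plugin presumably resolves to a shape like the following; the concrete values are invented for illustration:

```ts
// Hypothetical results returned by a plugin's runDevMiddleware(req, res, cwd).
// finished: false -> routing continues, optionally with a rewritten URL.
const rewriteResult = {
  finished: false,
  pathname: '/blog/hello-world', // replaces the request path before route matching
  query: { preview: '1' },       // merged into the existing query string
};

// finished: true -> the middleware already wrote the response, routing stops.
const handledResult = {
  finished: true,
};
```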
@@ -1608,7 +1663,7 @@ export default class DevServer {
if (!match) {
// If the dev command is started, then proxy to it
if (this.devProcessPort) {
const upstream = `http://localhost:${this.devProcessPort}`;
const upstream = `http://127.0.0.1:${this.devProcessPort}`;
debug(`Proxying to frontend dev server: ${upstream}`);

// Add the Vercel platform proxy request headers
@@ -1755,7 +1810,7 @@ export default class DevServer {
return proxyPass(
req,
res,
`http://localhost:${port}`,
`http://127.0.0.1:${port}`,
this,
requestId,
false
@@ -1792,7 +1847,7 @@ export default class DevServer {
return proxyPass(
req,
res,
`http://localhost:${this.devProcessPort}`,
`http://127.0.0.1:${this.devProcessPort}`,
this,
requestId,
false
@@ -2106,7 +2161,10 @@ export default class DevServer {
process.stdout.write(data.replace(proxyPort, devPort));
});

p.on('exit', () => {
p.on('exit', (code: number) => {
if (code > 0) {
process.exit(code);
}
this.devProcessPort = undefined;
});

@@ -1,11 +1,9 @@
import { Stats } from 'fs';
import { sep, dirname, join, resolve } from 'path';
import { readJSON, lstat, readlink, readFile, realpath } from 'fs-extra';
import { lstat, readlink, readFile, realpath } from 'fs-extra';
import { isCanary } from './is-canary';
import { getPkgName } from './pkg-name';

// `npm` tacks a bunch of extra properties on the `package.json` file,
// so check for one of them to determine yarn vs. npm.
async function isYarn(): Promise<boolean> {
let s: Stats;
let binPath = process.argv[1];
@@ -20,8 +18,12 @@ async function isYarn(): Promise<boolean> {
}
}
const pkgPath = join(dirname(binPath), '..', 'package.json');
const pkg = await readJSON(pkgPath).catch(() => ({}));
return !('_id' in pkg);
/*
* Generally, pkgPath looks like:
* "/Users/username/.config/yarn/global/node_modules/vercel/package.json"
* "/usr/local/share/.config/yarn/global/node_modules/vercel/package.json"
*/
return pkgPath.includes(join('yarn', 'global'));
}

async function getConfigPrefix() {

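A tiny sketch of the new heuristic, reusing the yarn path from the comment above; the npm path is invented for contrast:

```ts
import { join } from 'path';

// The CLI now decides "yarn vs. npm" purely from where its package.json lives.
const isGlobalYarnInstall = (pkgPath: string) =>
  pkgPath.includes(join('yarn', 'global'));

isGlobalYarnInstall(
  '/usr/local/share/.config/yarn/global/node_modules/vercel/package.json'
); // true — installed via `yarn global add vercel`

isGlobalYarnInstall(
  '/usr/local/lib/node_modules/vercel/package.json'
); // false — a typical `npm i -g vercel` location
```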
@@ -36,6 +36,7 @@ export interface CreateOptions {
name: string;
project?: string;
wantsPublic: boolean;
prebuilt?: boolean;
meta: Dictionary<string>;
regions?: string[];
quiet?: boolean;
@@ -111,6 +112,7 @@ export default class Now extends EventEmitter {
// Latest
name,
project,
prebuilt = false,
wantsPublic,
meta,
regions,
@@ -165,6 +167,7 @@ export default class Now extends EventEmitter {
isSettingUpProject,
skipAutoDetectionConfirmation,
cwd,
prebuilt,
});

if (deployment && deployment.warnings) {

@@ -1,6 +1,5 @@
import crypto from 'crypto';
import ua from 'universal-analytics';
import { platform, release, userInfo } from 'os';
import { getPlatformEnv } from '@vercel/build-utils';

import userAgent from './ua-browser';
@@ -16,10 +15,15 @@ export const shouldCollectMetrics =

export const metrics = (): ua.Visitor => {
const token =
typeof config.token === 'string' ? config.token : platform() + release();
const salt = userInfo().username;
typeof config.token === 'string'
? config.token
: process.platform + process.arch;
const salt =
(process.env.USER || '') +
(process.env.LANG || '') +
(process.env.SHELL || '');
const hash = crypto
.pbkdf2Sync(token, salt, 1000, 64, 'sha512')
.pbkdf2Sync(token, salt, 100, 64, 'sha512')
.toString('hex')
.substring(0, 24);

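A self-contained sketch of the anonymous-ID derivation after this change; the fallback token and salt inputs are the ones shown in the diff, everything else is illustrative:

```ts
import crypto from 'crypto';

// Derive a short, stable, anonymous identifier for analytics from coarse,
// non-identifying inputs (platform/arch plus a few environment variables).
const token = process.platform + process.arch; // fallback when no config token exists
const salt =
  (process.env.USER || '') +
  (process.env.LANG || '') +
  (process.env.SHELL || '');

const anonymousId = crypto
  .pbkdf2Sync(token, salt, 100, 64, 'sha512') // fewer iterations keeps CLI startup fast
  .toString('hex')
  .substring(0, 24);
```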
packages/cli/src/util/output/color-name-cache.ts (new file, 24 lines)
@@ -0,0 +1,24 @@
import chalk from 'chalk';

const colors = [
chalk.cyan,
chalk.magenta,
chalk.green,
chalk.yellow,
chalk.blue,
];

let childIndex = 0;
const packageNameColorCache = new Map<string, chalk.Chalk>();

/** Return a consistent (gradient) color for a given package name */
export function getColorForPkgName(pkgName: string) {
let color = packageNameColorCache.get(pkgName);

if (!color) {
color = colors[childIndex++ % colors.length];
packageNameColorCache.set(pkgName, color);
}

return color;
}
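A short usage sketch (the plugin name is hypothetical): the same package always maps to the same color, so interleaved plugin log lines stay visually grouped.

```ts
import { getColorForPkgName } from './color-name-cache';

const pkgName = 'vercel-plugin-example'; // hypothetical plugin package
const color = getColorForPkgName(pkgName);

// Repeated lookups return the same chalk color instance,
// so every log line from this plugin is tinted consistently.
console.log(color(`${pkgName}:`), 'build step finished');
console.log(getColorForPkgName(pkgName) === color); // true
```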
@@ -125,10 +125,14 @@ export class Output {
this.debug(`Spinner invoked (${message}) with a ${delay}ms delay`);
return;
}
if (this._spinner) {
this._spinner.text = message;
if (this.isTTY) {
if (this._spinner) {
this._spinner.text = message;
} else {
this._spinner = wait(message, delay);
}
} else {
this._spinner = wait(message, delay);
this.print(`${message}\n`);
}
};

@@ -1,11 +1,6 @@
import { relative as nativeRelative } from 'path';

const isWin = process.platform === 'win32';
import { normalizePath } from '@vercel/build-utils';

export function relative(a: string, b: string): string {
let p = nativeRelative(a, b);
if (isWin) {
p = p.replace(/\\/g, '/');
}
return p;
return normalizePath(nativeRelative(a, b));
}

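A quick sketch of the behavior this refactor delegates to `normalizePath` from `@vercel/build-utils`; the helper name and example paths below are invented, and the forward-slash conversion mirrors the removed hand-rolled code:

```ts
import { relative as nativeRelative } from 'path';

// Presumed equivalent of the helper after the change: compute a relative path
// and normalize whatever separator the platform uses into forward slashes.
const toPosix = (p: string) => p.replace(/\\/g, '/');

export function relativePosix(a: string, b: string): string {
  return toPosix(nativeRelative(a, b));
}

// On Windows: relativePosix('C:\\repo', 'C:\\repo\\api\\index.js') === 'api/index.js'
// On POSIX:   relativePosix('/repo', '/repo/api/index.js')          === 'api/index.js'
```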
packages/cli/src/util/plugins.ts (new file, 76 lines)
@@ -0,0 +1,76 @@
import code from '../util/output/code';
import { getColorForPkgName } from '../util/output/color-name-cache';
import cliPkgJson from '../util/pkg';
import { scanParentDirs } from '@vercel/build-utils';
import { Output } from './output';

const VERCEL_PLUGIN_PREFIX = 'vercel-plugin-';

export async function loadCliPlugins(cwd: string, output: Output) {
const { packageJson } = await scanParentDirs(cwd, true);

let pluginCount = 0;
const preBuildPlugins = [];
const buildPlugins = [];
const devServerPlugins = [];
const devMiddlewarePlugins = [];
const deps = new Set(
[
...Object.keys(packageJson?.dependencies || {}),
...Object.keys(packageJson?.devDependencies || {}),
...Object.keys(cliPkgJson.dependencies),
].filter(dep => dep.startsWith(VERCEL_PLUGIN_PREFIX))
);

for (let dep of deps) {
pluginCount++;
const resolved = require.resolve(dep, {
paths: [cwd, process.cwd(), __dirname],
});
let plugin;
try {
plugin = require(resolved);

const color = getColorForPkgName(dep);
if (typeof plugin.preBuild === 'function') {
preBuildPlugins.push({
plugin,
name: dep,
color,
});
}
if (typeof plugin.build === 'function') {
buildPlugins.push({
plugin,
name: dep,
color,
});
}
if (typeof plugin.startDevServer === 'function') {
devServerPlugins.push({
plugin,
name: dep,
color,
});
}
if (typeof plugin.runDevMiddleware === 'function') {
devMiddlewarePlugins.push({
plugin,
name: dep,
color,
});
}
} catch (error) {
output.error(`Failed to import ${code(dep)}`);
throw error;
}
}

return {
pluginCount,
preBuildPlugins,
buildPlugins,
devServerPlugins,
devMiddlewarePlugins,
};
}
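For context, a minimal sketch of a plugin package that `loadCliPlugins` would pick up; the package name and hook bodies are hypothetical, while the hook names (`preBuild`, `build`, `startDevServer`, `runDevMiddleware`) are the ones probed above:

```ts
// Hypothetical package "vercel-plugin-example" (its entry module).
// It is discovered because its name starts with "vercel-plugin-" and it is
// listed in the project's dependencies, devDependencies, or the CLI's own deps.
export async function preBuild() {
  // prepare anything the build step needs
}

export async function build() {
  // produce build output
}

export async function runDevMiddleware(
  req: import('http').IncomingMessage,
  res: import('http').ServerResponse,
  cwd: string
) {
  // let `vercel dev` continue routing unchanged
  return { finished: false };
}
```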
packages/cli/src/util/projects/find-framework.ts (new file, 5 lines)
@@ -0,0 +1,5 @@
import { Framework, frameworks } from '@vercel/frameworks';

export function findFramework(slug?: string | null) {
return (frameworks as any as Framework[]).find(f => f.slug === slug);
}
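A small usage sketch; the slug value is just an example of the kind stored in project settings:

```ts
import { findFramework } from './find-framework';

const framework = findFramework('gatsby'); // slug as stored in project settings
console.log(framework?.name);              // framework display name, if the slug is known
console.log(findFramework(null));          // undefined — unknown or missing slug
```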
@@ -23,6 +23,7 @@ const readFile = promisify(fs.readFile);
const writeFile = promisify(fs.writeFile);

export const VERCEL_DIR = '.vercel';
export const VERCEL_OUTPUT_DIR = '.output';
export const VERCEL_DIR_FALLBACK = '.now';
export const VERCEL_DIR_README = 'README.txt';
export const VERCEL_DIR_PROJECT = 'project.json';
@@ -67,12 +68,14 @@ async function getLink(path?: string): Promise<ProjectLink | null> {
return getLinkFromDir(dir);
}

async function getLinkFromDir(dir: string): Promise<ProjectLink | null> {
export async function getLinkFromDir<T = ProjectLink>(
dir: string
): Promise<T | null> {
try {
const json = await readFile(join(dir, VERCEL_DIR_PROJECT), 'utf8');

const ajv = new AJV();
const link: ProjectLink = JSON.parse(json);
const link: T = JSON.parse(json);

if (!ajv.validate(linkSchema, link)) {
throw new Error(
@@ -241,16 +244,27 @@ export async function linkFolderToProject(
try {
const gitIgnorePath = join(path, '.gitignore');

const gitIgnore = await readFile(gitIgnorePath, 'utf8').catch(() => null);
const EOL = gitIgnore && gitIgnore.includes('\r\n') ? '\r\n' : os.EOL;
let gitIgnore =
(await readFile(gitIgnorePath, 'utf8').catch(() => null)) ?? '';
const EOL = gitIgnore.includes('\r\n') ? '\r\n' : os.EOL;
let contentModified = false;

if (!gitIgnore || !gitIgnore.split(EOL).includes(VERCEL_DIR)) {
await writeFile(
gitIgnorePath,
gitIgnore
? `${gitIgnore}${EOL}${VERCEL_DIR}${EOL}`
: `${VERCEL_DIR}${EOL}`
);
if (!gitIgnore.split(EOL).includes(VERCEL_DIR)) {
gitIgnore += `${
gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
}${VERCEL_DIR}${EOL}`;
contentModified = true;
}

if (!gitIgnore.split(EOL).includes(VERCEL_OUTPUT_DIR)) {
gitIgnore += `${
gitIgnore.endsWith(EOL) || gitIgnore.length === 0 ? '' : EOL
}${VERCEL_OUTPUT_DIR}${EOL}`;
contentModified = true;
}

if (contentModified) {
await writeFile(gitIgnorePath, gitIgnore);
isGitIgnoreUpdated = true;
}
} catch (error) {

|
||||
import { writeFile } from 'fs-extra';
|
||||
import { Org, Project } from '../../types';
|
||||
import { VERCEL_DIR, VERCEL_DIR_PROJECT } from './link';
|
||||
import { Org, Project, ProjectLink } from '../../types';
|
||||
import { getLinkFromDir, VERCEL_DIR, VERCEL_DIR_PROJECT } from './link';
|
||||
import { join } from 'path';
|
||||
|
||||
export type ProjectLinkAndSettings = ProjectLink & {
|
||||
settings: {
|
||||
buildCommand: Project['buildCommand'];
|
||||
devCommand: Project['devCommand'];
|
||||
outputDirectory: Project['outputDirectory'];
|
||||
directoryListing: Project['directoryListing'];
|
||||
rootDirectory: Project['rootDirectory'];
|
||||
framework: Project['framework'];
|
||||
};
|
||||
};
|
||||
|
||||
// writeProjectSettings writes the project configuration to `vercel/project.json`
|
||||
// Write the project configuration to `.vercel/project.json`
|
||||
// that is needed for `vercel build` and `vercel dev` commands
|
||||
export async function writeProjectSettings(
|
||||
@@ -18,11 +30,15 @@ export async function writeProjectSettings(
|
||||
settings: {
|
||||
buildCommand: project.buildCommand,
|
||||
devCommand: project.devCommand,
|
||||
directoryListing: project.directoryListing,
|
||||
outputDirectory: project.outputDirectory,
|
||||
directoryListing: project.directoryListing,
|
||||
rootDirectory: project.rootDirectory,
|
||||
framework: project.framework,
|
||||
},
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
export async function readProjectSettings(cwd: string) {
|
||||
return await getLinkFromDir<ProjectLinkAndSettings>(cwd);
|
||||
}
|

packages/cli/test/fixtures/unit/edge-middleware-error/_middleware.js (new file, 3 lines, vendored)
@@ -0,0 +1,3 @@
export default () => {
throw new Error('asdf');
};

packages/cli/test/fixtures/unit/edge-middleware-invalid-response/_middleware.js (new file, 3 lines, vendored)
@@ -0,0 +1,3 @@
export default function () {
return 'freecandy';
}

packages/cli/test/fixtures/unit/edge-middleware-ts/_middleware.ts (new file, 5 lines, vendored)
@@ -0,0 +1,5 @@
import { response } from './response';

export default () => {
return new Response(response);
};

packages/cli/test/fixtures/unit/edge-middleware-ts/response.ts (new file, 1 line, vendored)
@@ -0,0 +1 @@
export const response = 'response';

packages/cli/test/fixtures/unit/edge-middleware/_middleware.js (new file, 10 lines, vendored)
@@ -0,0 +1,10 @@
import response from './response.json';

export default function () {
return new Response(JSON.stringify(response), {
status: 200,
headers: {
'Content-Type': 'application/json',
},
});
}

packages/cli/test/fixtures/unit/edge-middleware/index.html (new file, 1 line, vendored)
@@ -0,0 +1 @@
not hello world

packages/cli/test/fixtures/unit/edge-middleware/response.json (new file, 3 lines, vendored)
@@ -0,0 +1,3 @@
{
"text": "hello world"
}
packages/cli/test/fixtures/unit/vercel-build-gatsby/.gitignore (new file, 69 lines, vendored)
@@ -0,0 +1,69 @@
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*

# Runtime data
pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# nyc test coverage
.nyc_output

# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt

# Bower dependency directory (https://bower.io/)
bower_components

# node-waf configuration
.lock-wscript

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release

# Dependency directories
node_modules/
jspm_packages/

# Typescript v1 declaration files
typings/

# Optional npm cache directory
.npm

# Optional eslint cache
.eslintcache

# Optional REPL history
.node_repl_history

# Output of 'npm pack'
*.tgz

# dotenv environment variable files
.env*

# gatsby files
.cache/
public

# Mac files
.DS_Store

# Yarn
yarn-error.log
.pnp/
.pnp.js
# Yarn Integrity file
.yarn-integrity
packages/cli/test/fixtures/unit/vercel-build-gatsby/.prettierignore (new file, 4 lines, vendored)
@@ -0,0 +1,4 @@
.cache
package.json
package-lock.json
public

packages/cli/test/fixtures/unit/vercel-build-gatsby/.prettierrc (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
{
"endOfLine": "lf",
"semi": false,
"singleQuote": false,
"tabWidth": 2,
"trailingComma": "es5"
}

packages/cli/test/fixtures/unit/vercel-build-gatsby/LICENSE (new file, 22 lines, vendored)
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2015 gatsbyjs

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
packages/cli/test/fixtures/unit/vercel-build-gatsby/README.md (new file, 97 lines, vendored)
@@ -0,0 +1,97 @@
<!-- AUTO-GENERATED-CONTENT:START (STARTER) -->
<p align="center">
<a href="https://www.gatsbyjs.org">
<img alt="Gatsby" src="https://www.gatsbyjs.org/monogram.svg" width="60" />
</a>
</p>
<h1 align="center">
Gatsby's default starter
</h1>

Kick off your project with this default boilerplate. This starter ships with the main Gatsby configuration files you might need to get up and running blazing fast with the blazing fast app generator for React.

_Have another more specific idea? You may want to check out our vibrant collection of [official and community-created starters](https://www.gatsbyjs.org/docs/gatsby-starters/)._

## 🚀 Quick start

1. **Create a Gatsby site.**

Use the Gatsby CLI to create a new site, specifying the default starter.

```shell
# create a new Gatsby site using the default starter
gatsby new my-default-starter https://github.com/gatsbyjs/gatsby-starter-default
```

1. **Start developing.**

Navigate into your new site’s directory and start it up.

```shell
cd my-default-starter/
gatsby develop
```

1. **Open the source code and start editing!**

Your site is now running at `http://localhost:8000`!

_Note: You'll also see a second link: _`http://localhost:8000/___graphql`_. This is a tool you can use to experiment with querying your data. Learn more about using this tool in the [Gatsby tutorial](https://www.gatsbyjs.org/tutorial/part-five/#introducing-graphiql)._

Open the `my-default-starter` directory in your code editor of choice and edit `src/pages/index.js`. Save your changes and the browser will update in real time!

## 🧐 What's inside?

A quick look at the top-level files and directories you'll see in a Gatsby project.

.
├── node_modules
├── src
├── .gitignore
├── .prettierrc
├── gatsby-browser.js
├── gatsby-config.js
├── gatsby-node.js
├── gatsby-ssr.js
├── LICENSE
├── package-lock.json
├── package.json
└── README.md

1. **`/node_modules`**: This directory contains all of the modules of code that your project depends on (npm packages) are automatically installed.

2. **`/src`**: This directory will contain all of the code related to what you will see on the front-end of your site (what you see in the browser) such as your site header or a page template. `src` is a convention for “source code”.

3. **`.gitignore`**: This file tells git which files it should not track / not maintain a version history for.

4. **`.prettierrc`**: This is a configuration file for [Prettier](https://prettier.io/). Prettier is a tool to help keep the formatting of your code consistent.

5. **`gatsby-browser.js`**: This file is where Gatsby expects to find any usage of the [Gatsby browser APIs](https://www.gatsbyjs.org/docs/browser-apis/) (if any). These allow customization/extension of default Gatsby settings affecting the browser.

6. **`gatsby-config.js`**: This is the main configuration file for a Gatsby site. This is where you can specify information about your site (metadata) like the site title and description, which Gatsby plugins you’d like to include, etc. (Check out the [config docs](https://www.gatsbyjs.org/docs/gatsby-config/) for more detail).

7. **`gatsby-node.js`**: This file is where Gatsby expects to find any usage of the [Gatsby Node APIs](https://www.gatsbyjs.org/docs/node-apis/) (if any). These allow customization/extension of default Gatsby settings affecting pieces of the site build process.

8. **`gatsby-ssr.js`**: This file is where Gatsby expects to find any usage of the [Gatsby server-side rendering APIs](https://www.gatsbyjs.org/docs/ssr-apis/) (if any). These allow customization of default Gatsby settings affecting server-side rendering.

9. **`LICENSE`**: Gatsby is licensed under the MIT license.

10. **`package-lock.json`** (See `package.json` below, first). This is an automatically generated file based on the exact versions of your npm dependencies that were installed for your project. **(You won’t change this file directly).**

11. **`package.json`**: A manifest file for Node.js projects, which includes things like metadata (the project’s name, author, etc). This manifest is how npm knows which packages to install for your project.

12. **`README.md`**: A text file containing useful reference information about your project.

## 🎓 Learning Gatsby

Looking for more guidance? Full documentation for Gatsby lives [on the website](https://www.gatsbyjs.org/). Here are some places to start:

- **For most developers, we recommend starting with our [in-depth tutorial for creating a site with Gatsby](https://www.gatsbyjs.org/tutorial/).** It starts with zero assumptions about your level of ability and walks through every step of the process.

- **To dive straight into code samples, head [to our documentation](https://www.gatsbyjs.org/docs/).** In particular, check out the _Guides_, _API Reference_, and _Advanced Tutorials_ sections in the sidebar.

## 💫 Deploy

[](https://app.netlify.com/start/deploy?repository=https://github.com/gatsbyjs/gatsby-starter-default)

<!-- AUTO-GENERATED-CONTENT:END -->
packages/cli/test/fixtures/unit/vercel-build-gatsby/gatsby-browser.js (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
/**
* Implement Gatsby's Browser APIs in this file.
*
* See: https://www.gatsbyjs.org/docs/browser-apis/
*/

// You can delete this file if you're not using it

packages/cli/test/fixtures/unit/vercel-build-gatsby/gatsby-config.js (new file, 34 lines, vendored)
@@ -0,0 +1,34 @@
module.exports = {
siteMetadata: {
title: `Gatsby Default Starter`,
description: `Kick off your next, great Gatsby project with this default starter. This barebones starter ships with the main Gatsby configuration files you might need.`,
author: `@gatsbyjs`,
},
plugins: [
`gatsby-plugin-react-helmet`,
{
resolve: `gatsby-source-filesystem`,
options: {
name: `images`,
path: `${__dirname}/src/images`,
},
},
`gatsby-transformer-sharp`,
`gatsby-plugin-sharp`,
{
resolve: `gatsby-plugin-manifest`,
options: {
name: `05-zero-config-gatsby`,
short_name: `starter`,
start_url: `/`,
background_color: `#663399`,
theme_color: `#663399`,
display: `minimal-ui`,
icon: `src/images/gatsby-icon.png`, // This path is relative to the root of the site.
},
},
// this (optional) plugin enables Progressive Web App + Offline functionality
// To learn more, visit: https://gatsby.dev/offline
// `gatsby-plugin-offline`,
],
}

packages/cli/test/fixtures/unit/vercel-build-gatsby/gatsby-node.js (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
/**
* Implement Gatsby's Node APIs in this file.
*
* See: https://www.gatsbyjs.org/docs/node-apis/
*/

// You can delete this file if you're not using it

packages/cli/test/fixtures/unit/vercel-build-gatsby/gatsby-ssr.js (new file, 7 lines, vendored)
@@ -0,0 +1,7 @@
/**
* Implement Gatsby's SSR (Server Side Rendering) APIs in this file.
*
* See: https://www.gatsbyjs.org/docs/ssr-apis/
*/

// You can delete this file if you're not using it
packages/cli/test/fixtures/unit/vercel-build-gatsby/package.json (new file, 43 lines, vendored)
@@ -0,0 +1,43 @@
{
"name": "vercel-build-gatsby",
"private": true,
"description": "A simple starter to get up and developing quickly with Gatsby",
"version": "0.1.0",
"author": "Kyle Mathews <mathews.kyle@gmail.com>",
"dependencies": {
"gatsby": "^2.17.6",
"gatsby-image": "^2.2.30",
"gatsby-plugin-manifest": "^2.2.25",
"gatsby-plugin-offline": "^3.0.17",
"gatsby-plugin-react-helmet": "^3.1.13",
"gatsby-plugin-sharp": "^2.2.34",
"gatsby-source-filesystem": "^2.1.35",
"gatsby-transformer-sharp": "^2.3.1",
"prop-types": "^15.7.2",
"react": "^16.11.0",
"react-dom": "^16.11.0",
"react-helmet": "^5.2.1"
},
"devDependencies": {
"prettier": "^1.18.2"
},
"keywords": [
"gatsby"
],
"license": "MIT",
"scripts": {
"build": "gatsby build",
"develop": "gatsby develop",
"format": "prettier --write \"**/*.{js,jsx,json,md}\"",
"start": "npm run develop",
"serve": "gatsby serve",
"test": "echo \"Write tests! -> https://gatsby.dev/unit-testing\" && exit 1"
},
"repository": {
"type": "git",
"url": "https://github.com/gatsbyjs/gatsby-starter-default"
},
"bugs": {
"url": "https://github.com/gatsbyjs/gatsby/issues"
}
}

packages/cli/test/fixtures/unit/vercel-build-gatsby/src/components/header.js (new file, 42 lines, vendored)
@@ -0,0 +1,42 @@
import { Link } from "gatsby"
import PropTypes from "prop-types"
import React from "react"

const Header = ({ siteTitle }) => (
<header
style={{
background: `rebeccapurple`,
marginBottom: `1.45rem`,
}}
>
<div
style={{
margin: `0 auto`,
maxWidth: 960,
padding: `1.45rem 1.0875rem`,
}}
>
<h1 style={{ margin: 0 }}>
<Link
to="/"
style={{
color: `white`,
textDecoration: `none`,
}}
>
{siteTitle}
</Link>
</h1>
</div>
</header>
)

Header.propTypes = {
siteTitle: PropTypes.string,
}

Header.defaultProps = {
siteTitle: ``,
}

export default Header
Some files were not shown because too many files have changed in this diff.