Mirror of https://github.com/LukeHagar/vercel.git — synced 2025-12-11 12:57:46 +00:00

Compare commits: @vercel/py ... @vercel/py (60 commits)
Commit SHA1s:
eed39913e1, 03e9047bc9, 0e35205bf1, e42fe34c4a, 3ece7ac969, 4f832acf90,
918726e01d, dc2ddf867b, ee1211416f, 570fd24e29, 40681ad0f4, f20703b15d,
68eb197112, b8b87b96da, 967c24f1bb, 609f781234, 998f6bf6e6, 7511c2ef85,
71425fac1f, 6973cd5989, 24785ff50a, aa3ad4478c, f0d73049ca, 6cef07354a,
50af9f5b75, af76b134d8, c7640005fd, 3deed977ba, b38c360e36, 1595e48414,
e6b0ee3e3c, a247e31688, dc02e763a4, 8567fc0de6, 4f8f3d373f, debb85b690,
bfef989ada, 4e0b6c5eaf, 0ace69ef75, b7b7923f92, 8167233c56, 32ee6aba92,
b48f7a7e6e, a961c9b992, cf7c50d691, f4be388a1f, c1bc53dea8, 6855e3df54,
0d39dbd1d9, 509c85182a, ae801e563d, 0e8278f490, 0d302a6f48, 4e4f5f28a2,
5205a4ec4b, 2c15e496ed, 1f0ca46626, 17cb5f1bc6, b095031292, f50bcbc0ba
4  .gitattributes  vendored
@@ -8,3 +8,7 @@ packages/*/test/* linguist-vendored
# Go build fails with Windows line endings.
*.go text eol=lf
go.mod text eol=lf

# Mark certain files as "binary" -- hide diffs
**/test/fixtures/**/git/**/* binary
**/test/fixtures/**/git/**/* linguist-generated
4  .prettierignore  Normal file
@@ -0,0 +1,4 @@
# https://prettier.io/docs/en/ignore.html

# ignore this file with an intentional syntax error
packages/cli/test/dev/fixtures/edge-function-error/api/edge-error-syntax.js

@@ -1,18 +1 @@
*

# general
!utils/
!utils/run.js
!.yarnrc
!yarn.lock
!package.json
!turbo.json

# api
!api/
!api/**

# packages
!packages/
!packages/frameworks
!packages/frameworks/**
packages/*/test/**
@@ -1,5 +1,6 @@
import fs from 'fs/promises';
import { join } from 'path';
import { join, dirname } from 'path';
import execa from 'execa';
import { getExampleList } from '../examples/example-list';
import { mapOldToNew } from '../examples/map-old-to-new';

@@ -40,7 +41,32 @@ async function main() {
    JSON.stringify([...existingExamples, ...oldExamples])
  );

  const { stdout: sha } = await execa('git', ['rev-parse', '--short', 'HEAD'], {
    cwd: repoRoot,
  });

  const tarballsDir = join(pubDir, 'tarballs');
  const packagesDir = join(repoRoot, 'packages');
  const packages = await fs.readdir(packagesDir);
  for (const pkg of packages) {
    const fullDir = join(packagesDir, pkg);
    const packageJsonRaw = await fs.readFile(
      join(fullDir, 'package.json'),
      'utf-8'
    );
    const packageJson = JSON.parse(packageJsonRaw);
    const tarballName = `${packageJson.name
      .replace('@', '')
      .replace('/', '-')}-v${packageJson.version}-${sha.trim()}.tgz`;
    const destTarballPath = join(tarballsDir, `${packageJson.name}.tgz`);
    await fs.mkdir(dirname(destTarballPath), { recursive: true });
    await fs.copyFile(join(fullDir, tarballName), destTarballPath);
  }

  console.log('Completed building static frontend.');
}

main().catch(console.error);
main().catch(err => {
  console.log('error running build:', err);
  process.exit(1);
});
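For orientation, the copy loop above expects a tarball named after the package and the current short commit SHA to already exist in each package directory. A minimal sketch of that naming, using a hypothetical package and SHA:

```ts
// Hypothetical inputs — the real values come from packages/*/package.json and
// `git rev-parse --short HEAD` at build time.
const packageJson = { name: '@vercel/node', version: '2.3.0' };
const sha = 'abc1234';

const tarballName = `${packageJson.name
  .replace('@', '')
  .replace('/', '-')}-v${packageJson.version}-${sha.trim()}.tgz`;

console.log(tarballName); // "vercel-node-v2.3.0-abc1234.tgz"
// The loop then copies that file to `<tarballsDir>/@vercel/node.tgz`, giving each
// package a stable download path regardless of version or commit.
```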
2  examples/create-react-app/.env.production  Normal file
@@ -0,0 +1,2 @@
# `REACT_APP` prefix is required to expose to client-side
REACT_APP_VERCEL_ANALYTICS_ID=$VERCEL_ANALYTICS_ID
@@ -1,13 +1,13 @@
{
  "private": true,
  "dependencies": {
    "@testing-library/jest-dom": "^5.16.1",
    "@testing-library/react": "^12.1.2",
    "@testing-library/user-event": "^13.5.0",
    "react": "^17.0.2",
    "react-dom": "^17.0.2",
    "react-scripts": "5.0.0",
    "web-vitals": "^2.1.3"
    "@testing-library/jest-dom": "^5.16.4",
    "@testing-library/react": "^13.3.0",
    "@testing-library/user-event": "^14.2.0",
    "react": "^18.1.0",
    "react-dom": "^18.1.0",
    "react-scripts": "5.0.1",
    "web-vitals": "^2.1.4"
  },
  "scripts": {
    "start": "react-scripts start",
@@ -3,6 +3,7 @@ import ReactDOM from 'react-dom';
import './index.css';
import App from './App';
import reportWebVitals from './reportWebVitals';
import { sendToVercelAnalytics } from './vitals';

ReactDOM.render(
  <React.StrictMode>
@@ -11,7 +12,4 @@ ReactDOM.render(
  document.getElementById('root')
);

// If you want to start measuring performance in your app, pass a function
// to log results (for example: reportWebVitals(console.log))
// or send to an analytics endpoint. Learn more: https://bit.ly/CRA-vitals
reportWebVitals();
reportWebVitals(sendToVercelAnalytics);
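Note that the example keeps the React 17-style ReactDOM.render call even though the dependencies move to React 18, which React 18 accepts but warns about. A hedged sketch of the equivalent React 18 entry point, not part of this diff:

```tsx
// Sketch only — this uses the react-dom/client API introduced in React 18,
// not what the example above ships.
import { createRoot } from 'react-dom/client';
import App from './App';

const root = createRoot(document.getElementById('root')!);
root.render(<App />);
```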
41  examples/create-react-app/src/vitals.js  Normal file
@@ -0,0 +1,41 @@
const vitalsUrl = 'https://vitals.vercel-analytics.com/v1/vitals';

function getConnectionSpeed() {
  return 'connection' in navigator &&
    navigator['connection'] &&
    'effectiveType' in navigator['connection']
    ? navigator['connection']['effectiveType']
    : '';
}

export function sendToVercelAnalytics(metric) {
  const analyticsId = process.env.REACT_APP_VERCEL_ANALYTICS_ID;
  if (!analyticsId) {
    return;
  }

  const body = {
    dsn: analyticsId,
    id: metric.id,
    page: window.location.pathname,
    href: window.location.href,
    event_name: metric.name,
    value: metric.value.toString(),
    speed: getConnectionSpeed(),
  };

  console.log({ body });
  const blob = new Blob([new URLSearchParams(body).toString()], {
    // This content type is necessary for `sendBeacon`
    type: 'application/x-www-form-urlencoded',
  });
  if (navigator.sendBeacon) {
    navigator.sendBeacon(vitalsUrl, blob);
  } else
    fetch(vitalsUrl, {
      body: blob,
      method: 'POST',
      credentials: 'omit',
      keepalive: true,
    });
}
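For context, a minimal sketch of how the new module is exercised: index.js (changed above) registers it with CRA's reportWebVitals helper, and each web-vitals metric is posted as a form-encoded beacon. The metric values below are invented for illustration:

```ts
// In src/index.js (already shown in this diff):
//   reportWebVitals(sendToVercelAnalytics);
//
// A web-vitals metric delivered to the callback looks roughly like:
const metric = { id: 'v2-1653238941-377', name: 'LCP', value: 1234.5 };
// sendToVercelAnalytics(metric) then sends a body equivalent to:
//   dsn=<REACT_APP_VERCEL_ANALYTICS_ID>&id=v2-1653238941-377&page=/
//   &href=https://example.com/&event_name=LCP&value=1234.5&speed=4g
```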
@@ -1484,10 +1484,10 @@
|
||||
"@svgr/plugin-svgo" "^5.5.0"
|
||||
loader-utils "^2.0.0"
|
||||
|
||||
"@testing-library/dom@^8.0.0":
|
||||
version "8.11.2"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.11.2.tgz#fc110c665a066c2287be765e4a35ba8dad737015"
|
||||
integrity sha512-idsS/cqbYudXcVWngc1PuWNmXs416oBy2g/7Q8QAUREt5Z3MUkAL2XJD7xazLJ6esDfqRDi/ZBxk+OPPXitHRw==
|
||||
"@testing-library/dom@^8.5.0":
|
||||
version "8.13.0"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/dom/-/dom-8.13.0.tgz#bc00bdd64c7d8b40841e27a70211399ad3af46f5"
|
||||
integrity sha512-9VHgfIatKNXQNaZTtLnalIy0jNZzY35a4S3oi08YAt9Hv1VsfZ/DfA45lM8D/UhtHBGJ4/lGwp0PZkVndRkoOQ==
|
||||
dependencies:
|
||||
"@babel/code-frame" "^7.10.4"
|
||||
"@babel/runtime" "^7.12.5"
|
||||
@@ -1498,10 +1498,10 @@
|
||||
lz-string "^1.4.4"
|
||||
pretty-format "^27.0.2"
|
||||
|
||||
"@testing-library/jest-dom@^5.16.1":
|
||||
version "5.16.1"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.16.1.tgz#3db7df5ae97596264a7da9696fe14695ba02e51f"
|
||||
integrity sha512-ajUJdfDIuTCadB79ukO+0l8O+QwN0LiSxDaYUTI4LndbbUsGi6rWU1SCexXzBA2NSjlVB9/vbkasQIL3tmPBjw==
|
||||
"@testing-library/jest-dom@^5.16.4":
|
||||
version "5.16.4"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/jest-dom/-/jest-dom-5.16.4.tgz#938302d7b8b483963a3ae821f1c0808f872245cd"
|
||||
integrity sha512-Gy+IoFutbMQcky0k+bqqumXZ1cTGswLsFqmNLzNdSKkU9KGV2u9oXhukCbbJ9/LRPKiqwxEE8VpV/+YZlfkPUA==
|
||||
dependencies:
|
||||
"@babel/runtime" "^7.9.2"
|
||||
"@types/testing-library__jest-dom" "^5.9.1"
|
||||
@@ -1513,20 +1513,19 @@
|
||||
lodash "^4.17.15"
|
||||
redent "^3.0.0"
|
||||
|
||||
"@testing-library/react@^12.1.2":
|
||||
version "12.1.2"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-12.1.2.tgz#f1bc9a45943461fa2a598bb4597df1ae044cfc76"
|
||||
integrity sha512-ihQiEOklNyHIpo2Y8FREkyD1QAea054U0MVbwH1m8N9TxeFz+KoJ9LkqoKqJlzx2JDm56DVwaJ1r36JYxZM05g==
|
||||
"@testing-library/react@^13.3.0":
|
||||
version "13.3.0"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/react/-/react-13.3.0.tgz#bf298bfbc5589326bbcc8052b211f3bb097a97c5"
|
||||
integrity sha512-DB79aA426+deFgGSjnf5grczDPiL4taK3hFaa+M5q7q20Kcve9eQottOG5kZ74KEr55v0tU2CQormSSDK87zYQ==
|
||||
dependencies:
|
||||
"@babel/runtime" "^7.12.5"
|
||||
"@testing-library/dom" "^8.0.0"
|
||||
"@testing-library/dom" "^8.5.0"
|
||||
"@types/react-dom" "^18.0.0"
|
||||
|
||||
"@testing-library/user-event@^13.5.0":
|
||||
version "13.5.0"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-13.5.0.tgz#69d77007f1e124d55314a2b73fd204b333b13295"
|
||||
integrity sha512-5Kwtbo3Y/NowpkbRuSepbyMFkZmHgD+vPzYB/RJ4oxt5Gj/avFFBYjhw27cqSVPVw/3a67NK1PbiIr9k4Gwmdg==
|
||||
dependencies:
|
||||
"@babel/runtime" "^7.12.5"
|
||||
"@testing-library/user-event@^14.2.0":
|
||||
version "14.2.0"
|
||||
resolved "https://registry.yarnpkg.com/@testing-library/user-event/-/user-event-14.2.0.tgz#8293560f8f80a00383d6c755ec3e0b918acb1683"
|
||||
integrity sha512-+hIlG4nJS6ivZrKnOP7OGsDu9Fxmryj9vCl8x0ZINtTJcCHs2zLsYif5GzuRiBF2ck5GZG2aQr7Msg+EHlnYVQ==
|
||||
|
||||
"@tootallnate/once@1":
|
||||
version "1.1.2"
|
||||
@@ -1735,6 +1734,11 @@
|
||||
resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.4.3.tgz#a3c65525b91fca7da00ab1a3ac2b5a2a4afbffbf"
|
||||
integrity sha512-QzSuZMBuG5u8HqYz01qtMdg/Jfctlnvj1z/lYnIDXs/golxw0fxtRAHd9KrzjR7Yxz1qVeI00o0kiO3PmVdJ9w==
|
||||
|
||||
"@types/prop-types@*":
|
||||
version "15.7.5"
|
||||
resolved "https://registry.yarnpkg.com/@types/prop-types/-/prop-types-15.7.5.tgz#5f19d2b85a98e9558036f6a3cacc8819420f05cf"
|
||||
integrity sha512-JCB8C6SnDoQf0cNycqd/35A7MjcnK+ZTqE7judS6o7utxUCg6imJg3QK2qzHKszlTjcj2cn+NwMB2i96ubpj7w==
|
||||
|
||||
"@types/q@^1.5.1":
|
||||
version "1.5.5"
|
||||
resolved "https://registry.yarnpkg.com/@types/q/-/q-1.5.5.tgz#75a2a8e7d8ab4b230414505d92335d1dcb53a6df"
|
||||
@@ -1750,6 +1754,22 @@
|
||||
resolved "https://registry.yarnpkg.com/@types/range-parser/-/range-parser-1.2.4.tgz#cd667bcfdd025213aafb7ca5915a932590acdcdc"
|
||||
integrity sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==
|
||||
|
||||
"@types/react-dom@^18.0.0":
|
||||
version "18.0.5"
|
||||
resolved "https://registry.yarnpkg.com/@types/react-dom/-/react-dom-18.0.5.tgz#330b2d472c22f796e5531446939eacef8378444a"
|
||||
integrity sha512-OWPWTUrY/NIrjsAPkAk1wW9LZeIjSvkXRhclsFO8CZcZGCOg2G0YZy4ft+rOyYxy8B7ui5iZzi9OkDebZ7/QSA==
|
||||
dependencies:
|
||||
"@types/react" "*"
|
||||
|
||||
"@types/react@*":
|
||||
version "18.0.9"
|
||||
resolved "https://registry.yarnpkg.com/@types/react/-/react-18.0.9.tgz#d6712a38bd6cd83469603e7359511126f122e878"
|
||||
integrity sha512-9bjbg1hJHUm4De19L1cHiW0Jvx3geel6Qczhjd0qY5VKVE2X5+x77YxAepuCwVh4vrgZJdgEJw48zrhRIeF4Nw==
|
||||
dependencies:
|
||||
"@types/prop-types" "*"
|
||||
"@types/scheduler" "*"
|
||||
csstype "^3.0.2"
|
||||
|
||||
"@types/resolve@1.17.1":
|
||||
version "1.17.1"
|
||||
resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.17.1.tgz#3afd6ad8967c77e4376c598a82ddd58f46ec45d6"
|
||||
@@ -1762,6 +1782,11 @@
|
||||
resolved "https://registry.yarnpkg.com/@types/retry/-/retry-0.12.1.tgz#d8f1c0d0dc23afad6dc16a9e993a0865774b4065"
|
||||
integrity sha512-xoDlM2S4ortawSWORYqsdU+2rxdh4LRW9ytc3zmT37RIKQh6IHyKwwtKhKis9ah8ol07DCkZxPt8BBvPjC6v4g==
|
||||
|
||||
"@types/scheduler@*":
|
||||
version "0.16.2"
|
||||
resolved "https://registry.yarnpkg.com/@types/scheduler/-/scheduler-0.16.2.tgz#1a62f89525723dde24ba1b01b092bf5df8ad4d39"
|
||||
integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew==
|
||||
|
||||
"@types/serve-index@^1.9.1":
|
||||
version "1.9.1"
|
||||
resolved "https://registry.yarnpkg.com/@types/serve-index/-/serve-index-1.9.1.tgz#1b5e85370a192c01ec6cec4735cf2917337a6278"
|
||||
@@ -3175,6 +3200,11 @@ cssstyle@^2.3.0:
|
||||
dependencies:
|
||||
cssom "~0.3.6"
|
||||
|
||||
csstype@^3.0.2:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/csstype/-/csstype-3.1.0.tgz#4ddcac3718d787cf9df0d1b7d15033925c8f29f2"
|
||||
integrity sha512-uX1KG+x9h5hIJsaKR9xHUeUraxf8IODOwq9JLNPq6BwB04a/xgpq3rcx47l5BZu5zBPlgD342tdke3Hom/nJRA==
|
||||
|
||||
damerau-levenshtein@^1.0.7:
|
||||
version "1.0.8"
|
||||
resolved "https://registry.yarnpkg.com/damerau-levenshtein/-/damerau-levenshtein-1.0.8.tgz#b43d286ccbd36bc5b2f7ed41caf2d0aba1f8a6e7"
|
||||
@@ -3622,10 +3652,10 @@ escodegen@^2.0.0:
|
||||
optionalDependencies:
|
||||
source-map "~0.6.1"
|
||||
|
||||
eslint-config-react-app@^7.0.0:
|
||||
version "7.0.0"
|
||||
resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.0.tgz#0fa96d5ec1dfb99c029b1554362ab3fa1c3757df"
|
||||
integrity sha512-xyymoxtIt1EOsSaGag+/jmcywRuieQoA2JbPCjnw9HukFj9/97aGPoZVFioaotzk1K5Qt9sHO5EutZbkrAXS0g==
|
||||
eslint-config-react-app@^7.0.1:
|
||||
version "7.0.1"
|
||||
resolved "https://registry.yarnpkg.com/eslint-config-react-app/-/eslint-config-react-app-7.0.1.tgz#73ba3929978001c5c86274c017ea57eb5fa644b4"
|
||||
integrity sha512-K6rNzvkIeHaTd8m/QEh1Zko0KI7BACWkkneSs6s9cKZC/J27X3eZR6Upt1jkmZ/4FK+XUOPPxMEN7+lbUXfSlA==
|
||||
dependencies:
|
||||
"@babel/core" "^7.16.0"
|
||||
"@babel/eslint-parser" "^7.16.3"
|
||||
@@ -6841,10 +6871,10 @@ react-app-polyfill@^3.0.0:
|
||||
regenerator-runtime "^0.13.9"
|
||||
whatwg-fetch "^3.6.2"
|
||||
|
||||
react-dev-utils@^12.0.0:
|
||||
version "12.0.0"
|
||||
resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.0.tgz#4eab12cdb95692a077616770b5988f0adf806526"
|
||||
integrity sha512-xBQkitdxozPxt1YZ9O1097EJiVpwHr9FoAuEVURCKV0Av8NBERovJauzP7bo1ThvuhZ4shsQ1AJiu4vQpoT1AQ==
|
||||
react-dev-utils@^12.0.1:
|
||||
version "12.0.1"
|
||||
resolved "https://registry.yarnpkg.com/react-dev-utils/-/react-dev-utils-12.0.1.tgz#ba92edb4a1f379bd46ccd6bcd4e7bc398df33e73"
|
||||
integrity sha512-84Ivxmr17KjUupyqzFode6xKhjwuEJDROWKJy/BthkL7Wn6NJ8h4WE6k/exAv6ImS+0oZLRRW5j/aINMHyeGeQ==
|
||||
dependencies:
|
||||
"@babel/code-frame" "^7.16.0"
|
||||
address "^1.1.2"
|
||||
@@ -6865,25 +6895,24 @@ react-dev-utils@^12.0.0:
|
||||
open "^8.4.0"
|
||||
pkg-up "^3.1.0"
|
||||
prompts "^2.4.2"
|
||||
react-error-overlay "^6.0.10"
|
||||
react-error-overlay "^6.0.11"
|
||||
recursive-readdir "^2.2.2"
|
||||
shell-quote "^1.7.3"
|
||||
strip-ansi "^6.0.1"
|
||||
text-table "^0.2.0"
|
||||
|
||||
react-dom@^17.0.2:
|
||||
version "17.0.2"
|
||||
resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-17.0.2.tgz#ecffb6845e3ad8dbfcdc498f0d0a939736502c23"
|
||||
integrity sha512-s4h96KtLDUQlsENhMn1ar8t2bEa+q/YAtj8pPPdIjPDGBDIVNsrD9aXNWqspUe6AzKCIG0C1HZZLqLV7qpOBGA==
|
||||
react-dom@^18.1.0:
|
||||
version "18.1.0"
|
||||
resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-18.1.0.tgz#7f6dd84b706408adde05e1df575b3a024d7e8a2f"
|
||||
integrity sha512-fU1Txz7Budmvamp7bshe4Zi32d0ll7ect+ccxNu9FlObT605GOEB8BfO4tmRJ39R5Zj831VCpvQ05QPBW5yb+w==
|
||||
dependencies:
|
||||
loose-envify "^1.1.0"
|
||||
object-assign "^4.1.1"
|
||||
scheduler "^0.20.2"
|
||||
scheduler "^0.22.0"
|
||||
|
||||
react-error-overlay@^6.0.10:
|
||||
version "6.0.10"
|
||||
resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.10.tgz#0fe26db4fa85d9dbb8624729580e90e7159a59a6"
|
||||
integrity sha512-mKR90fX7Pm5seCOfz8q9F+66VCc1PGsWSBxKbITjfKVQHMNF2zudxHnMdJiB1fRCb+XsbQV9sO9DCkgsMQgBIA==
|
||||
react-error-overlay@^6.0.11:
|
||||
version "6.0.11"
|
||||
resolved "https://registry.yarnpkg.com/react-error-overlay/-/react-error-overlay-6.0.11.tgz#92835de5841c5cf08ba00ddd2d677b6d17ff9adb"
|
||||
integrity sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==
|
||||
|
||||
react-is@^16.13.1:
|
||||
version "16.13.1"
|
||||
@@ -6900,10 +6929,10 @@ react-refresh@^0.11.0:
|
||||
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.11.0.tgz#77198b944733f0f1f1a90e791de4541f9f074046"
|
||||
integrity sha512-F27qZr8uUqwhWZboondsPx8tnC3Ct3SxZA3V5WyEvujRyyNv0VYPhoBg1gZ8/MV5tubQp76Trw8lTv9hzRBa+A==
|
||||
|
||||
react-scripts@5.0.0:
|
||||
version "5.0.0"
|
||||
resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.0.tgz#6547a6d7f8b64364ef95273767466cc577cb4b60"
|
||||
integrity sha512-3i0L2CyIlROz7mxETEdfif6Sfhh9Lfpzi10CtcGs1emDQStmZfWjJbAIMtRD0opVUjQuFWqHZyRZ9PPzKCFxWg==
|
||||
react-scripts@5.0.1:
|
||||
version "5.0.1"
|
||||
resolved "https://registry.yarnpkg.com/react-scripts/-/react-scripts-5.0.1.tgz#6285dbd65a8ba6e49ca8d651ce30645a6d980003"
|
||||
integrity sha512-8VAmEm/ZAwQzJ+GOMLbBsTdDKOpuZh7RPs0UymvBR2vRk4iZWCskjbFnxqjrzoIvlNNRZ3QJFx6/qDSi6zSnaQ==
|
||||
dependencies:
|
||||
"@babel/core" "^7.16.0"
|
||||
"@pmmmwh/react-refresh-webpack-plugin" "^0.5.3"
|
||||
@@ -6921,7 +6950,7 @@ react-scripts@5.0.0:
|
||||
dotenv "^10.0.0"
|
||||
dotenv-expand "^5.1.0"
|
||||
eslint "^8.3.0"
|
||||
eslint-config-react-app "^7.0.0"
|
||||
eslint-config-react-app "^7.0.1"
|
||||
eslint-webpack-plugin "^3.1.1"
|
||||
file-loader "^6.2.0"
|
||||
fs-extra "^10.0.0"
|
||||
@@ -6938,7 +6967,7 @@ react-scripts@5.0.0:
|
||||
postcss-preset-env "^7.0.1"
|
||||
prompts "^2.4.2"
|
||||
react-app-polyfill "^3.0.0"
|
||||
react-dev-utils "^12.0.0"
|
||||
react-dev-utils "^12.0.1"
|
||||
react-refresh "^0.11.0"
|
||||
resolve "^1.20.0"
|
||||
resolve-url-loader "^4.0.0"
|
||||
@@ -6955,13 +6984,12 @@ react-scripts@5.0.0:
|
||||
optionalDependencies:
|
||||
fsevents "^2.3.2"
|
||||
|
||||
react@^17.0.2:
|
||||
version "17.0.2"
|
||||
resolved "https://registry.yarnpkg.com/react/-/react-17.0.2.tgz#d0b5cc516d29eb3eee383f75b62864cfb6800037"
|
||||
integrity sha512-gnhPt75i/dq/z3/6q/0asP78D0u592D5L1pd7M8P+dck6Fu/jJeL6iVVK23fptSUZj8Vjf++7wXA8UNclGQcbA==
|
||||
react@^18.1.0:
|
||||
version "18.1.0"
|
||||
resolved "https://registry.yarnpkg.com/react/-/react-18.1.0.tgz#6f8620382decb17fdc5cc223a115e2adbf104890"
|
||||
integrity sha512-4oL8ivCz5ZEPyclFQXaNksK3adutVS8l2xzZU0cqEFrE9Sb7fC0EFK5uEk74wIreL1DERyjvsU915j1pcT2uEQ==
|
||||
dependencies:
|
||||
loose-envify "^1.1.0"
|
||||
object-assign "^4.1.1"
|
||||
|
||||
readable-stream@^2.0.1:
|
||||
version "2.3.7"
|
||||
@@ -7235,13 +7263,12 @@ saxes@^5.0.1:
|
||||
dependencies:
|
||||
xmlchars "^2.2.0"
|
||||
|
||||
scheduler@^0.20.2:
|
||||
version "0.20.2"
|
||||
resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.20.2.tgz#4baee39436e34aa93b4874bddcbf0fe8b8b50e91"
|
||||
integrity sha512-2eWfGgAqqWFGqtdMmcL5zCMK1U8KlXv8SQFGglL3CEtd0aDVDWgeF/YoCmvln55m5zSk3J/20hTaSBeSObsQDQ==
|
||||
scheduler@^0.22.0:
|
||||
version "0.22.0"
|
||||
resolved "https://registry.yarnpkg.com/scheduler/-/scheduler-0.22.0.tgz#83a5d63594edf074add9a7198b1bae76c3db01b8"
|
||||
integrity sha512-6QAm1BgQI88NPYymgGQLCZgvep4FyePDWFpXVK+zNSUgHwlqpJy8VEh8Et0KxTACS4VWwMousBElAZOH9nkkoQ==
|
||||
dependencies:
|
||||
loose-envify "^1.1.0"
|
||||
object-assign "^4.1.1"
|
||||
|
||||
schema-utils@2.7.0:
|
||||
version "2.7.0"
|
||||
@@ -8156,10 +8183,10 @@ wbuf@^1.1.0, wbuf@^1.7.3:
|
||||
dependencies:
|
||||
minimalistic-assert "^1.0.0"
|
||||
|
||||
web-vitals@^2.1.3:
|
||||
version "2.1.3"
|
||||
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-2.1.3.tgz#6dca59f41dbc3fcccdb889da06191b437b18f534"
|
||||
integrity sha512-+ijpniAzcnQicXaXIN0/eHQAiV/jMt1oHGHTmz7VdAJPPkzzDhmoYPSpLgJTuFtUh+jCjxCoeTZPg7Ic+g8o7w==
|
||||
web-vitals@^2.1.4:
|
||||
version "2.1.4"
|
||||
resolved "https://registry.yarnpkg.com/web-vitals/-/web-vitals-2.1.4.tgz#76563175a475a5e835264d373704f9dde718290c"
|
||||
integrity sha512-sVWcwhU5mX6crfI5Vd2dC4qchyTqxV8URinzt25XqVh+bHEPGH4C3NPrNionCP7Obx59wrYEbNlw4Z8sjALzZg==
|
||||
|
||||
webidl-conversions@^4.0.2:
|
||||
version "4.0.2"
|
||||
|
||||
3  examples/remix/.eslintrc.js  Normal file
@@ -0,0 +1,3 @@
module.exports = {
  extends: ["@remix-run/eslint-config", "@remix-run/eslint-config/node"],
};
8  examples/remix/.gitignore  vendored
@@ -1,7 +1,11 @@
node_modules

.cache
.env
.vercel
.output

public/build
api/_build
/build/
/public/build
/api/index.js
/api/index.js.map
@@ -1,5 +0,0 @@
const { createRequestHandler } = require("@remix-run/vercel");

module.exports = createRequestHandler({
  build: require("./_build")
});
@@ -1,4 +1,4 @@
import { RemixBrowser } from "@remix-run/react";
import { hydrate } from "react-dom";
import { RemixBrowser } from "remix";

hydrate(<RemixBrowser />, document);
@@ -1,6 +1,6 @@
import type { EntryContext } from "@remix-run/node";
import { RemixServer } from "@remix-run/react";
import { renderToString } from "react-dom/server";
import { RemixServer } from "remix";
import type { EntryContext } from "remix";

export default function handleRequest(
  request: Request,
@@ -16,6 +16,6 @@ export default function handleRequest(

  return new Response("<!DOCTYPE html>" + markup, {
    status: responseStatusCode,
    headers: responseHeaders
    headers: responseHeaders,
  });
}
@@ -1,3 +1,4 @@
|
||||
import type { LinksFunction, MetaFunction } from "@remix-run/node";
|
||||
import {
|
||||
Link,
|
||||
Links,
|
||||
@@ -6,14 +7,13 @@ import {
|
||||
Outlet,
|
||||
Scripts,
|
||||
ScrollRestoration,
|
||||
useCatch
|
||||
} from "remix";
|
||||
import type { LinksFunction } from "remix";
|
||||
useCatch,
|
||||
} from "@remix-run/react";
|
||||
|
||||
import globalStylesUrl from "~/styles/global.css";
|
||||
import darkStylesUrl from "~/styles/dark.css";
|
||||
import globalStylesUrl from "~/styles/global.css";
|
||||
|
||||
// https://remix.run/api/app#links
|
||||
// https://remix.run/api/conventions#links
|
||||
export let links: LinksFunction = () => {
|
||||
return [
|
||||
{ rel: "stylesheet", href: globalStylesUrl },
|
||||
@@ -25,6 +25,12 @@ export let links: LinksFunction = () => {
|
||||
];
|
||||
};
|
||||
|
||||
// https://remix.run/api/conventions#meta
|
||||
export let meta: MetaFunction = () => ({
|
||||
charset: "utf-8",
|
||||
viewport: "width=device-width,initial-scale=1",
|
||||
});
|
||||
|
||||
// https://remix.run/api/conventions#default-export
|
||||
// https://remix.run/api/conventions#route-filenames
|
||||
export default function App() {
|
||||
@@ -37,7 +43,7 @@ export default function App() {
|
||||
);
|
||||
}
|
||||
|
||||
// https://remix.run/docs/en/v1/api/conventions#errorboundary
|
||||
// https://remix.run/api/conventions#errorboundary
|
||||
export function ErrorBoundary({ error }: { error: Error }) {
|
||||
console.error(error);
|
||||
return (
|
||||
@@ -57,7 +63,7 @@ export function ErrorBoundary({ error }: { error: Error }) {
|
||||
);
|
||||
}
|
||||
|
||||
// https://remix.run/docs/en/v1/api/conventions#catchboundary
|
||||
// https://remix.run/api/conventions#catchboundary
|
||||
export function CatchBoundary() {
|
||||
let caught = useCatch();
|
||||
|
||||
@@ -103,8 +109,6 @@ function Document({
|
||||
return (
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charSet="utf-8" />
|
||||
<meta name="viewport" content="width=device-width,initial-scale=1" />
|
||||
{title ? <title>{title}</title> : null}
|
||||
<Meta />
|
||||
<Links />
|
||||
@@ -113,7 +117,7 @@ function Document({
|
||||
{children}
|
||||
<ScrollRestoration />
|
||||
<Scripts />
|
||||
{process.env.NODE_ENV === "development" && <LiveReload />}
|
||||
<LiveReload />
|
||||
</body>
|
||||
</html>
|
||||
);
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Outlet } from "remix";
|
||||
import type { MetaFunction, LinksFunction } from "remix";
|
||||
import type { MetaFunction, LinksFunction } from "@remix-run/node";
|
||||
import { Outlet } from "@remix-run/react";
|
||||
|
||||
import stylesUrl from "~/styles/demos/about.css";
|
||||
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Link } from "remix";
|
||||
import { Link } from "@remix-run/react";
|
||||
|
||||
export default function AboutIndex() {
|
||||
return (
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { Link } from "remix";
|
||||
import { Link } from "@remix-run/react";
|
||||
|
||||
export default function AboutIndex() {
|
||||
return (
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
import type { ActionFunction } from "@remix-run/node";
|
||||
import { json, redirect } from "@remix-run/node";
|
||||
import { Form, useActionData } from "@remix-run/react";
|
||||
import { useEffect, useRef } from "react";
|
||||
import type { ActionFunction } from "remix";
|
||||
import { Form, json, useActionData, redirect } from "remix";
|
||||
|
||||
export function meta() {
|
||||
return { title: "Actions Demo" };
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { useCatch, Link, json, useLoaderData, Outlet } from "remix";
|
||||
import type { MetaFunction } from "@remix-run/node";
|
||||
import { json } from "@remix-run/node";
|
||||
import { Link, Outlet, useCatch, useLoaderData } from "@remix-run/react";
|
||||
|
||||
export function meta() {
|
||||
return { title: "Boundaries Demo" };
|
||||
}
|
||||
export let meta: MetaFunction = () => ({ title: "Boundaries Demo" });
|
||||
|
||||
export default function Boundaries() {
|
||||
return (
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useCatch, Link, json, useLoaderData } from "remix";
|
||||
import type { LoaderFunction, MetaFunction } from "remix";
|
||||
import type { LoaderFunction, MetaFunction } from "@remix-run/node";
|
||||
import { json } from "@remix-run/node";
|
||||
import { Link, useCatch, useLoaderData } from "@remix-run/react";
|
||||
|
||||
// The `$` in route filenames becomes a pattern that's parsed from the URL and
|
||||
// passed to your loaders so you can look up data.
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { useCatch, Link, json, useLoaderData, Outlet } from "remix";
|
||||
import type { LoaderFunction } from "remix";
|
||||
import type { LoaderFunction } from "@remix-run/node";
|
||||
import { json } from "@remix-run/node";
|
||||
import { Link, Outlet, useCatch, useLoaderData } from "@remix-run/react";
|
||||
|
||||
export default function Boundaries() {
|
||||
return (
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import type { MetaFunction, LoaderFunction } from "remix";
|
||||
import { useLoaderData, json, Link } from "remix";
|
||||
import type { MetaFunction, LoaderFunction } from "@remix-run/node";
|
||||
import { json } from "@remix-run/node";
|
||||
import { Link, useLoaderData } from "@remix-run/react";
|
||||
|
||||
type IndexData = {
|
||||
resources: Array<{ name: string; url: string }>;
|
||||
|
||||
8345  examples/remix/package-lock.json  generated — File diff suppressed because it is too large
@@ -1,26 +1,28 @@
{
  "private": true,
  "name": "remix-app-template",
  "description": "",
  "license": "",
  "sideEffects": false,
  "scripts": {
    "build": "remix build",
    "dev": "remix dev",
    "postinstall": "remix setup node"
    "dev": "remix dev"
  },
  "dependencies": {
    "@remix-run/react": "^1.0.6",
    "@remix-run/node": "^1.5.1",
    "@remix-run/react": "^1.5.1",
    "@remix-run/vercel": "^1.5.1",
    "@vercel/node": "^2.0.0",
    "react": "^17.0.2",
    "react-dom": "^17.0.2",
    "remix": "^1.0.6",
    "@remix-run/serve": "^1.0.6",
    "@remix-run/vercel": "^1.0.6"
    "react-dom": "^17.0.2"
  },
  "devDependencies": {
    "@remix-run/dev": "^1.0.6",
    "@types/react": "^17.0.24",
    "@types/react-dom": "^17.0.9",
    "typescript": "^4.1.2"
    "@remix-run/dev": "^1.5.1",
    "@remix-run/eslint-config": "^1.5.1",
    "@remix-run/serve": "^1.5.1",
    "@types/react": "^17.0.45",
    "@types/react-dom": "^17.0.17",
    "eslint": "^8.15.0",
    "typescript": "^4.6.4"
  },
  "sideEffects": false
  "engines": {
    "node": ">=14"
  }
}
@@ -1,9 +1,15 @@
/**
 * @type {import('@remix-run/dev/config').AppConfig}
 * @type {import('@remix-run/dev').AppConfig}
 */
module.exports = {
  appDirectory: "app",
  browserBuildDirectory: "public/build",
  publicPath: "/build/",
  serverBuildDirectory: "api/_build"
  serverBuildTarget: "vercel",
  // When running locally in development mode, we use the built in remix
  // server. This does not understand the vercel lambda module format,
  // so we default back to the standard build output.
  server: process.env.NODE_ENV === "development" ? undefined : "./server.js",
  ignoredRouteFiles: ["**/.*"],
  // appDirectory: "app",
  // assetsBuildDirectory: "public/build",
  // serverBuildPath: "api/index.js",
  // publicPath: "/build/",
};
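To spell out the comment in the config above, the `server` option switches the build entry by environment; the sketch below restates the two branches as described by that comment:

```ts
// Sketch of the `server` ternary above:
// - NODE_ENV === "development" (remix dev / vercel dev): server is undefined,
//   so Remix's built-in dev server serves the app directly.
// - any other NODE_ENV (production builds on Vercel): server is "./server.js",
//   the @remix-run/vercel request handler added below in this diff.
const server =
  process.env.NODE_ENV === "development" ? undefined : "./server.js";
```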
4  examples/remix/server.js  Normal file
@@ -0,0 +1,4 @@
import * as build from "@remix-run/dev/server-build";
import { createRequestHandler } from "@remix-run/vercel";

export default createRequestHandler({ build, mode: process.env.NODE_ENV });
@@ -12,8 +12,8 @@
    "format": "prettier --write --plugin-search-dir=. ."
  },
  "devDependencies": {
    "@sveltejs/adapter-auto": "next",
    "@sveltejs/kit": "next",
    "@sveltejs/adapter-auto": "1.0.0-next.50",
    "@sveltejs/kit": "1.0.0-next.347",
    "@types/cookie": "^0.4.1",
    "prettier": "^2.5.1",
    "prettier-plugin-svelte": "^2.5.0",
@@ -24,6 +24,7 @@
  "type": "module",
  "dependencies": {
    "@fontsource/fira-mono": "^4.5.0",
    "cookie": "^0.4.1"
    "cookie": "^0.4.1",
    "web-vitals": "^2.1.4"
  }
}
1633  examples/sveltekit/pnpm-lock.yaml  generated — File diff suppressed because it is too large

66  examples/sveltekit/src/lib/vitals.js  Normal file
@@ -0,0 +1,66 @@
import { getCLS, getFCP, getFID, getLCP, getTTFB } from 'web-vitals';

const vitalsUrl = 'https://vitals.vercel-analytics.com/v1/vitals';

function getConnectionSpeed() {
  return 'connection' in navigator &&
    navigator['connection'] &&
    'effectiveType' in navigator['connection']
    ? // @ts-ignore
      navigator['connection']['effectiveType']
    : '';
}

/**
 * @param {import("web-vitals").Metric} metric
 * @param {{ params: { [s: string]: any; } | ArrayLike<any>; path: string; analyticsId: string; debug: boolean; }} options
 */
function sendToAnalytics(metric, options) {
  const page = Object.entries(options.params).reduce(
    (acc, [key, value]) => acc.replace(value, `[${key}]`),
    options.path
  );

  const body = {
    dsn: options.analyticsId,
    id: metric.id,
    page,
    href: location.href,
    event_name: metric.name,
    value: metric.value.toString(),
    speed: getConnectionSpeed()
  };

  if (options.debug) {
    console.log('[Analytics]', metric.name, JSON.stringify(body, null, 2));
  }

  const blob = new Blob([new URLSearchParams(body).toString()], {
    // This content type is necessary for `sendBeacon`
    type: 'application/x-www-form-urlencoded'
  });
  if (navigator.sendBeacon) {
    navigator.sendBeacon(vitalsUrl, blob);
  } else
    fetch(vitalsUrl, {
      body: blob,
      method: 'POST',
      credentials: 'omit',
      keepalive: true
    });
}

/**
 * @param {any} options
 */
export function webVitals(options) {
  try {
    getFID((metric) => sendToAnalytics(metric, options));
    getTTFB((metric) => sendToAnalytics(metric, options));
    getLCP((metric) => sendToAnalytics(metric, options));
    getCLS((metric) => sendToAnalytics(metric, options));
    getFCP((metric) => sendToAnalytics(metric, options));
  } catch (err) {
    console.error('[Analytics]', err);
  }
}
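The `page` computation above rewrites concrete parameter values back into their route-parameter names so analytics aggregates by route; a worked example with hypothetical values:

```ts
// Hypothetical inputs, shaped like what the SvelteKit layout passes (see __layout.svelte below):
const options = { path: '/blog/my-first-post', params: { slug: 'my-first-post' } };

const page = Object.entries(options.params).reduce(
  (acc, [key, value]) => acc.replace(value, `[${key}]`),
  options.path
);
// page === '/blog/[slug]' — dynamic segments are reported by parameter name,
// so every post reports under the same page key.
```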
@@ -1,6 +1,19 @@
<script>
  import Header from '$lib/header/Header.svelte';
  import { webVitals } from '$lib/vitals';
  import { browser } from '$app/env';
  import { page } from '$app/stores';
  import '../app.css';

  let analyticsId = import.meta.env.VERCEL_ANALYTICS_ID;

  $: if (browser && analyticsId) {
    webVitals({
      path: $page.url.pathname,
      params: $page.params,
      analyticsId
    })
  }
</script>

<Header />
@@ -8,6 +8,11 @@ const config = {
    // Override http methods in the Todo forms
    methodOverride: {
      allowed: ['PATCH', 'DELETE']
    },
    vite: {
      define: {
        'import.meta.env.VERCEL_ANALYTICS_ID': JSON.stringify(process.env.VERCEL_ANALYTICS_ID)
      }
    }
  }
};
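The Vite `define` entry above is what makes `import.meta.env.VERCEL_ANALYTICS_ID` usable in the layout: it is a compile-time text substitution, sketched below with a hypothetical ID:

```ts
// Source (examples/sveltekit/src/routes/__layout.svelte):
//   let analyticsId = import.meta.env.VERCEL_ANALYTICS_ID;
//
// After the build, assuming process.env.VERCEL_ANALYTICS_ID was "abc123" (hypothetical),
// the emitted code is effectively:
//   let analyticsId = "abc123";
// If the variable is unset, analyticsId ends up undefined and the layout's
// `if (browser && analyticsId)` guard skips reporting.
```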
1479  examples/sveltekit/yarn.lock  Normal file — File diff suppressed because it is too large
@@ -30,8 +30,8 @@
    "npm-package-arg": "6.1.0",
    "prettier": "2.6.2",
    "ts-eager": "2.0.2",
    "ts-jest": "28.0.0-next.1",
    "turbo": "1.2.14"
    "ts-jest": "28.0.5",
    "turbo": "1.3.1"
  },
  "scripts": {
    "lerna": "lerna",
@@ -41,7 +41,7 @@
    "publish-from-github": "./utils/publish.sh",
    "changelog": "node utils/changelog.js",
    "build": "turbo run build",
    "vercel-build": "yarn build && cd api && node -r ts-eager/register ./_lib/script/build.ts",
    "vercel-build": "yarn build && yarn run pack && cd api && node -r ts-eager/register ./_lib/script/build.ts",
    "pre-commit": "lint-staged",
    "test": "jest --rootDir=\"test\" --testPathPattern=\"\\.test.js\"",
    "test-unit": "yarn test && turbo run test-unit",
@@ -49,7 +49,8 @@
    "test-integration-once": "turbo run test-integration-once",
    "test-integration-dev": "turbo run test-integration-dev",
    "lint": "eslint . --ext .ts,.js",
    "prepare": "husky install"
    "prepare": "husky install",
    "pack": "cd utils && node -r ts-eager/register ./pack.ts"
  },
  "lint-staged": {
    "./{*,{api,packages,test,utils}/**/*}.{js,ts}": [
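Read together with the API build script earlier in this diff, `vercel-build` now has three stages; the outline below is an assumption about utils/pack.ts (its contents are not part of this compare), inferred from the tarball names the copy loop expects:

```ts
// Assumed vercel-build pipeline after this change:
// 1. `yarn build`     — turbo run build, compiling every workspace package.
// 2. `yarn run pack`  — cd utils && node -r ts-eager/register ./pack.ts;
//    assumed to produce <name>-v<version>-<shortSha>.tgz inside each packages/* dir.
// 3. API build script — copies each tarball into the static site's tarballs/
//    directory under a stable `<package name>.tgz` path.
```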
@@ -1,6 +1,6 @@
{
  "name": "@vercel/build-utils",
  "version": "4.1.1-canary.0",
  "version": "4.2.0",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.js",
@@ -31,7 +31,7 @@
    "@types/node-fetch": "^2.1.6",
    "@types/semver": "6.0.0",
    "@types/yazl": "2.4.2",
    "@vercel/frameworks": "1.0.1",
    "@vercel/frameworks": "1.0.2",
    "@vercel/ncc": "0.24.0",
    "aggregate-error": "3.0.1",
    "async-retry": "1.2.3",
@@ -671,8 +671,8 @@ describe('Test `detectBuilders`', () => {
|
||||
expect(builders![0].use).toBe('@vercel/static');
|
||||
|
||||
expect(defaultRoutes!.length).toBe(1);
|
||||
expect((defaultRoutes![0] as any).src).toBe('/(.*)');
|
||||
expect((defaultRoutes![0] as any).dest).toBe('/dist/$1');
|
||||
expect(defaultRoutes![0].src).toBe('/(.*)');
|
||||
expect(defaultRoutes![0].dest).toBe('/dist/$1');
|
||||
});
|
||||
|
||||
it('Custom static output directory with api', async () => {
|
||||
@@ -691,9 +691,9 @@ describe('Test `detectBuilders`', () => {
|
||||
expect(builders![1].use).toBe('@vercel/static');
|
||||
|
||||
expect(defaultRoutes!.length).toBe(3);
|
||||
expect((defaultRoutes![1] as any).status).toBe(404);
|
||||
expect((defaultRoutes![2] as any).src).toBe('/(.*)');
|
||||
expect((defaultRoutes![2] as any).dest).toBe('/output/$1');
|
||||
expect(defaultRoutes![1].status).toBe(404);
|
||||
expect(defaultRoutes![2].src).toBe('/(.*)');
|
||||
expect(defaultRoutes![2].dest).toBe('/output/$1');
|
||||
});
|
||||
|
||||
it('Framework with non-package.json entrypoint', async () => {
|
||||
@@ -2285,10 +2285,10 @@ it('Test `detectRoutes`', async () => {
|
||||
|
||||
const { defaultRoutes } = await detectBuilders(files);
|
||||
expect(defaultRoutes!.length).toBe(3);
|
||||
expect((defaultRoutes![0] as any).dest).toBe('/api/team.js');
|
||||
expect((defaultRoutes![1] as any).dest).toBe('/api/user.go');
|
||||
expect((defaultRoutes![2] as any).dest).not.toBeDefined();
|
||||
expect((defaultRoutes![2] as any).status).toBe(404);
|
||||
expect(defaultRoutes![0].dest).toBe('/api/team.js');
|
||||
expect(defaultRoutes![1].dest).toBe('/api/user.go');
|
||||
expect(defaultRoutes![2].dest).not.toBeDefined();
|
||||
expect(defaultRoutes![2].status).toBe(404);
|
||||
}
|
||||
|
||||
{
|
||||
@@ -2335,10 +2335,10 @@ it('Test `detectRoutes`', async () => {
|
||||
];
|
||||
|
||||
const { defaultRoutes } = await detectBuilders(files);
|
||||
expect((defaultRoutes![2] as any).status).toBe(404);
|
||||
expect((defaultRoutes![2] as any).src).toBe('^/api(/.*)?$');
|
||||
expect((defaultRoutes![3] as any).src).toBe('/(.*)');
|
||||
expect((defaultRoutes![3] as any).dest).toBe('/public/$1');
|
||||
expect(defaultRoutes![2].status).toBe(404);
|
||||
expect(defaultRoutes![2].src).toBe('^/api(/.*)?$');
|
||||
expect(defaultRoutes![3].src).toBe('/(.*)');
|
||||
expect(defaultRoutes![3].dest).toBe('/public/$1');
|
||||
expect(defaultRoutes!.length).toBe(4);
|
||||
}
|
||||
|
||||
@@ -2350,8 +2350,8 @@ it('Test `detectRoutes`', async () => {
|
||||
const files = ['public/index.html', 'api/[endpoint].js'];
|
||||
|
||||
const { defaultRoutes } = await detectBuilders(files, pkg);
|
||||
expect((defaultRoutes![1] as any).status).toBe(404);
|
||||
expect((defaultRoutes![1] as any).src).toBe('^/api(/.*)?$');
|
||||
expect(defaultRoutes![1].status).toBe(404);
|
||||
expect(defaultRoutes![1].src).toBe('^/api(/.*)?$');
|
||||
expect(defaultRoutes!.length).toBe(2);
|
||||
}
|
||||
|
||||
@@ -2369,14 +2369,10 @@ it('Test `detectRoutes`', async () => {
|
||||
const { defaultRoutes } = await detectBuilders(files);
|
||||
|
||||
expect(defaultRoutes!.length).toBe(3);
|
||||
expect((defaultRoutes![0] as any).src).toBe(
|
||||
'^/api/date(/|/index|/index\\.js)?$'
|
||||
);
|
||||
expect((defaultRoutes![0] as any).dest).toBe('/api/date/index.js');
|
||||
expect((defaultRoutes![1] as any).src).toBe(
|
||||
'^/api/(date/|date|date\\.js)$'
|
||||
);
|
||||
expect((defaultRoutes![1] as any).dest).toBe('/api/date.js');
|
||||
expect(defaultRoutes![0].src).toBe('^/api/date(/|/index|/index\\.js)?$');
|
||||
expect(defaultRoutes![0].dest).toBe('/api/date/index.js');
|
||||
expect(defaultRoutes![1].src).toBe('^/api/(date/|date|date\\.js)$');
|
||||
expect(defaultRoutes![1].dest).toBe('/api/date.js');
|
||||
}
|
||||
|
||||
{
|
||||
@@ -2385,16 +2381,10 @@ it('Test `detectRoutes`', async () => {
|
||||
const { defaultRoutes } = await detectBuilders(files);
|
||||
|
||||
expect(defaultRoutes!.length).toBe(3);
|
||||
expect((defaultRoutes![0] as any).src).toBe(
|
||||
'^/api/([^/]+)(/|/index|/index\\.js)?$'
|
||||
);
|
||||
expect((defaultRoutes![0] as any).dest).toBe(
|
||||
'/api/[date]/index.js?date=$1'
|
||||
);
|
||||
expect((defaultRoutes![1] as any).src).toBe(
|
||||
'^/api/(date/|date|date\\.js)$'
|
||||
);
|
||||
expect((defaultRoutes![1] as any).dest).toBe('/api/date.js');
|
||||
expect(defaultRoutes![0].src).toBe('^/api/([^/]+)(/|/index|/index\\.js)?$');
|
||||
expect(defaultRoutes![0].dest).toBe('/api/[date]/index.js?date=$1');
|
||||
expect(defaultRoutes![1].src).toBe('^/api/(date/|date|date\\.js)$');
|
||||
expect(defaultRoutes![1].dest).toBe('/api/date.js');
|
||||
}
|
||||
|
||||
{
|
||||
@@ -2424,7 +2414,7 @@ it('Test `detectRoutes`', async () => {
|
||||
const { defaultRoutes } = await detectBuilders(files, null, { functions });
|
||||
|
||||
expect(defaultRoutes!.length).toBe(2);
|
||||
expect((defaultRoutes![0] as any).dest).toBe('/api/user.php');
|
||||
expect(defaultRoutes![0].dest).toBe('/api/user.php');
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "vercel",
|
||||
"version": "25.1.1-canary.0",
|
||||
"version": "25.2.0",
|
||||
"preferGlobal": true,
|
||||
"license": "Apache-2.0",
|
||||
"description": "The command-line interface for Vercel",
|
||||
@@ -42,15 +42,15 @@
|
||||
"node": ">= 14"
|
||||
},
|
||||
"dependencies": {
|
||||
"@vercel/build-utils": "4.1.1-canary.0",
|
||||
"@vercel/go": "2.0.2-canary.0",
|
||||
"@vercel/next": "3.0.2-canary.0",
|
||||
"@vercel/node": "2.1.1-canary.0",
|
||||
"@vercel/python": "3.0.2-canary.0",
|
||||
"@vercel/redwood": "1.0.2-canary.0",
|
||||
"@vercel/remix": "1.0.2-canary.0",
|
||||
"@vercel/ruby": "1.3.10-canary.0",
|
||||
"@vercel/static-build": "1.0.2-canary.0",
|
||||
"@vercel/build-utils": "4.2.0",
|
||||
"@vercel/go": "2.0.2",
|
||||
"@vercel/next": "3.1.0",
|
||||
"@vercel/node": "2.3.0",
|
||||
"@vercel/python": "3.0.2",
|
||||
"@vercel/redwood": "1.0.2",
|
||||
"@vercel/remix": "1.0.2",
|
||||
"@vercel/ruby": "1.3.10",
|
||||
"@vercel/static-build": "1.0.2",
|
||||
"update-notifier": "5.1.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -71,6 +71,7 @@
|
||||
"@types/fs-extra": "9.0.13",
|
||||
"@types/glob": "7.1.1",
|
||||
"@types/http-proxy": "1.16.2",
|
||||
"@types/ini": "1.3.31",
|
||||
"@types/inquirer": "7.3.1",
|
||||
"@types/jest": "27.4.1",
|
||||
"@types/jest-expect-message": "1.0.3",
|
||||
@@ -94,8 +95,8 @@
|
||||
"@types/which": "1.3.2",
|
||||
"@types/write-json-file": "2.2.1",
|
||||
"@types/yauzl-promise": "2.1.0",
|
||||
"@vercel/client": "12.0.2-canary.0",
|
||||
"@vercel/frameworks": "1.0.1",
|
||||
"@vercel/client": "12.0.2",
|
||||
"@vercel/frameworks": "1.0.2",
|
||||
"@vercel/ncc": "0.24.0",
|
||||
"@zeit/fun": "0.11.2",
|
||||
"@zeit/source-map-support": "0.6.2",
|
||||
@@ -130,11 +131,13 @@
|
||||
"fast-deep-equal": "3.1.3",
|
||||
"fs-extra": "10.0.0",
|
||||
"get-port": "5.1.1",
|
||||
"git-last-commit": "1.0.1",
|
||||
"glob": "7.1.2",
|
||||
"http-proxy": "1.18.1",
|
||||
"ini": "3.0.0",
|
||||
"inquirer": "7.0.4",
|
||||
"is-docker": "2.2.1",
|
||||
"is-port-reachable": "3.0.0",
|
||||
"is-port-reachable": "3.1.0",
|
||||
"is-url": "1.2.2",
|
||||
"jaro-winkler": "0.2.8",
|
||||
"jsonlines": "0.1.1",
|
||||
|
||||
@@ -16,6 +16,7 @@ import Client from '../../util/client';
|
||||
import { getPkgName } from '../../util/pkg-name';
|
||||
import { Output } from '../../util/output';
|
||||
import { Deployment, PaginationOptions } from '../../types';
|
||||
import { normalizeURL } from '../../util/bisect/normalize-url';
|
||||
|
||||
interface DeploymentV6
|
||||
extends Pick<
|
||||
@@ -97,9 +98,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
run = resolve(run);
|
||||
}
|
||||
|
||||
if (!bad.startsWith('https://')) {
|
||||
bad = `https://${bad}`;
|
||||
}
|
||||
bad = normalizeURL(bad);
|
||||
let parsed = parse(bad);
|
||||
if (!parsed.hostname) {
|
||||
output.error('Invalid input: no hostname provided');
|
||||
@@ -120,9 +119,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
|
||||
const badDeploymentPromise = getDeployment(client, bad).catch(err => err);
|
||||
|
||||
if (!good.startsWith('https://')) {
|
||||
good = `https://${good}`;
|
||||
}
|
||||
good = normalizeURL(good);
|
||||
parsed = parse(good);
|
||||
if (!parsed.hostname) {
|
||||
output.error('Invalid input: no hostname provided');
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import fs from 'fs-extra';
|
||||
import chalk from 'chalk';
|
||||
import dotenv from 'dotenv';
|
||||
import { join, relative } from 'path';
|
||||
import { join, normalize, relative, resolve } from 'path';
|
||||
import {
|
||||
detectBuilders,
|
||||
normalizePath,
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
BuildResultV2,
|
||||
BuildResultV2Typical,
|
||||
BuildResultV3,
|
||||
NowBuildError,
|
||||
} from '@vercel/build-utils';
|
||||
import minimatch from 'minimatch';
|
||||
import {
|
||||
@@ -64,6 +65,7 @@ const help = () => {
|
||||
'DIR'
|
||||
)} Path to the global ${'`.vercel`'} directory
|
||||
--cwd [path] The current working directory
|
||||
--output [path] Directory where built assets should be written to
|
||||
--prod Build a production deployment
|
||||
-d, --debug Debug mode [off]
|
||||
-y, --yes Skip the confirmation prompt
|
||||
@@ -100,6 +102,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
// Parse CLI args
|
||||
const argv = getArgs(client.argv.slice(2), {
|
||||
'--cwd': String,
|
||||
'--output': String,
|
||||
'--prod': Boolean,
|
||||
'--yes': Boolean,
|
||||
});
|
||||
@@ -279,7 +282,9 @@ export default async function main(client: Client): Promise<number> {
|
||||
}
|
||||
|
||||
// Delete output directory from potential previous build
|
||||
const outputDir = join(cwd, OUTPUT_DIR);
|
||||
const outputDir = argv['--output']
|
||||
? resolve(argv['--output'])
|
||||
: join(cwd, OUTPUT_DIR);
|
||||
await fs.remove(outputDir);
|
||||
|
||||
const buildStamp = stamp();
|
||||
@@ -296,6 +301,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
{
|
||||
'//': 'This file was generated by the `vercel build` command. It is not part of the Build Output API.',
|
||||
target,
|
||||
argv: process.argv,
|
||||
builds: builds.map(build => {
|
||||
const builderWithPkg = buildersWithPkgs.get(build.use);
|
||||
if (!builderWithPkg) {
|
||||
@@ -304,6 +310,7 @@ export default async function main(client: Client): Promise<number> {
|
||||
const { builder, pkg: builderPkg } = builderWithPkg;
|
||||
return {
|
||||
require: builderPkg.name,
|
||||
requirePath: builderWithPkg.path,
|
||||
apiVersion: builder.version,
|
||||
...build,
|
||||
};
|
||||
@@ -462,11 +469,12 @@ export default async function main(client: Client): Promise<number> {
|
||||
};
|
||||
await fs.writeJSON(join(outputDir, 'config.json'), config, { spaces: 2 });
|
||||
|
||||
const relOutputDir = relative(cwd, outputDir);
|
||||
output.print(
|
||||
`${prependEmoji(
|
||||
`Build Completed in ${chalk.bold(OUTPUT_DIR)} ${chalk.gray(
|
||||
buildStamp()
|
||||
)}`,
|
||||
`Build Completed in ${chalk.bold(
|
||||
relOutputDir.startsWith('..') ? outputDir : relOutputDir
|
||||
)} ${chalk.gray(buildStamp())}`,
|
||||
emoji('success')
|
||||
)}\n`
|
||||
);
|
||||
@@ -475,17 +483,33 @@ export default async function main(client: Client): Promise<number> {
|
||||
}
|
||||
|
||||
function expandBuild(files: string[], build: Builder): Builder[] {
|
||||
if (!build.src) return [];
|
||||
if (!build.use) {
|
||||
throw new NowBuildError({
|
||||
code: `invalid_build_specification`,
|
||||
message: 'Field `use` is missing in build specification',
|
||||
link: 'https://vercel.com/docs/configuration#project/builds',
|
||||
action: 'View Documentation',
|
||||
});
|
||||
}
|
||||
|
||||
let pattern = build.src;
|
||||
if (pattern[0] === '/') {
|
||||
let src = normalize(build.src || '**');
|
||||
if (src === '.' || src === './') {
|
||||
throw new NowBuildError({
|
||||
code: `invalid_build_specification`,
|
||||
message: 'A build `src` path resolves to an empty string',
|
||||
link: 'https://vercel.com/docs/configuration#project/builds',
|
||||
action: 'View Documentation',
|
||||
});
|
||||
}
|
||||
|
||||
if (src[0] === '/') {
|
||||
// Remove a leading slash so that the globbing is relative
|
||||
// to `cwd` instead of the root of the filesystem.
|
||||
pattern = pattern.substring(1);
|
||||
src = src.substring(1);
|
||||
}
|
||||
|
||||
const matches = files.filter(
|
||||
name => name === pattern || minimatch(name, pattern, { dot: true })
|
||||
name => name === src || minimatch(name, src, { dot: true })
|
||||
);
|
||||
|
||||
return matches.map(m => {
|
||||
|
||||
@@ -43,7 +43,9 @@ import {
|
||||
import { SchemaValidationFailed } from '../../util/errors';
|
||||
import purchaseDomainIfAvailable from '../../util/domains/purchase-domain-if-available';
|
||||
import confirm from '../../util/input/confirm';
|
||||
import editProjectSettings from '../../util/input/edit-project-settings';
|
||||
import editProjectSettings, {
|
||||
PartialProjectSettings,
|
||||
} from '../../util/input/edit-project-settings';
|
||||
import {
|
||||
getLinkedProject,
|
||||
linkFolderToProject,
|
||||
@@ -63,6 +65,7 @@ import { help } from './args';
|
||||
import { getDeploymentChecks } from '../../util/deploy/get-deployment-checks';
|
||||
import parseTarget from '../../util/deploy/parse-target';
|
||||
import getPrebuiltJson from '../../util/deploy/get-prebuilt-json';
|
||||
import { createGitMeta } from '../../util/deploy/create-git-meta';
|
||||
|
||||
export default async (client: Client) => {
|
||||
const { output } = client;
|
||||
@@ -415,6 +418,8 @@ export default async (client: Client) => {
|
||||
parseMeta(argv['--meta'])
|
||||
);
|
||||
|
||||
const gitMetadata = await createGitMeta(path, output);
|
||||
|
||||
// Merge dotenv config, `env` from vercel.json, and `--env` / `-e` arguments
|
||||
const deploymentEnv = Object.assign(
|
||||
{},
|
||||
@@ -453,6 +458,15 @@ export default async (client: Client) => {
|
||||
let deployStamp = stamp();
|
||||
let deployment = null;
|
||||
|
||||
const localConfigurationOverrides: PartialProjectSettings = {
|
||||
buildCommand: localConfig?.buildCommand,
|
||||
devCommand: localConfig?.devCommand,
|
||||
framework: localConfig?.framework,
|
||||
commandForIgnoringBuildStep: localConfig?.ignoreCommand,
|
||||
installCommand: localConfig?.installCommand,
|
||||
outputDirectory: localConfig?.outputDirectory,
|
||||
};
|
||||
|
||||
try {
|
||||
const createArgs: any = {
|
||||
name: project ? project.name : newProjectName,
|
||||
@@ -468,6 +482,7 @@ export default async (client: Client) => {
|
||||
nowConfig: localConfig,
|
||||
regions,
|
||||
meta,
|
||||
gitMetadata,
|
||||
deployStamp,
|
||||
target,
|
||||
skipAutoDetectionConfirmation: autoConfirm,
|
||||
@@ -475,7 +490,12 @@ export default async (client: Client) => {
|
||||
|
||||
if (!localConfig.builds || localConfig.builds.length === 0) {
|
||||
// Only add projectSettings for zero config deployments
|
||||
createArgs.projectSettings = { sourceFilesOutsideRootDirectory };
|
||||
createArgs.projectSettings =
|
||||
status === 'not_linked'
|
||||
? {
|
||||
sourceFilesOutsideRootDirectory,
|
||||
}
|
||||
: { ...localConfigurationOverrides, sourceFilesOutsideRootDirectory };
|
||||
}
|
||||
|
||||
deployment = await createDeploy(
|
||||
@@ -503,7 +523,9 @@ export default async (client: Client) => {
|
||||
const settings = await editProjectSettings(
|
||||
output,
|
||||
projectSettings,
|
||||
framework
|
||||
framework,
|
||||
false,
|
||||
localConfigurationOverrides
|
||||
);
|
||||
|
||||
// deploy again, but send projectSettings this time
|
||||
|
||||
@@ -110,10 +110,12 @@ export default async function dev(
|
||||
// v3 Build Output because it will incorrectly be detected by
|
||||
// @vercel/static-build in BuildOutputV3.getBuildOutputDirectory()
|
||||
if (!devCommand) {
|
||||
output.log(`Removing ${OUTPUT_DIR}`);
|
||||
const outputDir = join(cwd, OUTPUT_DIR);
|
||||
if (await fs.pathExists(outputDir)) {
|
||||
output.log(`Removing ${OUTPUT_DIR}`);
|
||||
await fs.remove(outputDir);
|
||||
}
|
||||
}
|
||||
|
||||
const devServer = new DevServer(cwd, {
|
||||
output,
|
||||
|
||||
@@ -150,17 +150,7 @@ export default async function inspect(
|
||||
`This Domain is not configured properly. To configure it you should either:`,
|
||||
null,
|
||||
null,
|
||||
null,
|
||||
{
|
||||
boxen: {
|
||||
margin: {
|
||||
left: 2,
|
||||
right: 0,
|
||||
bottom: 0,
|
||||
top: 0,
|
||||
},
|
||||
},
|
||||
}
|
||||
null
|
||||
);
|
||||
output.print(
|
||||
` ${chalk.grey('a)')} ` +
|
||||
|
||||
@@ -9,6 +9,7 @@ export default new Map([
|
||||
['certs', 'certs'],
|
||||
['deploy', 'deploy'],
|
||||
['dev', 'dev'],
|
||||
['develop', 'dev'],
|
||||
['dns', 'dns'],
|
||||
['domain', 'domains'],
|
||||
['domains', 'domains'],
|
||||
|
||||
@@ -296,6 +296,15 @@ export interface Token {
|
||||
teamId?: string;
|
||||
}
|
||||
|
||||
export interface GitMetadata {
|
||||
commitAuthorName?: string | undefined;
|
||||
commitMessage?: string | undefined;
|
||||
commitRef?: string | undefined;
|
||||
commitSha?: string | undefined;
|
||||
dirty?: boolean | undefined;
|
||||
remoteUrl: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* An object representing a Build on Vercel
|
||||
*/
|
||||
|
||||
7  packages/cli/src/util/bisect/normalize-url.ts  Normal file
@@ -0,0 +1,7 @@
function hasScheme(url: string): Boolean {
  return url.startsWith('http://') || url.startsWith('https://');
}

export function normalizeURL(url: string): string {
  return hasScheme(url) ? url : `https://${url}`;
}
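A quick illustration of the helper's behavior, with hypothetical deployment URLs:

```ts
normalizeURL('my-app-abc123.vercel.app'); // → 'https://my-app-abc123.vercel.app'
normalizeURL('http://localhost:3000');    // → 'http://localhost:3000' (existing scheme kept as-is)
```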
79  packages/cli/src/util/deploy/create-git-meta.ts  Normal file
@@ -0,0 +1,79 @@
import fs from 'fs-extra';
import { join } from 'path';
import ini from 'ini';
import git from 'git-last-commit';
import { exec } from 'child_process';
import { GitMetadata } from '../../types';
import { Output } from '../output';

export function isDirty(directory: string): Promise<boolean> {
  return new Promise((resolve, reject) => {
    exec('git status -s', { cwd: directory }, function (err, stdout, stderr) {
      if (err) return reject(err);
      if (stderr)
        return reject(
          new Error(
            `Failed to determine if git repo has been modified: ${stderr.trim()}`
          )
        );
      resolve(stdout.trim().length > 0);
    });
  });
}

function getLastCommit(directory: string): Promise<git.Commit> {
  return new Promise((resolve, reject) => {
    git.getLastCommit(
      (err, commit) => {
        if (err) return reject(err);
        resolve(commit);
      },
      { dst: directory }
    );
  });
}

export async function getRemoteUrl(
  configPath: string,
  output: Output
): Promise<string | null> {
  let gitConfig;
  try {
    gitConfig = ini.parse(await fs.readFile(configPath, 'utf-8'));
  } catch (error) {
    output.debug(`Error while parsing repo data: ${error.message}`);
  }
  if (!gitConfig) {
    return null;
  }

  const originUrl: string = gitConfig['remote "origin"']?.url;
  if (originUrl) {
    return originUrl;
  }
  return null;
}

export async function createGitMeta(
  directory: string,
  output: Output
): Promise<GitMetadata | undefined> {
  const remoteUrl = await getRemoteUrl(join(directory, '.git/config'), output);
  // If we can't get the repo URL, then don't return any metadata
  if (!remoteUrl) {
    return;
  }
  const [commit, dirty] = await Promise.all([
    getLastCommit(directory),
    isDirty(directory),
  ]);

  return {
    remoteUrl,
    commitAuthorName: commit.author.name,
    commitMessage: commit.subject,
    commitRef: commit.branch,
    commitSha: commit.hash,
    dirty,
  };
}
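For orientation, a sketch of the metadata object this helper returns when a remote and a commit are found; the shape follows the return statement above, while every field value below is invented for illustration:

```ts
const exampleMeta = {
  remoteUrl: 'https://github.com/example-user/example-repo.git', // hypothetical remote
  commitAuthorName: 'Jane Doe',
  commitMessage: 'Fix build output path',
  commitRef: 'main',
  commitSha: 'f50bcbc0ba0000000000000000000000000000ba', // hypothetical SHA
  dirty: false, // true when `git status -s` reports uncommitted changes
};
```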
@@ -417,10 +417,6 @@ export async function getBuildMatches(
      src = src.substring(1);
    }

    // We need to escape brackets since `glob` will
    // try to find a group otherwise
    src = src.replace(/(\[|\])/g, '[$1]');

    // lambda function files are trimmed of their file extension
    const mapToEntrypoint = new Map<string, string>();
    const extensionless = devServer.getExtensionlessFile(src);
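For reference, a quick illustration (not part of the diff) of what the bracket-escaping line above does to a dynamic API route so that `glob` matches the brackets literally instead of treating them as a character group:

// Hypothetical illustration of the escaping above:
const src = 'api/[id].js';
const escaped = src.replace(/(\[|\])/g, '[$1]');
// escaped === 'api/[[]id[]].js' — glob now matches the literal "[" and "]" characters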
packages/cli/src/util/dev/headers.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
import { Headers } from 'node-fetch';
import { IncomingHttpHeaders, OutgoingHttpHeaders } from 'http';

export function nodeHeadersToFetchHeaders(
  nodeHeaders: IncomingHttpHeaders | OutgoingHttpHeaders
): Headers {
  const headers = new Headers();
  for (const [name, value] of Object.entries(nodeHeaders)) {
    if (Array.isArray(value)) {
      for (const val of value) {
        headers.append(name, val);
      }
    } else if (typeof value !== 'undefined') {
      headers.set(name, String(value));
    }
  }
  return headers;
}
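A minimal sketch (not part of the diff) of how this helper flattens Node's header shape into a fetch `Headers` object — multi-valued headers such as `set-cookie` are appended one by one, and non-string values are stringified:

// Hypothetical usage sketch:
const headers = nodeHeadersToFetchHeaders({
  host: 'localhost:3000',
  'content-length': 42,          // non-string values are stringified
  'set-cookie': ['a=1', 'b=2'],  // array values are appended individually
});
headers.get('host'); // => 'localhost:3000'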
@@ -1,12 +1,13 @@
import ms from 'ms';
import url, { URL } from 'url';
import http from 'http';
import fs from 'fs-extra';
import chalk from 'chalk';
import fetch from 'node-fetch';
import plural from 'pluralize';
import rawBody from 'raw-body';
import listen from 'async-listen';
import minimatch from 'minimatch';
import ms from 'ms';
import httpProxy from 'http-proxy';
import { randomBytes } from 'crypto';
import serveHandler from 'serve-handler';
@@ -16,11 +17,11 @@ import path, { isAbsolute, basename, dirname, extname, join } from 'path';
import once from '@tootallnate/once';
import directoryTemplate from 'serve-handler/src/directory';
import getPort from 'get-port';
import { ChildProcess } from 'child_process';
import isPortReachable from 'is-port-reachable';
import deepEqual from 'fast-deep-equal';
import which from 'which';
import npa from 'npm-package-arg';
import type { ChildProcess } from 'child_process';

import { getVercelIgnore, fileNameSymbol } from '@vercel/client';
import {
@@ -90,6 +91,7 @@ import {
import { ProjectEnvVariable, ProjectSettings } from '../../types';
import exposeSystemEnvs from './expose-system-envs';
import { treeKill } from '../tree-kill';
import { nodeHeadersToFetchHeaders } from './headers';

const frontendRuntimeSet = new Set(
  frameworkList.map(f => f.useRuntime?.use || '@vercel/static-build')
@@ -593,7 +595,7 @@ export default class DevServer {
      await this.exit();
    }

    if (warnings && warnings.length > 0) {
    if (warnings?.length > 0) {
      warnings.forEach(warning =>
        this.output.warn(warning.message, null, warning.link, warning.action)
      );
@@ -1106,6 +1108,7 @@ export default class DevServer {
      view = errorTemplate({
        http_status_code: statusCode,
        http_status_description,
        error_code,
        request_id: requestId,
      });
    }
@@ -1337,32 +1340,6 @@ export default class DevServer {
    return false;
  };

  /*
  runDevMiddleware = async (
    req: http.IncomingMessage,
    res: http.ServerResponse
  ) => {
    const { devMiddlewarePlugins } = await loadCliPlugins(
      this.cwd,
      this.output
    );
    try {
      for (let plugin of devMiddlewarePlugins) {
        const result = await plugin.plugin.runDevMiddleware(req, res, this.cwd);
        if (result.finished) {
          return result;
        }
      }
      return { finished: false };
    } catch (e) {
      return {
        finished: true,
        error: e,
      };
    }
  };
  */

  /**
   * Serve project directory as a v2 deployment.
   */
@@ -1429,13 +1406,60 @@ export default class DevServer {
    let statusCode: number | undefined;
    let prevUrl = req.url;
    let prevHeaders: HttpHeadersConfig = {};
    let middlewarePid: number | undefined;

    /*
    const middlewareResult = await this.runDevMiddleware(req, res);
    // Run the middleware file, if present, and apply any
    // mutations to the incoming request based on the
    // result of the middleware invocation.
    const middleware = [...this.buildMatches.values()].find(
      m => m.config?.middleware === true
    );
    if (middleware) {
      let startMiddlewareResult: StartDevServerResult | undefined;
      // TODO: can we add some caching to prevent (re-)starting
      // the middleware server for every HTTP request?
      const { envConfigs, files, devCacheDir, cwd: workPath } = this;
      try {
        startMiddlewareResult =
          await middleware.builderWithPkg.builder.startDevServer?.({
            files,
            entrypoint: middleware.entrypoint,
            workPath,
            repoRootPath: this.cwd,
            config: middleware.config || {},
            meta: {
              isDev: true,
              devCacheDir,
              requestUrl: req.url,
              env: { ...envConfigs.runEnv },
              buildEnv: { ...envConfigs.buildEnv },
            },
          });

        if (middlewareResult) {
          if (middlewareResult.error) {
            this.sendError(
        if (startMiddlewareResult) {
          const { port, pid } = startMiddlewareResult;
          middlewarePid = pid;
          this.devServerPids.add(pid);

          const middlewareReqHeaders = nodeHeadersToFetchHeaders(req.headers);

          // Add the Vercel platform proxy request headers
          const proxyHeaders = this.getProxyHeaders(req, requestId, true);
          for (const [name, value] of nodeHeadersToFetchHeaders(proxyHeaders)) {
            middlewareReqHeaders.set(name, value);
          }

          const middlewareRes = await fetch(
            `http://127.0.0.1:${port}${parsed.path}`,
            {
              headers: middlewareReqHeaders,
              method: req.method,
              redirect: 'manual',
            }
          );

          if (middlewareRes.status === 500) {
            await this.sendError(
              req,
              res,
              requestId,
@@ -1444,23 +1468,94 @@ export default class DevServer {
            );
            return;
          }
          if (middlewareResult.finished) {

          // Apply status code from middleware invocation,
          // for i.e. redirects or a custom 404 page
          res.statusCode = middlewareRes.status;

          let rewritePath = '';
          let contentType = '';
          let shouldContinue = false;
          const skipMiddlewareHeaders = new Set([
            'date',
            'connection',
            'content-length',
            'transfer-encoding',
          ]);
          for (const [name, value] of middlewareRes.headers) {
            if (name === 'x-middleware-next') {
              shouldContinue = value === '1';
            } else if (name === 'x-middleware-rewrite') {
              rewritePath = value;
              shouldContinue = true;
            } else if (name === 'content-type') {
              contentType = value;
            } else if (!skipMiddlewareHeaders.has(name)) {
              // Any other kind of response header should be included
              // on both the incoming HTTP request (for when proxying
              // to another function) and the outgoing HTTP response.
              res.setHeader(name, value);
              req.headers[name] = value;
            }
          }

          if (!shouldContinue) {
            const middlewareBody = await middlewareRes.buffer();
            this.setResponseHeaders(res, requestId);
            if (middlewareBody.length > 0) {
              res.setHeader('content-length', middlewareBody.length);
              if (contentType) {
                res.setHeader('content-type', contentType);
              }
              res.end(middlewareBody);
            } else {
              res.end();
            }
            return;
          }

          if (middlewareResult.pathname) {
            const origUrl = url.parse(req.url || '/', true);
            origUrl.pathname = middlewareResult.pathname;
            prevUrl = url.format(origUrl);
          }
          if (middlewareResult.query && prevUrl) {
            const origUrl = url.parse(req.url || '/', true);
            delete origUrl.search;
            Object.assign(origUrl.query, middlewareResult.query);
            prevUrl = url.format(origUrl);
          if (rewritePath) {
            // TODO: add validation?
            debug(`Detected rewrite path from middleware: "${rewritePath}"`);
            prevUrl = rewritePath;

            // Retain orginal pathname, but override query parameters from the rewrite
            const beforeRewriteUrl = req.url || '/';
            const rewriteUrlParsed = url.parse(beforeRewriteUrl, true);
            delete rewriteUrlParsed.search;
            rewriteUrlParsed.query = url.parse(rewritePath, true).query;
            req.url = url.format(rewriteUrlParsed);
            debug(
              `Rewrote incoming HTTP URL from "${beforeRewriteUrl}" to "${req.url}"`
            );
          }
        }
      } catch (err) {
        // `startDevServer()` threw an error. Most likely this means the dev
        // server process exited before sending the port information message
        // (missing dependency at runtime, for example).
        if (err.code === 'ENOENT') {
          err.message = `Command not found: ${chalk.cyan(
            err.path,
            ...err.spawnargs
          )}\nPlease ensure that ${cmd(err.path)} is properly installed`;
          err.link = 'https://vercel.link/command-not-found';
        }

        await this.sendError(
          req,
          res,
          requestId,
          'EDGE_FUNCTION_INVOCATION_FAILED',
          500
        );
        return;
      } finally {
        if (middlewarePid) {
          this.killBuilderDevServer(middlewarePid);
        }
      }
    }
    */

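To make the header protocol handled above concrete, a minimal middleware sketch (not part of the diff; it assumes the Web `Response` and `URL` globals available in the edge runtime) that exercises both branches the dev server looks for:

// Hypothetical sketch of the response-header protocol handled above:
export default function middleware(request) {
  const url = new URL(request.url);

  if (url.pathname === '/old-path') {
    // Triggers the `x-middleware-rewrite` branch: the dev server rewrites req.url
    return new Response(null, {
      headers: { 'x-middleware-rewrite': '/new-path?from=middleware' },
    });
  }

  // Triggers the `x-middleware-next` branch: routing continues unchanged
  return new Response(null, {
    headers: { 'x-middleware-next': '1' },
  });
}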
    for (const phase of phases) {
      statusCode = undefined;

@@ -1740,7 +1835,10 @@ export default class DevServer {
            isDev: true,
            requestPath,
            devCacheDir,
            env: { ...envConfigs.runEnv },
            env: {
              ...envConfigs.runEnv,
              VERCEL_BUILDER_DEBUG: this.output.debugEnabled ? '1' : undefined,
            },
            buildEnv: { ...envConfigs.buildEnv },
          },
        });
@@ -2185,13 +2283,7 @@ function proxyPass(
        `Failed to complete request to ${req.url}: ${error}`
      );
      if (!res.headersSent) {
        devServer.sendError(
          req,
          res,
          requestId,
          'NO_RESPONSE_FROM_FUNCTION',
          502
        );
        devServer.sendError(req, res, requestId, 'FUNCTION_INVOCATION_FAILED');
      }
    }
  );
@@ -2269,11 +2361,12 @@ async function findBuildMatch(
      if (!isIndex(match.src)) {
        return match;
      } else {
        // if isIndex === true and ends in .html, we're done. Otherwise, keep searching
        bestIndexMatch = match;
        // If isIndex === true and ends in `.html`, we're done.
        // Otherwise, keep searching.
        if (extname(match.src) === '.html') {
          return bestIndexMatch;
          return match;
        }
        bestIndexMatch = match;
      }
    }
  }
@@ -2295,6 +2388,13 @@ async function shouldServe(
    config,
    builderWithPkg: { builder },
  } = match;

  // "middleware" file is not served as a regular asset,
  // instead it gets invoked as part of the routing logic.
  if (config?.middleware === true) {
    return false;
  }

  const cleanSrc = src.endsWith('.html') ? src.slice(0, -5) : src;
  const trimmedPath = requestPath.endsWith('/')
    ? requestPath.slice(0, -1)
@@ -2447,12 +2547,10 @@ function needsBlockingBuild(buildMatch: BuildMatch): boolean {
  return typeof builder.shouldServe !== 'function';
}

async function checkForPort(
  port: number | undefined,
  timeout: number
): Promise<void> {
async function checkForPort(port: number, timeout: number): Promise<void> {
  const opts = { host: '127.0.0.1' };
  const start = Date.now();
  while (!(await isPortReachable(port))) {
  while (!(await isPortReachable(port, opts))) {
    if (Date.now() - start > timeout) {
      throw new Error(`Detecting port ${port} timed out after ${timeout}ms`);
    }

@@ -14,7 +14,7 @@ import { responseError } from './error';
import stamp from './output/stamp';
import { APIError, BuildError } from './errors-ts';
import printIndications from './print-indications';
import { Org } from '../types';
import { GitMetadata, Org } from '../types';
import { VercelConfig } from './dev/types';
import Client, { FetchOptions, isJSONObject } from './client';
import { Dictionary } from '@vercel/client';
@@ -38,6 +38,7 @@ export interface CreateOptions {
  prebuilt?: boolean;
  rootDirectory?: string;
  meta: Dictionary<string>;
  gitMetadata?: GitMetadata;
  regions?: string[];
  quiet?: boolean;
  env: Dictionary<string>;
@@ -116,6 +117,7 @@ export default class Now extends EventEmitter {
      rootDirectory,
      wantsPublic,
      meta,
      gitMetadata,
      regions,
      quiet = false,
      env,
@@ -142,6 +144,7 @@ export default class Now extends EventEmitter {
        name,
        project,
        meta,
        gitMetadata,
        regions,
        target: target || undefined,
        projectSettings,

@@ -2,37 +2,80 @@ import inquirer from 'inquirer';
import confirm from './confirm';
import chalk from 'chalk';
import { Output } from '../output';
import { Framework } from '@vercel/frameworks';
import frameworkList, { Framework } from '@vercel/frameworks';
import { isSettingValue } from '../is-setting-value';
import { ProjectSettings } from '../../types';

export interface PartialProjectSettings {
  buildCommand: string | null;
  outputDirectory: string | null;
  devCommand: string | null;
}

const fields: { name: string; value: keyof PartialProjectSettings }[] = [
  { name: 'Build Command', value: 'buildCommand' },
  { name: 'Output Directory', value: 'outputDirectory' },
  { name: 'Development Command', value: 'devCommand' },
const settingMap = {
  buildCommand: 'Build Command',
  devCommand: 'Development Command',
  commandForIgnoringBuildStep: 'Ignore Command',
  installCommand: 'Install Command',
  outputDirectory: 'Output Directory',
  framework: 'Framework',
} as const;
type ConfigKeys = keyof typeof settingMap;
const settingKeys = Object.keys(settingMap).sort() as unknown as readonly [
  ConfigKeys
];

export type PartialProjectSettings = Pick<ProjectSettings, ConfigKeys>;

export default async function editProjectSettings(
  output: Output,
  projectSettings: PartialProjectSettings | null,
  framework: Framework | null,
  autoConfirm?: boolean
  autoConfirm: boolean,
  localConfigurationOverrides: PartialProjectSettings | null
): Promise<ProjectSettings> {
  // create new settings object, missing values will be filled with `null`
  // Create initial settings object defaulting everything to `null` and assigning what may exist in `projectSettings`
  const settings: ProjectSettings = Object.assign(
    { framework: null },
    {
      buildCommand: null,
      devCommand: null,
      framework: null,
      commandForIgnoringBuildStep: null,
      installCommand: null,
      outputDirectory: null,
    },
    projectSettings
  );

  for (let field of fields) {
    settings[field.value] =
      (projectSettings && projectSettings[field.value]) || null;
  // Start UX by displaying (and applying) overrides. They will be referenced throughout remainder of CLI.
  if (localConfigurationOverrides) {
    // Apply local overrides (from `vercel.json`)
    for (const setting of settingKeys) {
      const localConfigValue = localConfigurationOverrides[setting];
      if (localConfigValue) settings[setting] = localConfigValue;
    }

    output.print('Local settings detected in vercel.json:\n');

    // Print provided overrides including framework
    for (const setting of settingKeys) {
      const override = localConfigurationOverrides[setting];
      if (override) {
        output.print(
          `${chalk.dim(
            `- ${chalk.bold(`${settingMap[setting]}:`)} ${override}`
          )}\n`
        );
      }
    }

    // If framework is overridden, set it to the `framework` parameter and let the normal framework-flow occur
    if (localConfigurationOverrides.framework) {
      const overrideFramework = frameworkList.find(
        f => f.slug === localConfigurationOverrides.framework
      );

      if (overrideFramework) {
        framework = overrideFramework;
        output.print(
          `Merging default Project Settings for ${framework.name}. Previously listed overrides are prioritized.\n`
        );
      }
    }
  }

  // skip editing project settings if no framework is detected
@@ -41,6 +84,7 @@ export default async function editProjectSettings(
    return settings;
  }

  // A missing framework slug implies the "Other" framework was selected
  output.print(
    !framework.slug
      ? `No framework detected. Default Project Settings:\n`
@@ -49,44 +93,78 @@ export default async function editProjectSettings(

  settings.framework = framework.slug;

  for (let field of fields) {
    const defaults = framework.settings[field.value];

    output.print(
      chalk.dim(
        `- ${chalk.bold(`${field.name}:`)} ${`${
          isSettingValue(defaults)
            ? defaults.value
            : chalk.italic(`${defaults.placeholder}`)
        }`}`
      ) + '\n'
    );
  // Now print defaults for the provided framework whether it was auto-detected or overwritten
  for (const setting of settingKeys) {
    if (setting === 'framework' || setting === 'commandForIgnoringBuildStep') {
      continue;
    }

    const defaultSetting = framework.settings[setting];
    const override = localConfigurationOverrides?.[setting];

    if (!override && defaultSetting) {
      output.print(
        `${chalk.dim(
          `- ${chalk.bold(`${settingMap[setting]}:`)} ${
            isSettingValue(defaultSetting)
              ? defaultSetting.value
              : chalk.italic(`${defaultSetting.placeholder}`)
          }`
        )}\n`
      );
    }
  }

  // Prompt the user if they want to modify any settings not defined by local configuration.
  if (
    autoConfirm ||
    !(await confirm(`Want to override the settings?`, false))
    !(await confirm('Want to modify these settings?', false))
  ) {
    return settings;
  }

  const { settingFields } = await inquirer.prompt({
  const choices = settingKeys.reduce<Array<{ name: string; value: string }>>(
    (acc, setting) => {
      const skip =
        setting === 'framework' ||
        setting === 'commandForIgnoringBuildStep' ||
        setting === 'installCommand' ||
        localConfigurationOverrides?.[setting];
      if (!skip) {
        acc.push({ name: settingMap[setting], value: setting });
      }
      return acc;
    },
    []
  );

  const { settingFields } = await inquirer.prompt<{
    settingFields: Array<
      Exclude<
        ConfigKeys,
        'framework' | 'commandForIgnoringBuildStep' | 'installCommand'
      >
    >;
  }>({
    name: 'settingFields',
    type: 'checkbox',
    message: 'Which settings would you like to overwrite (select multiple)?',
    choices: fields,
    choices,
  });

  for (let setting of settingFields as (keyof PartialProjectSettings)[]) {
    const field = fields.find(f => f.value === setting);
    const name = `${Date.now()}`;
    const answers = await inquirer.prompt({
  for (let setting of settingFields) {
    const field = settingMap[setting];
    const answers = await inquirer.prompt<{
      [k in Exclude<
        ConfigKeys,
        'framework' | 'commandForIgnoringBuildStep' | 'installCommand'
      >]: string;
    }>({
      type: 'input',
      name: name,
      message: `What's your ${chalk.bold(field ? field.name : setting)}?`,
      name: setting,
      message: `What's your ${chalk.bold(field)}?`,
    });
    settings[setting] = answers[name] as string;
    settings[setting] = answers[setting];
  }

  return settings;
}

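For orientation, a hypothetical call to the new signature (not part of the diff; `output` and `nextjsFramework` are assumed variables and the values are made up). Local `vercel.json` overrides win over the detected framework defaults, and `autoConfirm` skips the interactive prompt:

// Hypothetical call sketch, not part of the diff:
const settings = await editProjectSettings(
  output,
  null,                // no existing project settings
  nextjsFramework,     // detected framework (assumed variable)
  true,                // autoConfirm: skip the interactive prompt
  { buildCommand: 'next build && node postbuild.js' } // override from vercel.json
);
// settings.buildCommand === 'next build && node postbuild.js' (override kept)
// settings.framework    === nextjsFramework.slug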
@@ -20,7 +20,9 @@ import selectOrg from '../input/select-org';
import inputProject from '../input/input-project';
import { validateRootDirectory } from '../validate-paths';
import { inputRootDirectory } from '../input/input-root-directory';
import editProjectSettings from '../input/edit-project-settings';
import editProjectSettings, {
  PartialProjectSettings,
} from '../input/edit-project-settings';
import stamp from '../output/stamp';
import { EmojiLabel } from '../emoji';
import createDeploy from '../deploy/create-deploy';
@@ -162,6 +164,16 @@ export default async function setupAndLink(
    client,
    currentTeam: config.currentTeam,
  });

  const localConfigurationOverrides: PartialProjectSettings = {
    buildCommand: localConfig?.buildCommand,
    devCommand: localConfig?.devCommand,
    framework: localConfig?.framework,
    commandForIgnoringBuildStep: localConfig?.ignoreCommand,
    installCommand: localConfig?.installCommand,
    outputDirectory: localConfig?.outputDirectory,
  };

  const createArgs: CreateOptions = {
    name: newProjectName,
    env: {},
@@ -176,13 +188,12 @@ export default async function setupAndLink(
    deployStamp: stamp(),
    target: undefined,
    skipAutoDetectionConfirmation: false,
    projectSettings: {
      ...localConfigurationOverrides,
      sourceFilesOutsideRootDirectory,
    },
  };

  if (isZeroConfig) {
    // Only add projectSettings for zero config deployments
    createArgs.projectSettings = { sourceFilesOutsideRootDirectory };
  }

  const deployment = await createDeploy(
    client,
    now,
@@ -216,7 +227,8 @@ export default async function setupAndLink(
    output,
    projectSettings,
    framework,
    autoConfirm
    autoConfirm,
    localConfigurationOverrides
  );
}

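For illustration, a hypothetical `vercel.json` (not part of the diff) whose fields would populate `localConfigurationOverrides` above; note that `ignoreCommand` in the file maps to `commandForIgnoringBuildStep` in Project Settings:

{
  "buildCommand": "npm run build",
  "devCommand": "npm run dev",
  "framework": "nextjs",
  "installCommand": "npm install",
  "outputDirectory": "dist",
  "ignoreCommand": "git diff --quiet HEAD^ HEAD ./"
}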
@@ -1,5 +1,4 @@
import chalk from 'chalk';
import boxen from 'boxen';
import renderLink from './link';
import wait, { StopSpinner } from './wait';
import { Writable } from 'stream';
@@ -17,7 +16,7 @@ export interface LogOptions extends PrintOptions {
}

export class Output {
  private debugEnabled: boolean;
  debugEnabled: boolean;
  private spinnerMessage: string;
  private _spinner: StopSpinner | null;
  isTTY: boolean;
@@ -51,28 +50,15 @@ export class Output {
    str: string,
    slug: string | null = null,
    link: string | null = null,
    action: string | null = 'Learn More',
    options?: {
      boxen?: boxen.Options;
    }
    action: string | null = 'Learn More'
  ) => {
    const details = slug ? `https://err.sh/vercel/${slug}` : link;

    this.print(
      boxen(
        chalk.bold.yellow('WARN! ') +
      chalk.yellow(
        chalk.bold('WARN! ') +
          str +
          (details ? `\n${action}: ${renderLink(details)}` : ''),
        {
          padding: {
            top: 0,
            bottom: 0,
            left: 1,
            right: 1,
          },
          borderColor: 'yellow',
          ...options?.boxen,
        }
          (details ? `\n${action}: ${renderLink(details)}` : '')
      )
    );
    this.print('\n');

@@ -1,6 +1,14 @@
import _pkg from '../../package.json';
import fs from 'fs';
import { join } from 'path';
import { PackageJson } from '@vercel/build-utils';

const pkg: PackageJson & typeof _pkg = _pkg;
let rootDir = __dirname;
while (!fs.existsSync(join(rootDir, 'package.json'))) {
  rootDir = join(rootDir, '..');
}

const pkgPath = join(rootDir, 'package.json');
const pkg: PackageJson & typeof import('../../package.json') = JSON.parse(
  fs.readFileSync(pkgPath, 'utf8')
);
export default pkg;

@@ -0,0 +1,8 @@
const { readFileSync } = require('fs');
const { join } = require('path');

module.exports = function handler(_req, res) {
  const path = join(__dirname, '[id].js');
  const file = readFileSync(path, 'utf8');
  res.end(file ? 'found .js' : 'did not find .js');
};
@@ -0,0 +1,7 @@
import { readFileSync } from 'fs';

export default function handler(_req, res) {
  const url = new URL('[id].mjs', import.meta.url);
  const file = readFileSync(url, 'utf8');
  res.end(file ? 'found .mjs' : 'did not find .mjs');
};
@@ -0,0 +1,7 @@
export const config = {
  runtime: 'invalid-runtime-value',
};

export default async function edge(request, event) {
  throw new Error('intentional runtime error');
}
@@ -0,0 +1,8 @@
export const config = {
  runtime: 'experimental-edge',
};

export async function notTheDefaultExport(request, event) {
  // this will never be run
  return new Response('some response body');
}
@@ -0,0 +1,7 @@
export const config = {
  runtime: 'experimental-edge',
};

export default async function edge(request, event) {
  throw new Error('intentional runtime error');
}
@@ -0,0 +1,10 @@
export const config = {
  runtime: 'experimental-edge',
};

export default async function edge(request, event) {
  // this should never be executed
  return new Response('some response body');
}

throw new Error('intentional startup error');
@@ -0,0 +1,9 @@
export const config = {
  runtime: 'experimental-edge'
}

export default async function edge(request: Request, event: Event) {
  return new Response('some response body');

// intentional missing closing bracket to produce syntax error
// }
@@ -0,0 +1,9 @@
import unknownModule from 'unknown-module-893427589372458934795843';

export const config = {
  runtime: 'experimental-edge',
};

export default async function edge(request, event) {
  return new Response(unknownModule('some response body'));
}
@@ -0,0 +1,9 @@
export const config = {
  runtime: 'experimental-edge',
};

export default async function edge(request, event) {
  return new Response('responding with intentional 500 from user code', {
    status: 500,
  });
}
@@ -0,0 +1,22 @@
import decamelize from 'decamelize';
import { upper } from '../lib/upper';

export const config = {
  runtime: 'experimental-edge',
};

export default async function edge(request, event) {
  const requestBody = await request.text();

  return new Response(
    JSON.stringify({
      headerContentType: request.headers.get('content-type'),
      url: request.url,
      method: request.method,
      body: requestBody,
      decamelized: decamelize('someCamelCaseThing'),
      uppercase: upper('someThing'),
      optionalChaining: request?.doesnotexist ?? 'fallback',
    })
  );
}
@@ -0,0 +1,3 @@
export function upper(str) {
  return str.toUpperCase();
}
@@ -0,0 +1,6 @@
{
  "private": true,
  "dependencies": {
    "decamelize": "6.0.0"
  }
}
packages/cli/test/dev/fixtures/edge-function/yarn.lock (new file, 8 lines)
@@ -0,0 +1,8 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1


decamelize@6.0.0:
  version "6.0.0"
  resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-6.0.0.tgz#8cad4d916fde5c41a264a43d0ecc56fe3d31749e"
  integrity sha512-Fv96DCsdOgB6mdGl67MT5JaTNKRzrzill5OH5s8bjYJXVlcXyPYGyPsUkWyGV5p1TXI5esYIYMMeDJL0hEIwaA==
@@ -0,0 +1 @@
export default () => new Response(null, { status: 500 });
@@ -0,0 +1 @@
throw new Error('Middleware init error');
@@ -0,0 +1,3 @@
export default () => {
  throw new Error('Middleware handler error');
};
@@ -0,0 +1,13 @@
// Supports both a single string value or an array of matchers
export const config = {
  matcher: ['/about/:path*', '/dashboard/:path*'],
};

export default function middleware(request, _event) {
  const response = new Response('middleware response');

  // Set custom header
  response.headers.set('x-modified-edge', 'true');

  return response;
}
@@ -0,0 +1,9 @@
export default req => {
  const url = new URL(req.url);
  return new Response(null, {
    status: 302,
    headers: {
      location: `https://vercel.com${url.pathname}${url.search}`,
    },
  });
};
@@ -0,0 +1 @@
export default () => new Response('hi from middleware');
@@ -0,0 +1,5 @@
export default (req, res) => {
  res.json({
    url: req.url,
  });
};
@@ -0,0 +1,6 @@
export default () =>
  new Response(null, {
    headers: {
      'x-middleware-rewrite': '/api/fn?from-middleware=true',
    },
  });
@@ -0,0 +1 @@
<h1>Another</h1>
@@ -0,0 +1 @@
<h1>Index</h1>
@@ -0,0 +1,19 @@
export default req => {
  const url = new URL(req.url);

  if (url.pathname === '/') {
    // Pass-through "index.html" page
    return new Response(null, {
      headers: {
        'x-middleware-next': '1',
      },
    });
  }

  // Everything else goes to "another.html"
  return new Response(null, {
    headers: {
      'x-middleware-rewrite': '/another.html',
    },
  });
};
@@ -14,6 +14,271 @@ const {
|
||||
validateResponseHeaders,
|
||||
} = require('./utils.js');
|
||||
|
||||
test('[vercel dev] should support edge functions', async () => {
|
||||
const dir = fixture('edge-function');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
const body = { hello: 'world' };
|
||||
|
||||
let res = await fetch(`http://localhost:${port}/api/edge-success`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
validateResponseHeaders(res);
|
||||
|
||||
// support for edge functions has to manually ensure that these properties
|
||||
// are set up; so, we test that they are all passed through properly
|
||||
expect(await res.json()).toMatchObject({
|
||||
headerContentType: 'application/json',
|
||||
url: `http://localhost:${port}/api/edge-success`,
|
||||
method: 'POST',
|
||||
body: '{"hello":"world"}',
|
||||
decamelized: 'some_camel_case_thing',
|
||||
uppercase: 'SOMETHING',
|
||||
optionalChaining: 'fallback',
|
||||
});
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test(
|
||||
'[vercel dev] edge functions respond properly the same as production',
|
||||
testFixtureStdio('edge-function', async (testPath: any) => {
|
||||
await testPath(500, '/api/edge-500-response');
|
||||
await testPath(200, '/api/edge-success');
|
||||
})
|
||||
);
|
||||
|
||||
test('[vercel dev] should support edge functions returning intentional 500 responses', async () => {
|
||||
const dir = fixture('edge-function');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
const body = { hello: 'world' };
|
||||
|
||||
let res = await fetch(`http://localhost:${port}/api/edge-500-response`, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'content-type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(body),
|
||||
});
|
||||
validateResponseHeaders(res);
|
||||
|
||||
expect(await res.status).toBe(500);
|
||||
expect(await res.text()).toBe(
|
||||
'responding with intentional 500 from user code'
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should handle runtime errors thrown in edge functions', async () => {
|
||||
const dir = fixture('edge-function-error');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
let res = await fetch(`http://localhost:${port}/api/edge-error-runtime`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept:
|
||||
'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
},
|
||||
});
|
||||
validateResponseHeaders(res);
|
||||
|
||||
const { stdout, stderr } = await dev.kill('SIGTERM');
|
||||
|
||||
expect(await res.text()).toMatch(
|
||||
/<strong>500<\/strong>: INTERNAL_SERVER_ERROR/g
|
||||
);
|
||||
expect(stdout).toMatch(/Unhandled rejection: intentional runtime error/g);
|
||||
expect(stderr).toMatch(
|
||||
/Failed to complete request to \/api\/edge-error-runtime: Error: socket hang up/g
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should handle config errors thrown in edge functions', async () => {
|
||||
const dir = fixture('edge-function-error');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
let res = await fetch(`http://localhost:${port}/api/edge-error-config`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept:
|
||||
'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
},
|
||||
});
|
||||
validateResponseHeaders(res);
|
||||
|
||||
const { stderr } = await dev.kill('SIGTERM');
|
||||
|
||||
expect(await res.text()).toMatch(
|
||||
/<strong>500<\/strong>: INTERNAL_SERVER_ERROR/g
|
||||
);
|
||||
expect(stderr).toMatch(
|
||||
/Invalid function runtime "invalid-runtime-value" for "api\/edge-error-config.js". Valid runtimes are: \["experimental-edge"\]/g
|
||||
);
|
||||
expect(stderr).toMatch(
|
||||
/Failed to complete request to \/api\/edge-error-config: Error: socket hang up/g
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should handle startup errors thrown in edge functions', async () => {
|
||||
const dir = fixture('edge-function-error');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
let res = await fetch(`http://localhost:${port}/api/edge-error-startup`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept:
|
||||
'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
},
|
||||
});
|
||||
validateResponseHeaders(res);
|
||||
|
||||
const { stdout, stderr } = await dev.kill('SIGTERM');
|
||||
|
||||
expect(await res.text()).toMatch(
|
||||
/<strong>500<\/strong>: INTERNAL_SERVER_ERROR/g
|
||||
);
|
||||
expect(stdout).toMatch(
|
||||
/Failed to instantiate edge runtime: intentional startup error/g
|
||||
);
|
||||
expect(stderr).toMatch(
|
||||
/Failed to complete request to \/api\/edge-error-startup: Error: socket hang up/g
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should handle syntax errors thrown in edge functions', async () => {
|
||||
const dir = fixture('edge-function-error');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
let res = await fetch(`http://localhost:${port}/api/edge-error-syntax`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept:
|
||||
'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
},
|
||||
});
|
||||
validateResponseHeaders(res);
|
||||
|
||||
const { stdout, stderr } = await dev.kill('SIGTERM');
|
||||
|
||||
expect(await res.text()).toMatch(
|
||||
/<strong>500<\/strong>: INTERNAL_SERVER_ERROR/g
|
||||
);
|
||||
expect(stdout).toMatch(
|
||||
/Failed to instantiate edge runtime: Module parse failed: Unexpected token/g
|
||||
);
|
||||
expect(stderr).toMatch(
|
||||
/Failed to complete request to \/api\/edge-error-syntax: Error: socket hang up/g
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should handle import errors thrown in edge functions', async () => {
|
||||
const dir = fixture('edge-function-error');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
let res = await fetch(
|
||||
`http://localhost:${port}/api/edge-error-unknown-import`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept:
|
||||
'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
},
|
||||
}
|
||||
);
|
||||
validateResponseHeaders(res);
|
||||
|
||||
const { stdout, stderr } = await dev.kill('SIGTERM');
|
||||
|
||||
expect(await res.text()).toMatch(
|
||||
/<strong>500<\/strong>: INTERNAL_SERVER_ERROR/g
|
||||
);
|
||||
expect(stdout).toMatch(
|
||||
/Failed to instantiate edge runtime: Code generation from strings disallowed for this context/g
|
||||
);
|
||||
expect(stderr).toMatch(
|
||||
/Failed to complete request to \/api\/edge-error-unknown-import: Error: socket hang up/g
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should handle missing default export errors in edge functions', async () => {
|
||||
const dir = fixture('edge-function-error');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
try {
|
||||
await readyResolver;
|
||||
|
||||
let res = await fetch(
|
||||
`http://localhost:${port}/api/edge-error-no-handler`,
|
||||
{
|
||||
method: 'GET',
|
||||
headers: {
|
||||
Accept:
|
||||
'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
|
||||
},
|
||||
}
|
||||
);
|
||||
validateResponseHeaders(res);
|
||||
|
||||
const { stdout, stderr } = await dev.kill('SIGTERM');
|
||||
|
||||
expect(await res.text()).toMatch(
|
||||
/<strong>500<\/strong>: INTERNAL_SERVER_ERROR/g
|
||||
);
|
||||
expect(stdout).toMatch(
|
||||
/No default export was found. Add a default export to handle requests./g
|
||||
);
|
||||
expect(stderr).toMatch(
|
||||
/Failed to complete request to \/api\/edge-error-no-handler: Error: socket hang up/g
|
||||
);
|
||||
} finally {
|
||||
await dev.kill('SIGTERM');
|
||||
}
|
||||
});
|
||||
|
||||
test('[vercel dev] should support request body', async () => {
|
||||
const dir = fixture('node-request-body');
|
||||
const { dev, port, readyResolver } = await testFixture(dir);
|
||||
|
||||
@@ -315,6 +315,14 @@ test(
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] 42-dynamic-esm-ext',
|
||||
testFixtureStdio('42-dynamic-esm-ext', async (testPath: any) => {
|
||||
await testPath(200, '/api/cjs/foo', 'found .js');
|
||||
await testPath(200, '/api/esm/foo', 'found .mjs');
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Use `@vercel/python` with Flask requirements.txt',
|
||||
testFixtureStdio('python-flask', async (testPath: any) => {
|
||||
@@ -425,3 +433,93 @@ test(
|
||||
await testPath(404, '/i-do-not-exist');
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware that returns a 200 response',
|
||||
testFixtureStdio('middleware-response', async (testPath: any) => {
|
||||
await testPath(200, '/', 'hi from middleware');
|
||||
await testPath(200, '/another', 'hi from middleware');
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware that does basic rewrite',
|
||||
testFixtureStdio('middleware-rewrite', async (testPath: any) => {
|
||||
await testPath(200, '/', '<h1>Index</h1>');
|
||||
await testPath(200, '/index', '<h1>Another</h1>');
|
||||
await testPath(200, '/another', '<h1>Another</h1>');
|
||||
await testPath(200, '/another.html', '<h1>Another</h1>');
|
||||
await testPath(200, '/foo', '<h1>Another</h1>');
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware that rewrites with custom query params',
|
||||
testFixtureStdio('middleware-rewrite-query', async (testPath: any) => {
|
||||
await testPath(200, '/?foo=bar', '{"url":"/?from-middleware=true"}');
|
||||
await testPath(
|
||||
200,
|
||||
'/another?foo=bar',
|
||||
'{"url":"/another?from-middleware=true"}'
|
||||
);
|
||||
await testPath(
|
||||
200,
|
||||
'/api/fn?foo=bar',
|
||||
'{"url":"/api/fn?from-middleware=true"}'
|
||||
);
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware that redirects',
|
||||
testFixtureStdio('middleware-redirect', async (testPath: any) => {
|
||||
await testPath(302, '/', null, {
|
||||
location: 'https://vercel.com/',
|
||||
});
|
||||
await testPath(302, '/home', null, {
|
||||
location: 'https://vercel.com/home',
|
||||
});
|
||||
await testPath(302, '/?foo=bar', null, {
|
||||
location: 'https://vercel.com/?foo=bar',
|
||||
});
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware with error in function handler',
|
||||
testFixtureStdio('middleware-error-in-handler', async (testPath: any) => {
|
||||
await testPath(500, '/', /EDGE_FUNCTION_INVOCATION_FAILED/);
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware with error at init',
|
||||
testFixtureStdio('middleware-error-at-init', async (testPath: any) => {
|
||||
await testPath(500, '/', /EDGE_FUNCTION_INVOCATION_FAILED/);
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware with an explicit 500 response',
|
||||
testFixtureStdio('middleware-500-response', async (testPath: any) => {
|
||||
await testPath(500, '/', /EDGE_FUNCTION_INVOCATION_FAILED/);
|
||||
})
|
||||
);
|
||||
|
||||
test(
|
||||
'[vercel dev] Middleware with `matchers` config',
|
||||
testFixtureStdio(
|
||||
'middleware-matchers',
|
||||
async (testPath: any) => {
|
||||
// TODO: remove once latest `@vercel/node` is shipped to stable with `matchers` support (fails because `directoryListing`)
|
||||
//await testPath(404, '/');
|
||||
await testPath(404, '/another');
|
||||
await testPath(200, '/about/page', 'middleware response');
|
||||
await testPath(200, '/dashboard/home', 'middleware response');
|
||||
},
|
||||
{
|
||||
// TODO: remove once latest `@vercel/node` is shipped to stable with `matchers` support
|
||||
skipDeploy: true,
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
@@ -257,6 +257,10 @@ async function testFixture(directory, opts = {}, args = []) {
  dev.kill = async (...args) => {
    dev._kill(...args);
    await exitResolver;
    return {
      stdout,
      stderr,
    };
  };

  return {

packages/cli/test/fixtures/unit/commands/build/middleware-with-matcher/.vercel/project.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "orgId": ".",
  "projectId": ".",
  "settings": {
    "framework": null
  }
}
packages/cli/test/fixtures/unit/commands/build/middleware-with-matcher/index.html (new file, 1 line)
@@ -0,0 +1 @@
<h1>Vercel</h1>
packages/cli/test/fixtures/unit/commands/build/middleware-with-matcher/middleware.js (new file, 5 lines)
@@ -0,0 +1,5 @@
export const config = {
  matcher: ['/about/:path*', '/dashboard/:path*'],
};

export default () => new Response('middleware');
@@ -1 +1 @@
export default req => new Response('middleware');
export default () => new Response('middleware');

packages/cli/test/fixtures/unit/commands/build/normalize-src/.vercel/project.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
  "orgId": ".",
  "projectId": ".",
  "settings": {
    "framework": null
  }
}
packages/cli/test/fixtures/unit/commands/build/normalize-src/server.js (new file, 1 line)
@@ -0,0 +1 @@
module.exports = (req, res) => res.end('Vercel');
packages/cli/test/fixtures/unit/commands/build/normalize-src/vercel.json (new file, 15 lines)
@@ -0,0 +1,15 @@
{
  "version": 2,
  "builds": [
    {
      "src": "./server.js",
      "use": "@vercel/node"
    }
  ],
  "routes": [
    {
      "src": "/(.*)",
      "dest": "/server.js"
    }
  ]
}
1
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/HEAD
generated
vendored
Normal file
1
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/HEAD
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
ref: refs/heads/master
|
||||
7
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/config
generated
vendored
Normal file
7
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/config
generated
vendored
Normal file
@@ -0,0 +1,7 @@
|
||||
[core]
|
||||
repositoryformatversion = 0
|
||||
filemode = true
|
||||
bare = false
|
||||
logallrefupdates = true
|
||||
ignorecase = true
|
||||
precomposeunicode = true
|
||||
1
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/description
generated
vendored
Normal file
1
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/description
generated
vendored
Normal file
@@ -0,0 +1 @@
|
||||
Unnamed repository; edit this file 'description' to name the repository.
|
||||
15
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/applypatch-msg.sample
generated
vendored
Executable file
15
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/applypatch-msg.sample
generated
vendored
Executable file
@@ -0,0 +1,15 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to check the commit log message taken by
|
||||
# applypatch from an e-mail message.
|
||||
#
|
||||
# The hook should exit with non-zero status after issuing an
|
||||
# appropriate message if it wants to stop the commit. The hook is
|
||||
# allowed to edit the commit message file.
|
||||
#
|
||||
# To enable this hook, rename this file to "applypatch-msg".
|
||||
|
||||
. git-sh-setup
|
||||
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
|
||||
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
|
||||
:
|
||||
24
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/commit-msg.sample
generated
vendored
Executable file
24
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/commit-msg.sample
generated
vendored
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to check the commit log message.
|
||||
# Called by "git commit" with one argument, the name of the file
|
||||
# that has the commit message. The hook should exit with non-zero
|
||||
# status after issuing an appropriate message if it wants to stop the
|
||||
# commit. The hook is allowed to edit the commit message file.
|
||||
#
|
||||
# To enable this hook, rename this file to "commit-msg".
|
||||
|
||||
# Uncomment the below to add a Signed-off-by line to the message.
|
||||
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
|
||||
# hook is more suited to it.
|
||||
#
|
||||
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
|
||||
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
|
||||
|
||||
# This example catches duplicate Signed-off-by lines.
|
||||
|
||||
test "" = "$(grep '^Signed-off-by: ' "$1" |
|
||||
sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
|
||||
echo >&2 Duplicate Signed-off-by lines.
|
||||
exit 1
|
||||
}
|
||||
173
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/fsmonitor-watchman.sample
generated
vendored
Executable file
173
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/fsmonitor-watchman.sample
generated
vendored
Executable file
@@ -0,0 +1,173 @@
|
||||
#!/usr/bin/perl
|
||||
|
||||
use strict;
|
||||
use warnings;
|
||||
use IPC::Open2;
|
||||
|
||||
# An example hook script to integrate Watchman
|
||||
# (https://facebook.github.io/watchman/) with git to speed up detecting
|
||||
# new and modified files.
|
||||
#
|
||||
# The hook is passed a version (currently 2) and last update token
|
||||
# formatted as a string and outputs to stdout a new update token and
|
||||
# all files that have been modified since the update token. Paths must
|
||||
# be relative to the root of the working tree and separated by a single NUL.
|
||||
#
|
||||
# To enable this hook, rename this file to "query-watchman" and set
|
||||
# 'git config core.fsmonitor .git/hooks/query-watchman'
|
||||
#
|
||||
my ($version, $last_update_token) = @ARGV;
|
||||
|
||||
# Uncomment for debugging
|
||||
# print STDERR "$0 $version $last_update_token\n";
|
||||
|
||||
# Check the hook interface version
|
||||
if ($version ne 2) {
|
||||
die "Unsupported query-fsmonitor hook version '$version'.\n" .
|
||||
"Falling back to scanning...\n";
|
||||
}
|
||||
|
||||
my $git_work_tree = get_working_dir();
|
||||
|
||||
my $retry = 1;
|
||||
|
||||
my $json_pkg;
|
||||
eval {
|
||||
require JSON::XS;
|
||||
$json_pkg = "JSON::XS";
|
||||
1;
|
||||
} or do {
|
||||
require JSON::PP;
|
||||
$json_pkg = "JSON::PP";
|
||||
};
|
||||
|
||||
launch_watchman();
|
||||
|
||||
sub launch_watchman {
|
||||
my $o = watchman_query();
|
||||
if (is_work_tree_watched($o)) {
|
||||
output_result($o->{clock}, @{$o->{files}});
|
||||
}
|
||||
}
|
||||
|
||||
sub output_result {
|
||||
my ($clockid, @files) = @_;
|
||||
|
||||
# Uncomment for debugging watchman output
|
||||
# open (my $fh, ">", ".git/watchman-output.out");
|
||||
# binmode $fh, ":utf8";
|
||||
# print $fh "$clockid\n@files\n";
|
||||
# close $fh;
|
||||
|
||||
binmode STDOUT, ":utf8";
|
||||
print $clockid;
|
||||
print "\0";
|
||||
local $, = "\0";
|
||||
print @files;
|
||||
}
|
||||
|
||||
sub watchman_clock {
|
||||
my $response = qx/watchman clock "$git_work_tree"/;
|
||||
die "Failed to get clock id on '$git_work_tree'.\n" .
|
||||
"Falling back to scanning...\n" if $? != 0;
|
||||
|
||||
return $json_pkg->new->utf8->decode($response);
|
||||
}
|
||||
|
||||
sub watchman_query {
|
||||
my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
|
||||
or die "open2() failed: $!\n" .
|
||||
"Falling back to scanning...\n";
|
||||
|
||||
# In the query expression below we're asking for names of files that
|
||||
# changed since $last_update_token but not from the .git folder.
|
||||
#
|
||||
# To accomplish this, we're using the "since" generator to use the
|
||||
# recency index to select candidate nodes and "fields" to limit the
|
||||
# output to file names only. Then we're using the "expression" term to
|
||||
# further constrain the results.
|
||||
if (substr($last_update_token, 0, 1) eq "c") {
|
||||
$last_update_token = "\"$last_update_token\"";
|
||||
}
|
||||
my $query = <<" END";
|
||||
["query", "$git_work_tree", {
|
||||
"since": $last_update_token,
|
||||
"fields": ["name"],
|
||||
"expression": ["not", ["dirname", ".git"]]
|
||||
}]
|
||||
END
|
||||
|
||||
# Uncomment for debugging the watchman query
|
||||
# open (my $fh, ">", ".git/watchman-query.json");
|
||||
# print $fh $query;
|
||||
# close $fh;
|
||||
|
||||
print CHLD_IN $query;
|
||||
close CHLD_IN;
|
||||
my $response = do {local $/; <CHLD_OUT>};
|
||||
|
||||
# Uncomment for debugging the watch response
|
||||
# open ($fh, ">", ".git/watchman-response.json");
|
||||
# print $fh $response;
|
||||
# close $fh;
|
||||
|
||||
die "Watchman: command returned no output.\n" .
|
||||
"Falling back to scanning...\n" if $response eq "";
|
||||
die "Watchman: command returned invalid output: $response\n" .
|
||||
"Falling back to scanning...\n" unless $response =~ /^\{/;
|
||||
|
||||
return $json_pkg->new->utf8->decode($response);
|
||||
}
|
||||
|
||||
sub is_work_tree_watched {
|
||||
my ($output) = @_;
|
||||
my $error = $output->{error};
|
||||
if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
|
||||
$retry--;
|
||||
my $response = qx/watchman watch "$git_work_tree"/;
|
||||
die "Failed to make watchman watch '$git_work_tree'.\n" .
|
||||
"Falling back to scanning...\n" if $? != 0;
|
||||
$output = $json_pkg->new->utf8->decode($response);
|
||||
$error = $output->{error};
|
||||
die "Watchman: $error.\n" .
|
||||
"Falling back to scanning...\n" if $error;
|
||||
|
||||
# Uncomment for debugging watchman output
|
||||
# open (my $fh, ">", ".git/watchman-output.out");
|
||||
# close $fh;
|
||||
|
||||
# Watchman will always return all files on the first query so
|
||||
# return the fast "everything is dirty" flag to git and do the
|
||||
# Watchman query just to get it over with now so we won't pay
|
||||
# the cost in git to look up each individual file.
|
||||
my $o = watchman_clock();
|
||||
$error = $output->{error};
|
||||
|
||||
die "Watchman: $error.\n" .
|
||||
"Falling back to scanning...\n" if $error;
|
||||
|
||||
output_result($o->{clock}, ("/"));
|
||||
$last_update_token = $o->{clock};
|
||||
|
||||
eval { launch_watchman() };
|
||||
return 0;
|
||||
}
|
||||
|
||||
die "Watchman: $error.\n" .
|
||||
"Falling back to scanning...\n" if $error;
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
sub get_working_dir {
|
||||
my $working_dir;
|
||||
if ($^O =~ 'msys' || $^O =~ 'cygwin') {
|
||||
$working_dir = Win32::GetCwd();
|
||||
$working_dir =~ tr/\\/\//;
|
||||
} else {
|
||||
require Cwd;
|
||||
$working_dir = Cwd::cwd();
|
||||
}
|
||||
|
||||
return $working_dir;
|
||||
}
|
||||
8
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/post-update.sample
generated
vendored
Executable file
8
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/post-update.sample
generated
vendored
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to prepare a packed repository for use over
|
||||
# dumb transports.
|
||||
#
|
||||
# To enable this hook, rename this file to "post-update".
|
||||
|
||||
exec git update-server-info
|
||||
14
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/pre-applypatch.sample
generated
vendored
Executable file
14
packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/pre-applypatch.sample
generated
vendored
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/sh
|
||||
#
|
||||
# An example hook script to verify what is about to be committed
|
||||
# by applypatch from an e-mail message.
|
||||
#
|
||||
# The hook should exit with non-zero status after issuing an
|
||||
# appropriate message if it wants to stop the commit.
|
||||
#
|
||||
# To enable this hook, rename this file to "pre-applypatch".
|
||||
|
||||
. git-sh-setup
|
||||
precommit="$(git rev-parse --git-path hooks/pre-commit)"
|
||||
test -x "$precommit" && exec "$precommit" ${1+"$@"}
|
||||
:
|
||||
49  packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/pre-commit.sample  generated vendored Executable file
@@ -0,0 +1,49 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git commit" with no arguments.  The hook should
# exit with non-zero status after issuing an appropriate message if
# it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-commit".

if git rev-parse --verify HEAD >/dev/null 2>&1
then
    against=HEAD
else
    # Initial commit: diff against an empty tree object
    against=$(git hash-object -t tree /dev/null)
fi

# If you want to allow non-ASCII filenames set this variable to true.
allownonascii=$(git config --type=bool hooks.allownonascii)

# Redirect output to stderr.
exec 1>&2

# Cross platform projects tend to avoid non-ASCII filenames; prevent
# them from being added to the repository. We exploit the fact that the
# printable range starts at the space character and ends with tilde.
if [ "$allownonascii" != "true" ] &&
    # Note that the use of brackets around a tr range is ok here, (it's
    # even required, for portability to Solaris 10's /usr/bin/tr), since
    # the square bracket bytes happen to fall in the designated range.
    test $(git diff --cached --name-only --diff-filter=A -z $against |
      LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
then
    cat <<\EOF
Error: Attempt to add a non-ASCII file name.

This can cause problems if you want to work with people on other platforms.

To be portable it is advisable to rename the file.

If you know what you are doing you can disable this check using:

  git config hooks.allownonascii true
EOF
    exit 1
fi

# If there are whitespace errors, print the offending file names and fail.
exec git diff-index --check --cached $against --
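The pre-commit sample above rejects newly added non-ASCII file names and then runs git's whitespace check. A minimal sketch of wiring it up in an ordinary clone, using only the steps the script's own comments describe (paths relative to the repository root):

    # Activate the sample by renaming it, as the header comment instructs
    mv .git/hooks/pre-commit.sample .git/hooks/pre-commit
    # Opt out of the non-ASCII file name check if you really need to
    git config hooks.allownonascii true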
13  packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/pre-merge-commit.sample  generated vendored Executable file
@@ -0,0 +1,13 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git merge" with no arguments.  The hook should
# exit with non-zero status after issuing an appropriate message to
# stderr if it wants to stop the merge commit.
#
# To enable this hook, rename this file to "pre-merge-commit".

. git-sh-setup
test -x "$GIT_DIR/hooks/pre-commit" &&
    exec "$GIT_DIR/hooks/pre-commit"
:
53  packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/pre-push.sample  generated vendored Executable file
@@ -0,0 +1,53 @@
#!/bin/sh

# An example hook script to verify what is about to be pushed.  Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed.  If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
#   <local ref> <local oid> <remote ref> <remote oid>
#
# This sample shows how to prevent push of commits where the log message starts
# with "WIP" (work in progress).

remote="$1"
url="$2"

zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')

while read local_ref local_oid remote_ref remote_oid
do
    if test "$local_oid" = "$zero"
    then
        # Handle delete
        :
    else
        if test "$remote_oid" = "$zero"
        then
            # New branch, examine all commits
            range="$local_oid"
        else
            # Update to existing branch, examine new commits
            range="$remote_oid..$local_oid"
        fi

        # Check for WIP commit
        commit=$(git rev-list -n 1 --grep '^WIP' "$range")
        if test -n "$commit"
        then
            echo >&2 "Found WIP commit in $local_ref, not pushing"
            exit 1
        fi
    fi
done

exit 0
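The pre-push sample reads one line per ref from standard input in the <local ref> <local oid> <remote ref> <remote oid> form described in its header comment. A minimal sketch of exercising it by hand, assuming placeholder object ids, remote name, and URL (in normal use, git push supplies this input itself):

    # Hypothetical invocation; the two 40-character ids and the URL are placeholders
    printf 'refs/heads/topic %s refs/heads/topic %s\n' \
        1111111111111111111111111111111111111111 \
        2222222222222222222222222222222222222222 |
        .git/hooks/pre-push origin https://example.com/example/repo.git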
169  packages/cli/test/fixtures/unit/create-git-meta/dirty/git/hooks/pre-rebase.sample  generated vendored Executable file
@@ -0,0 +1,169 @@
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.

publish=next
basebranch="$1"
if test "$#" = 2
then
    topic="refs/heads/$2"
else
    topic=`git symbolic-ref HEAD` ||
    exit 0 ;# we do not interrupt rebasing detached HEAD
fi

case "$topic" in
refs/heads/??/*)
    ;;
*)
    exit 0 ;# we do not interrupt others.
    ;;
esac

# Now we are dealing with a topic branch being rebased
# on top of master.  Is it OK to rebase it?

# Does the topic really exist?
git show-ref -q "$topic" || {
    echo >&2 "No such branch $topic"
    exit 1
}

# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
    echo >&2 "$topic is fully merged to master; better remove it."
    exit 1 ;# we could allow it, but there is no point.
fi

# Is topic ever merged to next?  If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
    not_in_topic=`git rev-list "^$topic" master`
    if test -z "$not_in_topic"
    then
        echo >&2 "$topic is already up to date with master"
        exit 1 ;# we could allow it, but there is no point.
    else
        exit 0
    fi
else
    not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
    /usr/bin/perl -e '
        my $topic = $ARGV[0];
        my $msg = "* $topic has commits already merged to public branch:\n";
        my (%not_in_next) = map {
            /^([0-9a-f]+) /;
            ($1 => 1);
        } split(/\n/, $ARGV[1]);
        for my $elem (map {
            /^([0-9a-f]+) (.*)$/;
            [$1 => $2];
        } split(/\n/, $ARGV[2])) {
            if (!exists $not_in_next{$elem->[0]}) {
                if ($msg) {
                    print STDERR $msg;
                    undef $msg;
                }
                print STDERR " $elem->[1]\n";
            }
        }
    ' "$topic" "$not_in_next" "$not_in_master"
    exit 1
fi

<<\DOC_END

This sample hook safeguards topic branches that have been
published from being rewound.

The workflow assumed here is:

 * Once a topic branch forks from "master", "master" is never
   merged into it again (either directly or indirectly).

 * Once a topic branch is fully cooked and merged into "master",
   it is deleted.  If you need to build on top of it to correct
   earlier mistakes, a new topic branch is created by forking at
   the tip of the "master".  This is not strictly necessary, but
   it makes it easier to keep your history simple.

 * Whenever you need to test or publish your changes to topic
   branches, merge them into "next" branch.

The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.

With this workflow, you would want to know:

(1) ... if a topic branch has ever been merged to "next".  Young
    topic branches can have stupid mistakes you would rather
    clean up before publishing, and things that have not been
    merged into other branches can be easily rebased without
    affecting other people.  But once it is published, you would
    not want to rewind it.

(2) ... if a topic branch has been fully merged to "master".
    Then you can delete it.  More importantly, you should not
    build on top of it -- other people may already want to
    change things related to the topic as patches against your
    "master", so if you need further changes, it is better to
    fork the topic (perhaps with the same name) afresh from the
    tip of "master".

Let's look at this example:

                   o---o---o---o---o---o---o---o---o---o "next"
                  /       /           /           /
                 /   a---a---b A     /           /
                /   /             \ /           /
               /   /   c---c---c---c B         /
              /   /   /             \         /
             /   /   /   b---b C     \       /
            /   /   /   /             \     /
    ---o---o---o---o---o---o---o---o---o---o---o "master"


    A, B and C are topic branches.

     * A has one fix since it was merged up to "next".

     * B has finished.  It has been fully merged up to "master" and "next",
       and is ready to be deleted.

     * C has not merged to "next" at all.

    We would want to allow C to be rebased, refuse A, and encourage
    B to be deleted.

    To compute (1):

        git rev-list ^master ^topic next
        git rev-list ^master next

        if these match, topic has not merged in next at all.

    To compute (2):

        git rev-list master..topic

        if this is empty, it is fully merged to "master".

DOC_END
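The DOC_END notes above boil down to two rev-list comparisons. A minimal sketch of running them as a script, assuming a hypothetical branch named topic and the hardcoded publish branch next:

    # (1) If the two listings match, topic has never been merged to next (rebasing is safe)
    test "$(git rev-list ^master ^topic next | sort)" = "$(git rev-list ^master next | sort)" &&
        echo 'topic has not been merged to next'
    # (2) If this range is empty, topic is fully merged to master (delete it instead of rebasing)
    test -z "$(git rev-list master..topic)" &&
        echo 'topic is fully merged to master'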
Some files were not shown because too many files have changed in this diff.