Compare commits

..

4 Commits

Author SHA1 Message Date
Chris Barber
7082da8451 Publish Stable
- vercel@28.20.0
 - @vercel/client@12.4.11
 - @vercel/gatsby-plugin-vercel-analytics@1.0.10
 - @vercel/gatsby-plugin-vercel-builder@1.2.10
 - @vercel/node@2.12.0
 - @vercel/static-build@1.3.25
2023-04-20 09:19:59 -05:00
Chris
20a7b2f2d4 [all] Update naming for Next.js Analytics (#9835)
Updates the name of Next.js Analytics to Next.js Speed Insights

closes ALY-484
2023-04-20 10:07:41 -04:00
Gary Borton
a5c3cbcd45 Allow passing of local git meta even with no remote. (#9824)
This enables the CLI to pass locally detected git metadata even when there is no available remote. It requires a corresponding internal change, which is blocking the e2e tests.
2023-04-20 13:11:17 +00:00
Kiko Beats
6dded87426 [node] Add streaming support for vc dev (#9745)
Until now, the user code response is buffered and serialized. This
mismatches how Vercel works these days.

This PR enables streaming response in `vc dev` for Edge/Serverless.

As part of the implementation, the `node-bridge` which spawns a process
to consume the user code is not necessary anymore.

Some necessary files (like HTTP server helpers) have been moved to live
in node builder package instead.

---------

Co-authored-by: Ethan Arrowood <ethan.arrowood@vercel.com>
Co-authored-by: Sean Massa <EndangeredMassa@gmail.com>
2023-04-19 23:56:41 +02:00
80 changed files with 1434 additions and 2835 deletions

View File

@@ -26,12 +26,6 @@ packages/hydrogen/edge-entry.js
packages/next/test/integration/middleware
packages/next/test/integration/middleware-eval
# node-bridge
packages/node-bridge/bridge.js
packages/node-bridge/launcher.js
packages/node-bridge/helpers.js
packages/node-bridge/source-map-support.js
# middleware
packages/middleware/src/entries.js

2
.github/CODEOWNERS vendored
View File

@@ -4,7 +4,6 @@
* @TooTallNate @EndangeredMassa @styfle @cb1kenobi @Ethan-Arrowood
/.github/workflows @TooTallNate @EndangeredMassa @styfle @cb1kenobi @Ethan-Arrowood @ijjk
/packages/fs-detectors @TooTallNate @EndangeredMassa @styfle @cb1kenobi @Ethan-Arrowood @agadzik @chloetedder
/packages/node-bridge @TooTallNate @EndangeredMassa @styfle @cb1kenobi @Ethan-Arrowood @ijjk
/packages/next @TooTallNate @EndangeredMassa @styfle @cb1kenobi @Ethan-Arrowood @ijjk
/packages/routing-utils @TooTallNate @EndangeredMassa @styfle @cb1kenobi @Ethan-Arrowood @ijjk
/packages/edge @vercel/edge-compute
@@ -14,3 +13,4 @@
/examples/hugo @styfle
/examples/jekyll @styfle
/examples/zola @styfle
/packages/node @Kikobeats

View File

@@ -32,7 +32,7 @@ export function sendToAnalytics(metric, options) {
};
if (options.debug) {
console.log("[Analytics]", metric.name, JSON.stringify(body, null, 2));
console.log("[Web Vitals]", metric.name, JSON.stringify(body, null, 2));
}
const blob = new Blob([new URLSearchParams(body).toString()], {
@@ -61,6 +61,6 @@ export function webVitals(options) {
onCLS((metric) => sendToAnalytics(metric, options));
onFCP((metric) => sendToAnalytics(metric, options));
} catch (err) {
console.error("[Analytics]", err);
console.error("[Web Vitals]", err);
}
}

View File

@@ -32,7 +32,7 @@ function sendToAnalytics(metric, options) {
};
if (options.debug) {
console.log('[Analytics]', metric.name, JSON.stringify(body, null, 2));
console.log('[Web Vitals]', metric.name, JSON.stringify(body, null, 2));
}
const blob = new Blob([new URLSearchParams(body).toString()], {
@@ -61,6 +61,6 @@ export function webVitals(options) {
getCLS((metric) => sendToAnalytics(metric, options));
getFCP((metric) => sendToAnalytics(metric, options));
} catch (err) {
console.error('[Analytics]', err);
console.error('[Web Vitals]', err);
}
}

View File

@@ -418,7 +418,7 @@ export interface GitMetadata {
commitRef?: string | undefined;
commitSha?: string | undefined;
dirty?: boolean | undefined;
remoteUrl: string;
remoteUrl?: string;
}
/**

View File

@@ -1,6 +1,6 @@
{
"name": "vercel",
"version": "28.19.0",
"version": "28.20.0",
"preferGlobal": true,
"license": "Apache-2.0",
"description": "The command-line interface for Vercel",
@@ -36,12 +36,12 @@
"@vercel/go": "2.5.0",
"@vercel/hydrogen": "0.0.63",
"@vercel/next": "3.7.5",
"@vercel/node": "2.11.0",
"@vercel/node": "2.12.0",
"@vercel/python": "3.1.59",
"@vercel/redwood": "1.1.14",
"@vercel/remix-builder": "1.8.5",
"@vercel/ruby": "1.3.75",
"@vercel/static-build": "1.3.24"
"@vercel/static-build": "1.3.25"
},
"devDependencies": {
"@alex_neo/jest-expect-message": "1.0.5",
@@ -87,7 +87,7 @@
"@vercel-internals/constants": "*",
"@vercel-internals/get-package-json": "*",
"@vercel-internals/types": "*",
"@vercel/client": "12.4.10",
"@vercel/client": "12.4.11",
"@vercel/error-utils": "1.0.10",
"@vercel/frameworks": "1.3.4",
"@vercel/fs-detectors": "3.8.11",

View File

@@ -252,7 +252,7 @@ export default async function main(client: Client): Promise<number> {
output.debug(`Loaded environment variables from "${envPath}"`);
}
// For Vercel Analytics support
// For Vercel Speed Insights support
if (project.settings.analyticsId) {
envToUnset.add('VERCEL_ANALYTICS_ID');
process.env.VERCEL_ANALYTICS_ID = project.settings.analyticsId;

View File

@@ -35,10 +35,6 @@ export async function createGitMeta(
if (!remoteUrl) {
remoteUrl = await getOriginUrl(join(directory, '.git/config'), output);
}
// If we can't get the repo URL, then don't return any metadata
if (!remoteUrl) {
return;
}
const [commitResult, dirtyResult] = await Promise.allSettled([
getLastCommit(directory),
@@ -63,7 +59,7 @@ export async function createGitMeta(
const commit = commitResult.value;
return {
remoteUrl,
remoteUrl: remoteUrl || undefined,
commitAuthorName: commit.author.name,
commitMessage: commit.subject,
commitRef: commit.branch,

View File

@@ -10,7 +10,7 @@ export default async function edge(request, event) {
return new Response(
JSON.stringify({
headerContentType: request.headers.get('content-type'),
headers: Object.fromEntries(request.headers),
url: request.url,
method: request.method,
body: requestBody,

View File

@@ -1,3 +1,19 @@
module.exports = (req, res) => {
res.send(req.body);
const rawBody = stream =>
new Promise((resolve, reject) => {
const chunks = []
let bytes = 0
stream
.on('error', reject)
.on('end', () => resolve(Buffer.concat(chunks, bytes)))
.on('data', chunk => {
chunks.push(chunk)
bytes += chunk.length
})
})
module.exports = async (req, res) => {
res.json({
body: req.body,
readBody: JSON.parse((await rawBody(req)).toString())
})
};

View File

@@ -0,0 +1,7 @@
module.exports = (req, res) => {
res.json({
url: req.url,
method: req.method,
headers: req.headers,
})
}

View File

@@ -0,0 +1,3 @@
{
"private": true
}

View File

@@ -15,6 +15,22 @@ const {
validateResponseHeaders,
} = require('./utils.js');
test('[vercel dev] should support serverless functions', async () => {
const dir = fixture('serverless-function');
const { dev, port, readyResolver } = await testFixture(dir, {});
try {
await readyResolver;
let res = await fetch(`http://localhost:${port}/api?foo=bar`);
validateResponseHeaders(res);
const payload = await res.json();
expect(payload).toMatchObject({ url: '/api?foo=bar', method: 'GET' });
expect(payload.headers.host).toBe(payload.headers['x-forwarded-host']);
} finally {
await dev.kill();
}
});
test('[vercel dev] should support edge functions', async () => {
const dir = fixture('edge-function');
const { dev, port, readyResolver } = await testFixture(dir, {
@@ -39,8 +55,9 @@ test('[vercel dev] should support edge functions', async () => {
// support for edge functions has to manually ensure that these properties
// are set up; so, we test that they are all passed through properly
expect(await res.json()).toMatchObject({
headerContentType: 'application/json',
const payload = await res.json();
expect(payload).toMatchObject({
headers: { 'content-type': 'application/json' },
url: `http://localhost:${port}/api/edge-success`,
method: 'POST',
body: '{"hello":"world"}',
@@ -49,6 +66,7 @@ test('[vercel dev] should support edge functions', async () => {
optionalChaining: 'fallback',
ENV_VAR_IN_EDGE: '1',
});
expect(payload.headers.host).toBe(payload.headers['x-forwarded-host']);
} finally {
await dev.kill();
}
@@ -364,7 +382,7 @@ test('[vercel dev] should support request body', async () => {
body: JSON.stringify(body),
});
validateResponseHeaders(res);
expect(await res.json()).toMatchObject(body);
expect(await res.json()).toMatchObject({ body, readBody: body });
// Test that `req` "data" events work in dev
res = await fetch(`http://localhost:${port}/api/data-events`, {

View File

@@ -0,0 +1 @@
hi

View File

@@ -0,0 +1 @@
ref: refs/heads/master

View File

@@ -0,0 +1 @@
Unnamed repository; edit this file 'description' to name the repository.

View File

@@ -0,0 +1,15 @@
#!/bin/sh
#
# An example hook script to check the commit log message taken by
# applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit. The hook is
# allowed to edit the commit message file.
#
# To enable this hook, rename this file to "applypatch-msg".
. git-sh-setup
commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
:

View File

@@ -0,0 +1,24 @@
#!/bin/sh
#
# An example hook script to check the commit log message.
# Called by "git commit" with one argument, the name of the file
# that has the commit message. The hook should exit with non-zero
# status after issuing an appropriate message if it wants to stop the
# commit. The hook is allowed to edit the commit message file.
#
# To enable this hook, rename this file to "commit-msg".
# Uncomment the below to add a Signed-off-by line to the message.
# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
# hook is more suited to it.
#
# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
# This example catches duplicate Signed-off-by lines.
test "" = "$(grep '^Signed-off-by: ' "$1" |
sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
echo >&2 Duplicate Signed-off-by lines.
exit 1
}

View File

@@ -0,0 +1,173 @@
#!/usr/bin/perl
use strict;
use warnings;
use IPC::Open2;
# An example hook script to integrate Watchman
# (https://facebook.github.io/watchman/) with git to speed up detecting
# new and modified files.
#
# The hook is passed a version (currently 2) and last update token
# formatted as a string and outputs to stdout a new update token and
# all files that have been modified since the update token. Paths must
# be relative to the root of the working tree and separated by a single NUL.
#
# To enable this hook, rename this file to "query-watchman" and set
# 'git config core.fsmonitor .git/hooks/query-watchman'
#
my ($version, $last_update_token) = @ARGV;
# Uncomment for debugging
# print STDERR "$0 $version $last_update_token\n";
# Check the hook interface version
if ($version ne 2) {
die "Unsupported query-fsmonitor hook version '$version'.\n" .
"Falling back to scanning...\n";
}
my $git_work_tree = get_working_dir();
my $retry = 1;
my $json_pkg;
eval {
require JSON::XS;
$json_pkg = "JSON::XS";
1;
} or do {
require JSON::PP;
$json_pkg = "JSON::PP";
};
launch_watchman();
sub launch_watchman {
my $o = watchman_query();
if (is_work_tree_watched($o)) {
output_result($o->{clock}, @{$o->{files}});
}
}
sub output_result {
my ($clockid, @files) = @_;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# binmode $fh, ":utf8";
# print $fh "$clockid\n@files\n";
# close $fh;
binmode STDOUT, ":utf8";
print $clockid;
print "\0";
local $, = "\0";
print @files;
}
sub watchman_clock {
my $response = qx/watchman clock "$git_work_tree"/;
die "Failed to get clock id on '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
return $json_pkg->new->utf8->decode($response);
}
sub watchman_query {
my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
or die "open2() failed: $!\n" .
"Falling back to scanning...\n";
# In the query expression below we're asking for names of files that
# changed since $last_update_token but not from the .git folder.
#
# To accomplish this, we're using the "since" generator to use the
# recency index to select candidate nodes and "fields" to limit the
# output to file names only. Then we're using the "expression" term to
# further constrain the results.
if (substr($last_update_token, 0, 1) eq "c") {
$last_update_token = "\"$last_update_token\"";
}
my $query = <<" END";
["query", "$git_work_tree", {
"since": $last_update_token,
"fields": ["name"],
"expression": ["not", ["dirname", ".git"]]
}]
END
# Uncomment for debugging the watchman query
# open (my $fh, ">", ".git/watchman-query.json");
# print $fh $query;
# close $fh;
print CHLD_IN $query;
close CHLD_IN;
my $response = do {local $/; <CHLD_OUT>};
# Uncomment for debugging the watch response
# open ($fh, ">", ".git/watchman-response.json");
# print $fh $response;
# close $fh;
die "Watchman: command returned no output.\n" .
"Falling back to scanning...\n" if $response eq "";
die "Watchman: command returned invalid output: $response\n" .
"Falling back to scanning...\n" unless $response =~ /^\{/;
return $json_pkg->new->utf8->decode($response);
}
sub is_work_tree_watched {
my ($output) = @_;
my $error = $output->{error};
if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
$retry--;
my $response = qx/watchman watch "$git_work_tree"/;
die "Failed to make watchman watch '$git_work_tree'.\n" .
"Falling back to scanning...\n" if $? != 0;
$output = $json_pkg->new->utf8->decode($response);
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
# Uncomment for debugging watchman output
# open (my $fh, ">", ".git/watchman-output.out");
# close $fh;
# Watchman will always return all files on the first query so
# return the fast "everything is dirty" flag to git and do the
# Watchman query just to get it over with now so we won't pay
# the cost in git to look up each individual file.
my $o = watchman_clock();
$error = $output->{error};
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
output_result($o->{clock}, ("/"));
$last_update_token = $o->{clock};
eval { launch_watchman() };
return 0;
}
die "Watchman: $error.\n" .
"Falling back to scanning...\n" if $error;
return 1;
}
sub get_working_dir {
my $working_dir;
if ($^O =~ 'msys' || $^O =~ 'cygwin') {
$working_dir = Win32::GetCwd();
$working_dir =~ tr/\\/\//;
} else {
require Cwd;
$working_dir = Cwd::cwd();
}
return $working_dir;
}

View File

@@ -0,0 +1,8 @@
#!/bin/sh
#
# An example hook script to prepare a packed repository for use over
# dumb transports.
#
# To enable this hook, rename this file to "post-update".
exec git update-server-info

View File

@@ -0,0 +1,14 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed
# by applypatch from an e-mail message.
#
# The hook should exit with non-zero status after issuing an
# appropriate message if it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-applypatch".
. git-sh-setup
precommit="$(git rev-parse --git-path hooks/pre-commit)"
test -x "$precommit" && exec "$precommit" ${1+"$@"}
:

View File

@@ -0,0 +1,49 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git commit" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message if
# it wants to stop the commit.
#
# To enable this hook, rename this file to "pre-commit".
if git rev-parse --verify HEAD >/dev/null 2>&1
then
against=HEAD
else
# Initial commit: diff against an empty tree object
against=$(git hash-object -t tree /dev/null)
fi
# If you want to allow non-ASCII filenames set this variable to true.
allownonascii=$(git config --type=bool hooks.allownonascii)
# Redirect output to stderr.
exec 1>&2
# Cross platform projects tend to avoid non-ASCII filenames; prevent
# them from being added to the repository. We exploit the fact that the
# printable range starts at the space character and ends with tilde.
if [ "$allownonascii" != "true" ] &&
# Note that the use of brackets around a tr range is ok here, (it's
# even required, for portability to Solaris 10's /usr/bin/tr), since
# the square bracket bytes happen to fall in the designated range.
test $(git diff --cached --name-only --diff-filter=A -z $against |
LC_ALL=C tr -d '[ -~]\0' | wc -c) != 0
then
cat <<\EOF
Error: Attempt to add a non-ASCII file name.
This can cause problems if you want to work with people on other platforms.
To be portable it is advisable to rename the file.
If you know what you are doing you can disable this check using:
git config hooks.allownonascii true
EOF
exit 1
fi
# If there are whitespace errors, print the offending file names and fail.
exec git diff-index --check --cached $against --

View File

@@ -0,0 +1,13 @@
#!/bin/sh
#
# An example hook script to verify what is about to be committed.
# Called by "git merge" with no arguments. The hook should
# exit with non-zero status after issuing an appropriate message to
# stderr if it wants to stop the merge commit.
#
# To enable this hook, rename this file to "pre-merge-commit".
. git-sh-setup
test -x "$GIT_DIR/hooks/pre-commit" &&
exec "$GIT_DIR/hooks/pre-commit"
:

View File

@@ -0,0 +1,53 @@
#!/bin/sh
# An example hook script to verify what is about to be pushed. Called by "git
# push" after it has checked the remote status, but before anything has been
# pushed. If this script exits with a non-zero status nothing will be pushed.
#
# This hook is called with the following parameters:
#
# $1 -- Name of the remote to which the push is being done
# $2 -- URL to which the push is being done
#
# If pushing without using a named remote those arguments will be equal.
#
# Information about the commits which are being pushed is supplied as lines to
# the standard input in the form:
#
# <local ref> <local oid> <remote ref> <remote oid>
#
# This sample shows how to prevent push of commits where the log message starts
# with "WIP" (work in progress).
remote="$1"
url="$2"
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
while read local_ref local_oid remote_ref remote_oid
do
if test "$local_oid" = "$zero"
then
# Handle delete
:
else
if test "$remote_oid" = "$zero"
then
# New branch, examine all commits
range="$local_oid"
else
# Update to existing branch, examine new commits
range="$remote_oid..$local_oid"
fi
# Check for WIP commit
commit=$(git rev-list -n 1 --grep '^WIP' "$range")
if test -n "$commit"
then
echo >&2 "Found WIP commit in $local_ref, not pushing"
exit 1
fi
fi
done
exit 0

View File

@@ -0,0 +1,169 @@
#!/bin/sh
#
# Copyright (c) 2006, 2008 Junio C Hamano
#
# The "pre-rebase" hook is run just before "git rebase" starts doing
# its job, and can prevent the command from running by exiting with
# non-zero status.
#
# The hook is called with the following parameters:
#
# $1 -- the upstream the series was forked from.
# $2 -- the branch being rebased (or empty when rebasing the current branch).
#
# This sample shows how to prevent topic branches that are already
# merged to 'next' branch from getting rebased, because allowing it
# would result in rebasing already published history.
publish=next
basebranch="$1"
if test "$#" = 2
then
topic="refs/heads/$2"
else
topic=`git symbolic-ref HEAD` ||
exit 0 ;# we do not interrupt rebasing detached HEAD
fi
case "$topic" in
refs/heads/??/*)
;;
*)
exit 0 ;# we do not interrupt others.
;;
esac
# Now we are dealing with a topic branch being rebased
# on top of master. Is it OK to rebase it?
# Does the topic really exist?
git show-ref -q "$topic" || {
echo >&2 "No such branch $topic"
exit 1
}
# Is topic fully merged to master?
not_in_master=`git rev-list --pretty=oneline ^master "$topic"`
if test -z "$not_in_master"
then
echo >&2 "$topic is fully merged to master; better remove it."
exit 1 ;# we could allow it, but there is no point.
fi
# Is topic ever merged to next? If so you should not be rebasing it.
only_next_1=`git rev-list ^master "^$topic" ${publish} | sort`
only_next_2=`git rev-list ^master ${publish} | sort`
if test "$only_next_1" = "$only_next_2"
then
not_in_topic=`git rev-list "^$topic" master`
if test -z "$not_in_topic"
then
echo >&2 "$topic is already up to date with master"
exit 1 ;# we could allow it, but there is no point.
else
exit 0
fi
else
not_in_next=`git rev-list --pretty=oneline ^${publish} "$topic"`
/usr/bin/perl -e '
my $topic = $ARGV[0];
my $msg = "* $topic has commits already merged to public branch:\n";
my (%not_in_next) = map {
/^([0-9a-f]+) /;
($1 => 1);
} split(/\n/, $ARGV[1]);
for my $elem (map {
/^([0-9a-f]+) (.*)$/;
[$1 => $2];
} split(/\n/, $ARGV[2])) {
if (!exists $not_in_next{$elem->[0]}) {
if ($msg) {
print STDERR $msg;
undef $msg;
}
print STDERR " $elem->[1]\n";
}
}
' "$topic" "$not_in_next" "$not_in_master"
exit 1
fi
<<\DOC_END
This sample hook safeguards topic branches that have been
published from being rewound.
The workflow assumed here is:
* Once a topic branch forks from "master", "master" is never
merged into it again (either directly or indirectly).
* Once a topic branch is fully cooked and merged into "master",
it is deleted. If you need to build on top of it to correct
earlier mistakes, a new topic branch is created by forking at
the tip of the "master". This is not strictly necessary, but
it makes it easier to keep your history simple.
* Whenever you need to test or publish your changes to topic
branches, merge them into "next" branch.
The script, being an example, hardcodes the publish branch name
to be "next", but it is trivial to make it configurable via
$GIT_DIR/config mechanism.
With this workflow, you would want to know:
(1) ... if a topic branch has ever been merged to "next". Young
topic branches can have stupid mistakes you would rather
clean up before publishing, and things that have not been
merged into other branches can be easily rebased without
affecting other people. But once it is published, you would
not want to rewind it.
(2) ... if a topic branch has been fully merged to "master".
Then you can delete it. More importantly, you should not
build on top of it -- other people may already want to
change things related to the topic as patches against your
"master", so if you need further changes, it is better to
fork the topic (perhaps with the same name) afresh from the
tip of "master".
Let's look at this example:
o---o---o---o---o---o---o---o---o---o "next"
/ / / /
/ a---a---b A / /
/ / / /
/ / c---c---c---c B /
/ / / \ /
/ / / b---b C \ /
/ / / / \ /
---o---o---o---o---o---o---o---o---o---o---o "master"
A, B and C are topic branches.
* A has one fix since it was merged up to "next".
* B has finished. It has been fully merged up to "master" and "next",
and is ready to be deleted.
* C has not merged to "next" at all.
We would want to allow C to be rebased, refuse A, and encourage
B to be deleted.
To compute (1):
git rev-list ^master ^topic next
git rev-list ^master next
if these match, topic has not merged in next at all.
To compute (2):
git rev-list master..topic
if this is empty, it is fully merged to "master".
DOC_END

View File

@@ -0,0 +1,24 @@
#!/bin/sh
#
# An example hook script to make use of push options.
# The example simply echoes all push options that start with 'echoback='
# and rejects all pushes when the "reject" push option is used.
#
# To enable this hook, rename this file to "pre-receive".
if test -n "$GIT_PUSH_OPTION_COUNT"
then
i=0
while test "$i" -lt "$GIT_PUSH_OPTION_COUNT"
do
eval "value=\$GIT_PUSH_OPTION_$i"
case "$value" in
echoback=*)
echo "echo from the pre-receive-hook: ${value#*=}" >&2
;;
reject)
exit 1
esac
i=$((i + 1))
done
fi

View File

@@ -0,0 +1,42 @@
#!/bin/sh
#
# An example hook script to prepare the commit log message.
# Called by "git commit" with the name of the file that has the
# commit message, followed by the description of the commit
# message's source. The hook's purpose is to edit the commit
# message file. If the hook fails with a non-zero status,
# the commit is aborted.
#
# To enable this hook, rename this file to "prepare-commit-msg".
# This hook includes three examples. The first one removes the
# "# Please enter the commit message..." help message.
#
# The second includes the output of "git diff --name-status -r"
# into the message, just before the "git status" output. It is
# commented because it doesn't cope with --amend or with squashed
# commits.
#
# The third example adds a Signed-off-by line to the message, that can
# still be edited. This is rarely a good idea.
COMMIT_MSG_FILE=$1
COMMIT_SOURCE=$2
SHA1=$3
/usr/bin/perl -i.bak -ne 'print unless(m/^. Please enter the commit message/..m/^#$/)' "$COMMIT_MSG_FILE"
# case "$COMMIT_SOURCE,$SHA1" in
# ,|template,)
# /usr/bin/perl -i.bak -pe '
# print "\n" . `git diff --cached --name-status -r`
# if /^#/ && $first++ == 0' "$COMMIT_MSG_FILE" ;;
# *) ;;
# esac
# SOB=$(git var GIT_COMMITTER_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
# git interpret-trailers --in-place --trailer "$SOB" "$COMMIT_MSG_FILE"
# if test -z "$COMMIT_SOURCE"
# then
# /usr/bin/perl -i.bak -pe 'print "\n" if !$first_line++' "$COMMIT_MSG_FILE"
# fi

View File

@@ -0,0 +1,78 @@
#!/bin/sh
# An example hook script to update a checked-out tree on a git push.
#
# This hook is invoked by git-receive-pack(1) when it reacts to git
# push and updates reference(s) in its repository, and when the push
# tries to update the branch that is currently checked out and the
# receive.denyCurrentBranch configuration variable is set to
# updateInstead.
#
# By default, such a push is refused if the working tree and the index
# of the remote repository has any difference from the currently
# checked out commit; when both the working tree and the index match
# the current commit, they are updated to match the newly pushed tip
# of the branch. This hook is to be used to override the default
# behaviour; however the code below reimplements the default behaviour
# as a starting point for convenient modification.
#
# The hook receives the commit with which the tip of the current
# branch is going to be updated:
commit=$1
# It can exit with a non-zero status to refuse the push (when it does
# so, it must not modify the index or the working tree).
die () {
echo >&2 "$*"
exit 1
}
# Or it can make any necessary changes to the working tree and to the
# index to bring them to the desired state when the tip of the current
# branch is updated to the new commit, and exit with a zero status.
#
# For example, the hook can simply run git read-tree -u -m HEAD "$1"
# in order to emulate git fetch that is run in the reverse direction
# with git push, as the two-tree form of git read-tree -u -m is
# essentially the same as git switch or git checkout that switches
# branches while keeping the local changes in the working tree that do
# not interfere with the difference between the branches.
# The below is a more-or-less exact translation to shell of the C code
# for the default behaviour for git's push-to-checkout hook defined in
# the push_to_deploy() function in builtin/receive-pack.c.
#
# Note that the hook will be executed from the repository directory,
# not from the working tree, so if you want to perform operations on
# the working tree, you will have to adapt your code accordingly, e.g.
# by adding "cd .." or using relative paths.
if ! git update-index -q --ignore-submodules --refresh
then
die "Up-to-date check failed"
fi
if ! git diff-files --quiet --ignore-submodules --
then
die "Working directory has unstaged changes"
fi
# This is a rough translation of:
#
# head_has_history() ? "HEAD" : EMPTY_TREE_SHA1_HEX
if git cat-file -e HEAD 2>/dev/null
then
head=HEAD
else
head=$(git hash-object -t tree --stdin </dev/null)
fi
if ! git diff-index --quiet --cached --ignore-submodules $head --
then
die "Working directory has staged changes"
fi
if ! git read-tree -u -m "$commit"
then
die "Could not update working tree to new HEAD"
fi

View File

@@ -0,0 +1,128 @@
#!/bin/sh
#
# An example hook script to block unannotated tags from entering.
# Called by "git receive-pack" with arguments: refname sha1-old sha1-new
#
# To enable this hook, rename this file to "update".
#
# Config
# ------
# hooks.allowunannotated
# This boolean sets whether unannotated tags will be allowed into the
# repository. By default they won't be.
# hooks.allowdeletetag
# This boolean sets whether deleting tags will be allowed in the
# repository. By default they won't be.
# hooks.allowmodifytag
# This boolean sets whether a tag may be modified after creation. By default
# it won't be.
# hooks.allowdeletebranch
# This boolean sets whether deleting branches will be allowed in the
# repository. By default they won't be.
# hooks.denycreatebranch
# This boolean sets whether remotely creating branches will be denied
# in the repository. By default this is allowed.
#
# --- Command line
refname="$1"
oldrev="$2"
newrev="$3"
# --- Safety check
if [ -z "$GIT_DIR" ]; then
echo "Don't run this script from the command line." >&2
echo " (if you want, you could supply GIT_DIR then run" >&2
echo " $0 <ref> <oldrev> <newrev>)" >&2
exit 1
fi
if [ -z "$refname" -o -z "$oldrev" -o -z "$newrev" ]; then
echo "usage: $0 <ref> <oldrev> <newrev>" >&2
exit 1
fi
# --- Config
allowunannotated=$(git config --type=bool hooks.allowunannotated)
allowdeletebranch=$(git config --type=bool hooks.allowdeletebranch)
denycreatebranch=$(git config --type=bool hooks.denycreatebranch)
allowdeletetag=$(git config --type=bool hooks.allowdeletetag)
allowmodifytag=$(git config --type=bool hooks.allowmodifytag)
# check for no description
projectdesc=$(sed -e '1q' "$GIT_DIR/description")
case "$projectdesc" in
"Unnamed repository"* | "")
echo "*** Project description file hasn't been set" >&2
exit 1
;;
esac
# --- Check types
# if $newrev is 0000...0000, it's a commit to delete a ref.
zero=$(git hash-object --stdin </dev/null | tr '[0-9a-f]' '0')
if [ "$newrev" = "$zero" ]; then
newrev_type=delete
else
newrev_type=$(git cat-file -t $newrev)
fi
case "$refname","$newrev_type" in
refs/tags/*,commit)
# un-annotated tag
short_refname=${refname##refs/tags/}
if [ "$allowunannotated" != "true" ]; then
echo "*** The un-annotated tag, $short_refname, is not allowed in this repository" >&2
echo "*** Use 'git tag [ -a | -s ]' for tags you want to propagate." >&2
exit 1
fi
;;
refs/tags/*,delete)
# delete tag
if [ "$allowdeletetag" != "true" ]; then
echo "*** Deleting a tag is not allowed in this repository" >&2
exit 1
fi
;;
refs/tags/*,tag)
# annotated tag
if [ "$allowmodifytag" != "true" ] && git rev-parse $refname > /dev/null 2>&1
then
echo "*** Tag '$refname' already exists." >&2
echo "*** Modifying a tag is not allowed in this repository." >&2
exit 1
fi
;;
refs/heads/*,commit)
# branch
if [ "$oldrev" = "$zero" -a "$denycreatebranch" = "true" ]; then
echo "*** Creating a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/heads/*,delete)
# delete branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a branch is not allowed in this repository" >&2
exit 1
fi
;;
refs/remotes/*,commit)
# tracking branch
;;
refs/remotes/*,delete)
# delete tracking branch
if [ "$allowdeletebranch" != "true" ]; then
echo "*** Deleting a tracking branch is not allowed in this repository" >&2
exit 1
fi
;;
*)
# Anything else (is there anything else?)
echo "*** Update hook: unknown type of update to ref $refname of type $newrev_type" >&2
exit 1
;;
esac
# --- Finished
exit 0

Binary file not shown.

View File

@@ -0,0 +1,6 @@
# git ls-files --others --exclude-from=.git/info/exclude
# Lines that start with '#' are comments.
# For a project mostly in C, the following would be a good set of
# exclude patterns (uncomment them if you want to use them):
# *.[oa]
# *~

View File

@@ -0,0 +1 @@
0000000000000000000000000000000000000000 0499dbfa2f58cd8b3b3ce5b2c02a24200862ac97 Matthew Stanciu <mattbstanciu@gmail.com> 1654123602 -0700 commit (initial): hi

View File

@@ -0,0 +1 @@
0000000000000000000000000000000000000000 0499dbfa2f58cd8b3b3ce5b2c02a24200862ac97 Matthew Stanciu <mattbstanciu@gmail.com> 1654123602 -0700 commit (initial): hi

View File

@@ -0,0 +1 @@
0499dbfa2f58cd8b3b3ce5b2c02a24200862ac97

View File

@@ -112,7 +112,6 @@ describe('parseRepoUrl', () => {
it('should parse github git url with trailing slash', () => {
const repoInfo = parseRepoUrl('git://github.com/vercel/vercel.git/');
expect(repoInfo).toBeTruthy();
console.log(repoInfo);
expect(repoInfo?.provider).toEqual('github');
expect(repoInfo?.org).toEqual('vercel');
expect(repoInfo?.repo).toEqual('vercel');
@@ -182,7 +181,14 @@ describe('createGitMeta', () => {
try {
await fs.rename(join(directory, 'git'), join(directory, '.git'));
const data = await createGitMeta(directory, client.output);
expect(data).toBeUndefined();
expect(data).toEqual({
commitAuthorName: 'Matthew Stanciu',
commitMessage: 'hi',
commitRef: 'master',
commitSha: '0499dbfa2f58cd8b3b3ce5b2c02a24200862ac97',
dirty: false,
remoteUrl: undefined,
});
} finally {
await fs.rename(join(directory, '.git'), join(directory, 'git'));
}

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/client",
"version": "12.4.10",
"version": "12.4.11",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"homepage": "https://vercel.com",

View File

@@ -164,7 +164,7 @@ export interface GitMetadata {
commitRef?: string | undefined;
commitSha?: string | undefined;
dirty?: boolean | undefined;
remoteUrl: string;
remoteUrl?: string;
}
/**

View File

@@ -6,7 +6,7 @@
---
This plugin sends [Core Web Vitals](https://web.dev/vitals/) to Vercel Analytics. This plugin is configured by default on Vercel. You **do not** need to install it manually. For more information, [read this post](https://vercel.com/blog/gatsby-analytics).
This plugin sends [Core Web Vitals](https://web.dev/vitals/) to Vercel Speed Insights. This plugin is configured by default on Vercel. You **do not** need to install it manually. For more information, [read this post](https://vercel.com/blog/gatsby-analytics).
## Install

View File

@@ -1,7 +1,7 @@
{
"name": "@vercel/gatsby-plugin-vercel-analytics",
"version": "1.0.9",
"description": "Track Core Web Vitals in Gatsby projects with Vercel Analytics.",
"version": "1.0.10",
"description": "Track Core Web Vitals in Gatsby projects with Vercel Speed Insights.",
"main": "index.js",
"files": [
"gatsby-browser.js",

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/gatsby-plugin-vercel-builder",
"version": "1.2.9",
"version": "1.2.10",
"main": "dist/index.js",
"files": [
"dist",
@@ -16,7 +16,7 @@
"dependencies": {
"@sinclair/typebox": "0.25.24",
"@vercel/build-utils": "6.7.1",
"@vercel/node": "2.11.0",
"@vercel/node": "2.12.0",
"@vercel/routing-utils": "2.2.0",
"esbuild": "0.14.47",
"etag": "1.8.1",

View File

@@ -1,2 +0,0 @@
/helpers.js
/source-map-support.js

View File

@@ -1,18 +0,0 @@
/// <reference types="node" />
import { Server } from 'http';
import {
VercelProxyRequest,
VercelProxyResponse,
VercelProxyEvent,
ServerLike,
} from './types';
export declare class Bridge {
constructor(server?: ServerLike, shouldStoreEvents?: boolean);
setServer(server: ServerLike): void;
setStoreEvents(shouldStoreEvents: boolean): void;
listen(): void | Server;
launcher(event: VercelProxyEvent, context: any): Promise<VercelProxyResponse>;
consumeEvent(reqId: string): VercelProxyRequest;
}
export {};

View File

@@ -1,481 +0,0 @@
const { URL } = require('url');
const { request } = require('http');
const { Socket } = require('net');
const { createCipheriv } = require('crypto');
const { pipeline, Transform } = require('stream');
const CRLF = `\r\n`;
/**
* If the `http.Server` handler function throws an error asynchronously,
* then it ends up being an unhandled rejection which doesn't kill the node
* process which causes the HTTP request to hang indefinitely. So print the
* error here and force the process to exit so that the lambda invocation
* returns an Unhandled error quickly.
*/
process.on('unhandledRejection', err => {
console.error('Unhandled rejection:', err);
process.exit(1);
});
/**
* @param {import('./types').VercelProxyEvent} event
*/
function normalizeProxyEvent(event) {
let bodyBuffer;
/**
* @type {import('./types').VercelProxyRequest}
*/
const payload = JSON.parse(event.body);
const {
method,
path,
headers,
encoding,
body,
payloads,
responseCallbackCipher,
responseCallbackCipherIV,
responseCallbackCipherKey,
responseCallbackStream,
responseCallbackUrl,
features,
} = payload;
/**
*
* @param {string | Buffer} body
* @returns Buffer
*/
const normalizeBody = body => {
if (body) {
if (typeof body === 'string' && encoding === 'base64') {
bodyBuffer = Buffer.from(body, encoding);
} else if (encoding === undefined) {
bodyBuffer = Buffer.from(body);
} else {
throw new Error(`Unsupported encoding: ${encoding}`);
}
} else {
bodyBuffer = Buffer.alloc(0);
}
return bodyBuffer;
};
if (payloads) {
for (const targetPayload of payloads) {
targetPayload.features = features;
targetPayload.body = normalizeBody(payload.body);
}
}
bodyBuffer = normalizeBody(body);
return {
isApiGateway: false,
method,
path,
headers,
body: bodyBuffer,
payloads,
features,
responseCallbackCipher,
responseCallbackCipherIV,
responseCallbackCipherKey,
responseCallbackStream,
responseCallbackUrl,
};
}
/**
* @param {import('./types').VercelProxyEvent } event
* @return {import('./types').VercelProxyRequest }
*/
function normalizeEvent(event) {
if (event.Action === 'Invoke') return normalizeProxyEvent(event);
throw new Error(`Unexpected event.Action: ${event.Action}`);
}
class Bridge {
/**
* @param {import('./types').ServerLike | null} server
* @param {boolean} shouldStoreEvents
*/
constructor(server = null, shouldStoreEvents = false) {
this.server = server;
this.shouldStoreEvents = shouldStoreEvents;
this.launcher = this.launcher.bind(this);
this.reqIdSeed = 1;
/**
* @type {{ [key: string]: import('./types').VercelProxyRequest }}
*/
this.events = {};
this.listening = new Promise(resolve => {
this.resolveListening = resolve;
});
}
/**
* @param {import('./types').ServerLike} server
*/
setServer(server) {
this.server = server;
}
/**
* @param {boolean} shouldStoreEvents
*/
setStoreEvents(shouldStoreEvents) {
this.shouldStoreEvents = shouldStoreEvents;
}
listen() {
const { server, resolveListening } = this;
if (!server) {
throw new Error('Server has not been set!');
}
if (typeof server.timeout === 'number' && server.timeout > 0) {
// Disable timeout (usually 2 minutes until Node 13).
// Instead, user should assign function `maxDuration`.
server.timeout = 0;
}
return server.listen(
{
host: '127.0.0.1',
port: 0,
},
function listeningCallback() {
if (!this || typeof this.address !== 'function') {
throw new Error(
'Missing server.address() function on `this` in server.listen()'
);
}
const addr = this.address();
if (!addr) {
throw new Error('`server.address()` returned `null`');
}
if (typeof addr === 'string') {
throw new Error(
`Unexpected string for \`server.address()\`: ${addr}`
);
}
resolveListening(addr);
}
);
}
/**
*
* @param {import('./types').VercelProxyEvent} event
* @param {import('aws-lambda').Context} context
* @return {Promise<import('./types').VercelProxyResponse>}
*/
async launcher(event, context) {
context.callbackWaitsForEmptyEventLoop = false;
const normalizedEvent = normalizeEvent(event);
if (
'payloads' in normalizedEvent &&
Array.isArray(normalizedEvent.payloads)
) {
let statusCode = 200;
/**
* @type {import('http').IncomingHttpHeaders}
*/
let headers = {};
/**
* @type {string}
*/
let combinedBody = '';
const multipartBoundary = 'payload-separator';
const CLRF = '\r\n';
/**
* @type {Record<string, any>[]}
*/
const separateHeaders = [];
/**
* @type {Set<string>}
*/
const allHeaderKeys = new Set();
// we execute the payloads one at a time to ensure
// lambda semantics
for (let i = 0; i < normalizedEvent.payloads.length; i++) {
const currentPayload = normalizedEvent.payloads[i];
const response = await this.handleEvent(currentPayload);
// build a combined body using multipart
// https://www.w3.org/Protocols/rfc1341/7_2_Multipart.html
combinedBody += `--${multipartBoundary}${CLRF}`;
combinedBody += `content-type: ${
response.headers['content-type'] || 'text/plain'
}${CLRF}${CLRF}`;
combinedBody += response.body || '';
combinedBody += CLRF;
if (i === normalizedEvent.payloads.length - 1) {
combinedBody += `--${multipartBoundary}--${CLRF}`;
}
// pass non-200 status code in header so it can be handled
// separately from other payloads e.g. HTML payload redirects
// (307) but data payload does not (200)
if (response.statusCode !== 200) {
headers[`x-vercel-payload-${i + 1}-status`] =
response.statusCode + '';
}
separateHeaders.push(response.headers);
Object.keys(response.headers).forEach(key => allHeaderKeys.add(key));
}
allHeaderKeys.forEach(curKey => {
/**
* @type string | string[] | undefined
*/
const curValue = separateHeaders[0] && separateHeaders[0][curKey];
const canDedupe = separateHeaders.every(
headers => headers[curKey] === curValue
);
if (canDedupe) {
headers[curKey] = curValue;
} else {
// if a header is unique per payload ensure it is prefixed
// so it can be parsed and provided separately
separateHeaders.forEach((curHeaders, idx) => {
if (curHeaders[curKey]) {
headers[`x-vercel-payload-${idx + 1}-${curKey}`] =
curHeaders[curKey];
}
});
}
});
headers[
'content-type'
] = `multipart/mixed; boundary="${multipartBoundary}"`;
return {
headers,
statusCode,
body: combinedBody,
encoding: 'base64',
};
} else {
// TODO We expect this to error as it is possible to resolve to empty.
// For now it is not very important as we will only pass
// `responseCallbackUrl` in production.
// @ts-ignore
return this.handleEvent(normalizedEvent);
}
}
/**
*
* @param {ReturnType<typeof normalizeEvent>} normalizedEvent
* @return {Promise<import('./types').VercelProxyResponse | import('./types').VercelStreamProxyResponse>}
*/
async handleEvent(normalizedEvent) {
const { port } = await this.listening;
const {
body,
headers,
isApiGateway,
method,
responseCallbackCipher,
responseCallbackCipherIV,
responseCallbackCipherKey,
responseCallbackStream,
responseCallbackUrl,
} = normalizedEvent;
let { path } = normalizedEvent;
if (this.shouldStoreEvents) {
const reqId = `${this.reqIdSeed++}`;
this.events[reqId] = normalizedEvent;
headers['x-now-bridge-request-id'] = reqId;
}
return new Promise((resolve, reject) => {
let socket;
let cipher;
let url;
if (responseCallbackUrl) {
socket = new Socket();
url = new URL(responseCallbackUrl);
socket.connect(parseInt(url.port, 10), url.hostname);
socket.write(`${responseCallbackStream}${CRLF}`);
}
if (
responseCallbackCipher &&
responseCallbackCipherKey &&
responseCallbackCipherIV
) {
cipher = createCipheriv(
responseCallbackCipher,
Buffer.from(responseCallbackCipherKey, 'base64'),
Buffer.from(responseCallbackCipherIV, 'base64')
);
}
// if the path is improperly encoded we need to encode it or
// http.request will throw an error (related check: https://github.com/nodejs/node/blob/4ece669c6205ec78abfdadfe78869bbb8411463e/lib/_http_client.js#L84)
if (path && /[^\u0021-\u00ff]/.test(path)) {
path = encodeURI(path);
}
const req = request(
{ hostname: '127.0.0.1', port, path, method },
socket && url && cipher
? getStreamResponseCallback({ url, socket, cipher, resolve, reject })
: getResponseCallback({ isApiGateway, resolve, reject })
);
req.on('error', error => {
setTimeout(() => {
// this lets express print the true error of why the connection was closed.
// it is probably 'Cannot set headers after they are sent to the client'
reject(error);
}, 2);
});
for (const [name, value] of getHeadersIterator(headers)) {
try {
req.setHeader(name, value);
} catch (/** @type any */ err) {
console.error(`Skipping HTTP request header: "${name}: ${value}"`);
console.error(err.message);
}
}
if (body) req.write(body);
req.end();
});
}
/**
* @param {string} reqId
* @return {import('./types').VercelProxyRequest}
*/
consumeEvent(reqId) {
const event = this.events[reqId];
delete this.events[reqId];
return event;
}
}
/**
* Generates the streaming response callback which writes in the given socket client a raw
* HTTP Request message to later pipe the response body into the socket. It will pass request
* headers namespace and an additional header with the status code. Once everything is
* written it will destroy the socket and resolve to an empty object. If a cipher is given
* it will be used to pipe bytes.
*
* @type {(params: {
* url: import('url').URL,
* socket: import('net').Socket,
* cipher: import('crypto').Cipher
* resolve: (result: (Record<string, never>)) => void,
* reject: (err: Error) => void
* }) => (response: import("http").IncomingMessage) => void}
*/
function getStreamResponseCallback({ url, socket, cipher, resolve, reject }) {
return response => {
const chunked = new Transform();
chunked._transform = function (chunk, _, callback) {
this.push(Buffer.byteLength(chunk).toString(16) + CRLF);
this.push(chunk);
this.push(CRLF);
callback();
};
let headers = `Host: ${url.host}${CRLF}`;
headers += `transfer-encoding: chunked${CRLF}`;
headers += `x-vercel-status-code: ${response.statusCode || 200}${CRLF}`;
for (const [name, value] of getHeadersIterator(response.headers)) {
if (!['connection', 'transfer-encoding'].includes(name)) {
if (typeof value === 'string') {
headers += `x-vercel-header-${name}: ${value}${CRLF}`;
} else {
for (const val of value) {
headers += `x-vercel-header-${name}: ${val}${CRLF}`;
}
}
}
}
cipher.write(`POST ${url.pathname} HTTP/1.1${CRLF}${headers}${CRLF}`);
pipeline(response, chunked, cipher, socket, err => {
if (err) return reject(err);
resolve({});
});
};
}
/**
* Generates the normal response callback which waits until the body is fully
* received before resolving the promise. It caches the entire body and resolve
* with an object that describes the response.
*
* @type {(params: {
* isApiGateway: boolean,
* resolve: (result: (import('./types').VercelProxyResponse)) => void,
* reject: (err: Error) => void
* }) => (response: import("http").IncomingMessage) => void}
*/
function getResponseCallback({ isApiGateway, resolve, reject }) {
return response => {
/**
* @type {Buffer[]}
*/
const respBodyChunks = [];
response.on('data', chunk => respBodyChunks.push(Buffer.from(chunk)));
response.on('error', reject);
response.on('end', () => {
const bodyBuffer = Buffer.concat(respBodyChunks);
delete response.headers.connection;
if (isApiGateway) {
delete response.headers['content-length'];
} else if (response.headers['content-length']) {
response.headers['content-length'] = String(bodyBuffer.length);
}
resolve({
statusCode: response.statusCode || 200,
headers: response.headers,
body: bodyBuffer.toString('base64'),
encoding: 'base64',
});
});
};
}
/**
* Get an iterator for the headers object and yield the name and value when
* the value is not undefined only.
*
* @type {(headers: import('http').IncomingHttpHeaders) =>
* Generator<[string, string | string[]], void, unknown>}
*/
function* getHeadersIterator(headers) {
for (const [name, value] of Object.entries(headers)) {
if (value === undefined) {
console.error(
`Skipping HTTP request header "${name}" because value is undefined`
);
continue;
}
yield [name, value];
}
}
module.exports = { Bridge };

View File

@@ -1,62 +0,0 @@
#!/usr/bin/env node
const fs = require('fs-extra');
const execa = require('execa');
const { join } = require('path');
async function main() {
// Build TypeScript files
await execa('tsc', [], {
stdio: 'inherit',
});
// Bundle `helpers.ts` with ncc
await fs.remove(join(__dirname, 'helpers.js'));
const helpersDir = join(__dirname, 'helpers');
await execa(
'ncc',
[
'build',
join(__dirname, 'helpers.ts'),
'-e',
'@vercel/node-bridge',
'-e',
'@vercel/build-utils',
'-e',
'typescript',
'-o',
helpersDir,
],
{ stdio: 'inherit' }
);
await fs.rename(join(helpersDir, 'index.js'), join(__dirname, 'helpers.js'));
await fs.remove(helpersDir);
// Bundle `source-map-support/register` with ncc for source maps
const sourceMapSupportDir = join(__dirname, 'source-map-support');
await execa(
'ncc',
[
'build',
join(__dirname, '../../node_modules/source-map-support/register'),
'-e',
'@vercel/node-bridge',
'-e',
'@vercel/build-utils',
'-e',
'typescript',
'-o',
sourceMapSupportDir,
],
{ stdio: 'inherit' }
);
await fs.rename(
join(sourceMapSupportDir, 'index.js'),
join(__dirname, 'source-map-support.js')
);
await fs.remove(sourceMapSupportDir);
}
main().catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -1,3 +0,0 @@
const path = require('path');
module.exports = path.join(__dirname, 'bridge.js');

View File

@@ -1,16 +0,0 @@
import { Bridge } from './bridge';
import { LauncherConfiguration } from './types';
export declare function makeVercelLauncher(
config: LauncherConfiguration
): string;
export declare function getVercelLauncher({
entrypointPath,
helpersPath,
shouldAddHelpers,
}: LauncherConfiguration): () => Bridge;
export declare function makeAwsLauncher(config: LauncherConfiguration): string;
export declare function getAwsLauncher({
entrypointPath,
awsLambdaHandler,
}: LauncherConfiguration): (e: any, context: any, callback: any) => any;
export {};

View File

@@ -1,199 +0,0 @@
const { parse, pathToFileURL } = require('url');
const { createServer, Server } = require('http');
const { isAbsolute } = require('path');
const { Bridge } = require('./bridge.js');
/**
* @param {import('./types').LauncherConfiguration} config
*/
function makeVercelLauncher(config) {
const {
entrypointPath,
bridgePath,
helpersPath,
sourcemapSupportPath,
shouldAddHelpers = false,
shouldAddSourcemapSupport = false,
} = config;
return `
const { parse, pathToFileURL } = require('url');
const { createServer, Server } = require('http');
const { isAbsolute } = require('path');
const { Bridge } = require(${JSON.stringify(bridgePath)});
${
shouldAddSourcemapSupport
? `require(${JSON.stringify(sourcemapSupportPath)});`
: ''
}
const entrypointPath = ${JSON.stringify(entrypointPath)};
const shouldAddHelpers = ${JSON.stringify(shouldAddHelpers)};
const helpersPath = ${JSON.stringify(helpersPath)};
const useRequire = false;
const func = (${getVercelLauncher(config).toString()})();
exports.launcher = func.launcher;`;
}
/**
* @param {import('./types').LauncherConfiguration} config
*/
function getVercelLauncher({
entrypointPath,
helpersPath,
shouldAddHelpers = false,
useRequire = false,
}) {
return function () {
const bridge = new Bridge();
let isServerListening = false;
const originalListen = Server.prototype.listen;
Server.prototype.listen = function listen() {
isServerListening = true;
console.log('Legacy server listening...');
bridge.setServer(this);
Server.prototype.listen = originalListen;
bridge.listen();
return this;
};
if (!process.env.NODE_ENV) {
const region = process.env.VERCEL_REGION || process.env.NOW_REGION;
process.env.NODE_ENV = region === 'dev1' ? 'development' : 'production';
}
/**
* @param {string} p - entrypointPath
*/
async function getListener(p) {
let listener = useRequire
? require(p)
: await import(isAbsolute(p) ? pathToFileURL(p).href : p);
// In some cases we might have nested default props due to TS => JS
for (let i = 0; i < 5; i++) {
if (listener.default) listener = listener.default;
}
return listener;
}
getListener(entrypointPath)
.then(listener => {
if (typeof listener.listen === 'function') {
Server.prototype.listen = originalListen;
const server = listener;
bridge.setServer(server);
bridge.listen();
} else if (typeof listener === 'function') {
Server.prototype.listen = originalListen;
if (shouldAddHelpers) {
bridge.setStoreEvents(true);
import(helpersPath).then(helper => {
const h = helper.default || helper;
const server = h.createServerWithHelpers(listener, bridge);
bridge.setServer(server);
bridge.listen();
});
} else {
const server = createServer(listener);
bridge.setServer(server);
bridge.listen();
}
} else if (
typeof listener === 'object' &&
Object.keys(listener).length === 0
) {
setTimeout(() => {
if (!isServerListening) {
console.error('No exports found in module %j.', entrypointPath);
console.error('Did you forget to export a function or a server?');
process.exit(1);
}
}, 5000);
} else {
console.error('Invalid export found in module %j.', entrypointPath);
console.error('The default export must be a function or server.');
}
})
.catch(err => {
if (err.code === 'MODULE_NOT_FOUND') {
console.error(err.message);
console.error(
'Did you forget to add it to "dependencies" in `package.json`?'
);
} else {
console.error(err);
}
process.exit(1);
});
return bridge;
};
}
/**
* @param {import('./types').LauncherConfiguration} config
*/
function makeAwsLauncher(config) {
const { entrypointPath, awsLambdaHandler = '' } = config;
return `const { parse } = require("url");
const funcName = ${JSON.stringify(awsLambdaHandler.split('.').pop())};
const entrypointPath = ${JSON.stringify(entrypointPath)};
exports.launcher = ${getAwsLauncher(config).toString()}`;
}
/**
* @param {import('./types').LauncherConfiguration} config
*/
function getAwsLauncher({ entrypointPath, awsLambdaHandler = '' }) {
const funcName = awsLambdaHandler.split('.').pop() || '';
if (typeof funcName !== 'string') {
throw new TypeError('Expected "string"');
}
/**
* @param {import('aws-lambda').APIGatewayProxyEvent} event
* @param {import('aws-lambda').Context} context
* @param {() => void} callback
*/
function internal(event, context, callback) {
const {
path,
method: httpMethod,
body,
headers,
} = JSON.parse(event.body || '{}');
const { query } = parse(path, true);
/**
* @type {{[key: string]: string}}
*/
const queryStringParameters = {};
for (const [key, value] of Object.entries(query)) {
if (typeof value === 'string') {
queryStringParameters[key] = value;
}
}
const awsGatewayEvent = {
resource: '/{proxy+}',
path: path,
httpMethod: httpMethod,
body: body,
isBase64Encoded: true,
queryStringParameters: queryStringParameters,
multiValueQueryStringParameters: query,
headers: headers,
};
const mod = require(entrypointPath);
return mod[funcName](awsGatewayEvent, context, callback);
}
return internal;
}
module.exports = {
makeVercelLauncher,
getVercelLauncher,
makeAwsLauncher,
getAwsLauncher,
};

View File

@@ -1,35 +0,0 @@
{
"name": "@vercel/node-bridge",
"version": "4.0.1",
"license": "Apache-2.0",
"main": "./index.js",
"repository": {
"type": "git",
"url": "https://github.com/vercel/vercel.git",
"directory": "packages/node-bridge"
},
"files": [
"bridge.*",
"launcher.*",
"index.js",
"helpers.js",
"source-map-support.js"
],
"scripts": {
"build": "node build.js",
"test": "jest --env node --verbose --runInBand --bail",
"test-unit": "pnpm test"
},
"devDependencies": {
"@types/aws-lambda": "8.10.19",
"@types/node": "14.18.33",
"content-type": "1.0.4",
"cookie": "0.4.0",
"etag": "1.8.1",
"execa": "3.2.0",
"fs-extra": "10.0.0",
"jsonlines": "0.1.1",
"test-listen": "1.1.0",
"typescript": "4.3.4"
}
}

View File

@@ -1,467 +0,0 @@
const assert = require('assert');
const crypto = require('crypto');
const jsonlines = require('jsonlines');
const { Server } = require('http');
const { Bridge } = require('../bridge');
const { runServer } = require('./run-test-server');
const { runTcpServer } = require('./run-test-server');
test('port binding', async () => {
const server = new Server();
const bridge = new Bridge(server);
bridge.listen();
// Test port binding
const info = await bridge.listening;
assert.strictEqual(info.address, '127.0.0.1');
assert.strictEqual(typeof info.port, 'number');
server.close();
});
test('`NowProxyEvent` normalizing', async () => {
const server = new Server((req, res) =>
res.end(
JSON.stringify({
method: req.method,
path: req.url,
headers: req.headers,
})
)
);
let features;
class CustomBridge extends Bridge {
handleEvent(normalizedEvent) {
features = normalizedEvent.features;
return super.handleEvent(normalizedEvent);
}
}
const bridge = new CustomBridge(server);
bridge.listen();
const context = { callbackWaitsForEmptyEventLoop: true };
const result = await bridge.launcher(
{
Action: 'Invoke',
body: JSON.stringify({
method: 'POST',
headers: { foo: 'baz' },
features: { enabled: true },
path: '/nowproxy',
body: 'body=1',
}),
},
context
);
assert.deepStrictEqual(features, { enabled: true });
assert.strictEqual(result.encoding, 'base64');
assert.strictEqual(result.statusCode, 200);
const body = JSON.parse(Buffer.from(result.body, 'base64').toString());
assert.strictEqual(body.method, 'POST');
assert.strictEqual(body.path, '/nowproxy');
assert.strictEqual(body.headers.foo, 'baz');
assert.strictEqual(context.callbackWaitsForEmptyEventLoop, false);
server.close();
});
test('multi-payload handling', async () => {
const server = new Server((req, res) => {
if (req.url === '/redirect') {
res.setHeader('Location', '/somewhere');
res.statusCode = 307;
res.end('/somewhere');
return;
}
res.setHeader(
'content-type',
req.url.includes('_next/data') ? 'application/json' : 'text/html'
);
res.end(
JSON.stringify({
method: req.method,
path: req.url,
headers: req.headers,
})
);
});
const bridge = new Bridge(server);
bridge.listen();
const context = { callbackWaitsForEmptyEventLoop: true };
const result = await bridge.launcher(
{
Action: 'Invoke',
body: JSON.stringify({
payloads: [
{
method: 'GET',
headers: { foo: 'baz' },
path: '/nowproxy',
},
{
method: 'GET',
headers: { foo: 'baz' },
path: '/_next/data/build-id/nowproxy.json',
},
{
method: 'GET',
headers: { foo: 'baz' },
path: '/redirect',
},
],
}),
},
context
);
assert.strictEqual(result.encoding, 'base64');
assert.strictEqual(result.statusCode, 200);
assert.strictEqual(
result.headers['content-type'],
'multipart/mixed; boundary="payload-separator"'
);
const bodies = [];
const payloadParts = result.body.split('\r\n');
payloadParts.forEach(item => {
if (
item.trim() &&
!item.startsWith('content-type:') &&
!item.startsWith('--payload')
) {
const content = Buffer.from(
item.split('--payload-separator')[0],
'base64'
).toString();
bodies.push(content.startsWith('{') ? JSON.parse(content) : content);
}
});
// ensure content-type is always specified as is required for
// proper parsing of the multipart body
assert(payloadParts.some(part => part.includes('content-type: text/plain')));
assert.strictEqual(bodies[0].method, 'GET');
assert.strictEqual(bodies[0].path, '/nowproxy');
assert.strictEqual(bodies[0].headers.foo, 'baz');
assert.strictEqual(bodies[1].method, 'GET');
assert.strictEqual(bodies[1].path, '/_next/data/build-id/nowproxy.json');
assert.strictEqual(bodies[1].headers.foo, 'baz');
assert.strictEqual(bodies[2], '/somewhere');
assert.strictEqual(result.headers['x-vercel-payload-3-status'], '307');
assert.strictEqual(result.headers['x-vercel-payload-2-status'], undefined);
assert.strictEqual(result.headers['x-vercel-payload-1-status'], undefined);
assert.strictEqual(
result.headers['x-vercel-payload-1-content-type'],
'text/html'
);
assert.strictEqual(
result.headers['x-vercel-payload-2-content-type'],
'application/json'
);
assert.strictEqual(
result.headers['x-vercel-payload-3-content-type'],
undefined
);
assert.strictEqual(
result.headers['x-vercel-payload-3-location'],
'/somewhere'
);
assert.strictEqual(result.headers['x-vercel-payload-2-location'], undefined);
assert.strictEqual(context.callbackWaitsForEmptyEventLoop, false);
server.close();
});
test('consumeEvent', async () => {
const mockListener = jest.fn((_, res) => {
res.end('hello');
});
const server = new Server(mockListener);
const bridge = new Bridge(server, true);
bridge.listen();
const context = { callbackWaitsForEmptyEventLoop: true };
await bridge.launcher(
{
Action: 'Invoke',
body: JSON.stringify({
method: 'POST',
headers: { foo: 'baz' },
path: '/nowproxy',
body: 'body=1',
}),
},
context
);
const headers = mockListener.mock.calls[0][0].headers;
const reqId = headers['x-now-bridge-request-id'];
expect(reqId).toBeTruthy();
const event = bridge.consumeEvent(reqId);
expect(event.body.toString()).toBe('body=1');
// an event can't be consumed multiple times
// to avoid memory leaks
expect(bridge.consumeEvent(reqId)).toBeUndefined();
server.close();
});
test('consumeEvent and handle decoded path', async () => {
const mockListener = jest.fn((_, res) => {
res.end('hello');
});
const server = new Server(mockListener);
const bridge = new Bridge(server, true);
bridge.listen();
const context = { callbackWaitsForEmptyEventLoop: true };
await bridge.launcher(
{
Action: 'Invoke',
body: JSON.stringify({
method: 'POST',
headers: { foo: 'baz' },
path: '/now proxy',
body: 'body=1',
}),
},
context
);
const headers = mockListener.mock.calls[0][0].headers;
const reqId = headers['x-now-bridge-request-id'];
expect(reqId).toBeTruthy();
const event = bridge.consumeEvent(reqId);
expect(event.body.toString()).toBe('body=1');
// an event can't be consumed multiple times
// to avoid memory leaks
expect(bridge.consumeEvent(reqId)).toBeUndefined();
server.close();
});
test('invalid request headers', async () => {
const server = new Server((req, res) =>
res.end(
JSON.stringify({
method: req.method,
path: req.url,
headers: req.headers,
})
)
);
const bridge = new Bridge(server);
bridge.listen();
const context = { callbackWaitsForEmptyEventLoop: true };
const result = await bridge.launcher(
{
Action: 'Invoke',
body: JSON.stringify({
method: 'GET',
headers: { foo: 'baz\n', ok: 'true' },
path: '/nowproxy',
body: 'body=1',
}),
},
context
);
assert.strictEqual(result.encoding, 'base64');
assert.strictEqual(result.statusCode, 200);
const body = JSON.parse(Buffer.from(result.body, 'base64').toString());
assert.strictEqual(body.method, 'GET');
assert.strictEqual(body.path, '/nowproxy');
assert.strictEqual(body.headers.ok, 'true');
assert(!body.headers.foo);
assert.strictEqual(context.callbackWaitsForEmptyEventLoop, false);
server.close();
});
test('`NowProxyEvent` proxy streaming with a sync handler', async () => {
const cipherParams = {
cipher: 'aes-256-ctr',
cipherIV: crypto.randomBytes(16),
cipherKey: crypto.randomBytes(32),
};
const effects = {
callbackPayload: undefined,
callbackStream: undefined,
};
const { deferred, resolve } = createDeferred();
const httpServer = await runServer({
handler: (req, res) => {
const chunks = [];
req.on('data', chunk => {
chunks.push(chunk.toString());
});
req.on('close', () => {
effects.callbackPayload = chunks;
res.writeHead(200, 'OK', { 'content-type': 'application/json' });
res.end();
resolve();
});
},
});
const tcpServerCallback = await runTcpServer({
cipherParams,
effects,
httpServer,
});
const server = new Server((req, res) => {
res.setHeader('content-type', 'text/html');
res.end('hello');
});
const bridge = new Bridge(server);
bridge.listen();
const context = { callbackWaitsForEmptyEventLoop: true };
const result = await bridge.launcher(
{
Action: 'Invoke',
body: JSON.stringify({
method: 'POST',
responseCallbackCipher: cipherParams.cipher,
responseCallbackCipherIV: cipherParams.cipherIV.toString('base64'),
responseCallbackCipherKey: cipherParams.cipherKey.toString('base64'),
responseCallbackStream: 'abc',
responseCallbackUrl: String(tcpServerCallback.url),
headers: { foo: 'bar' },
path: '/nowproxy',
body: 'body=1',
}),
},
context
);
await deferred;
expect(result).toEqual({});
expect(context.callbackWaitsForEmptyEventLoop).toEqual(false);
expect(effects.callbackStream).toEqual('abc');
expect(effects.callbackPayload).toEqual(['hello']);
server.close();
await httpServer.close();
await tcpServerCallback.close();
});
test('`NowProxyEvent` proxy streaming with an async handler', async () => {
  // Side effects captured by the callback TCP server once the encrypted
  // response stream has been fully consumed.
  const effects = {
    callbackHeaders: undefined,
    callbackMethod: undefined,
    callbackPayload: undefined,
    callbackStream: undefined,
  };
  // Symmetric cipher parameters shared between the bridge (encrypts the
  // response stream) and the test TCP server (decrypts it).
  const cipherParams = {
    cipher: 'aes-256-ctr',
    cipherIV: crypto.randomBytes(16),
    cipherKey: crypto.randomBytes(32),
  };
  const { deferred, resolve } = createDeferred();
  const jsonParser = jsonlines.parse();
  // Plain HTTP server that receives the decrypted callback request and
  // records method/headers/body for the assertions below.
  const httpServer = await runServer({
    handler: (req, res) => {
      const chunks = [];
      req.pipe(jsonParser);
      jsonParser.on('data', chunk => {
        chunks.push(chunk);
      });
      req.on('close', () => {
        effects.callbackMethod = req.method;
        effects.callbackHeaders = req.headers;
        effects.callbackPayload = chunks;
        res.writeHead(200, 'OK', { 'content-type': 'application/json' });
        res.end();
        resolve();
      });
    },
  });
  const tcpServerCallback = await runTcpServer({
    cipherParams,
    httpServer,
    effects,
  });
  const jsonStringifier = jsonlines.stringify();
  // User handler: writes two JSON lines synchronously, then a third one
  // asynchronously, to exercise streamed (not buffered) delivery.
  const server = new Server((req, res) => {
    res.setHeader('x-test', 'hello');
    res.setHeader('content-type', 'text/html');
    jsonStringifier.pipe(res);
    jsonStringifier.write({ method: req.method });
    jsonStringifier.write({ path: req.url });
    setTimeout(() => {
      jsonStringifier.write({ headers: req.headers });
      res.end();
    }, 100);
  });
  const bridge = new Bridge(server);
  bridge.listen();
  const context = { callbackWaitsForEmptyEventLoop: true };
  const result = await bridge.launcher(
    {
      Action: 'Invoke',
      body: JSON.stringify({
        method: 'POST',
        responseCallbackCipher: cipherParams.cipher,
        responseCallbackCipherIV: cipherParams.cipherIV.toString('base64'),
        responseCallbackCipherKey: cipherParams.cipherKey.toString('base64'),
        responseCallbackStream: 'abc',
        responseCallbackUrl: String(tcpServerCallback.url),
        headers: { foo: 'bar' },
        path: '/nowproxy',
        body: 'body=1',
      }),
    },
    context
  );
  await deferred;
  // A streamed response resolves with an empty object; the real payload is
  // observed through the callback servers.
  expect(result).toEqual({});
  expect(context.callbackWaitsForEmptyEventLoop).toEqual(false);
  expect(effects.callbackStream).toEqual('abc');
  expect(effects.callbackMethod).toEqual('POST');
  expect(effects.callbackHeaders).toMatchObject({
    'x-vercel-status-code': '200',
    'x-vercel-header-x-test': 'hello',
    'x-vercel-header-content-type': 'text/html',
  });
  expect(effects.callbackPayload).toMatchObject([
    { method: 'POST' },
    { path: '/nowproxy' },
    { headers: { foo: 'bar' } },
  ]);
  server.close();
  // The helper closers return promises; await them so no sockets leak into
  // later tests (this mirrors the earlier streaming test's teardown).
  await httpServer.close();
  await tcpServerCallback.close();
});
/**
 * Builds a deferred: a promise plus its externally-callable resolver.
 * Returns { deferred, resolve } where calling resolve settles deferred.
 */
function createDeferred() {
  let resolveFn;
  const promise = new Promise(r => {
    resolveFn = r;
  });
  return { deferred: promise, resolve: resolveFn };
}

View File

@@ -1,842 +0,0 @@
// Test doubles shared by every suite in this file.
const fetch = require('node-fetch');
const listen = require('test-listen');
const qs = require('querystring');
const { createServerWithHelpers } = require('../helpers');
// Handler invoked for each proxied request; reconfigured per test.
const mockListener = jest.fn();
// Stub for the bridge's consumeEvent(); returns the proxy-event payload.
const consumeEventMock = jest.fn();
const mockBridge = { consumeEvent: consumeEventMock };
// Server under test and its base URL, (re)created in beforeEach.
let server;
let url;
/**
 * Issues a request against the helper server while priming the bridge mock.
 * The next consumeEvent() call returns this request's options, and the
 * bridge request-id header ('2') is always attached.
 */
async function fetchWithProxyReq(_url, opts = {}) {
  // Normalize a string body to a Buffer, matching how the bridge delivers
  // raw request bytes.
  const normalized = opts.body
    ? { ...opts, body: Buffer.from(opts.body) }
    : opts;
  consumeEventMock.mockImplementationOnce(() => normalized);
  const headers = { ...normalized.headers, 'x-now-bridge-request-id': '2' };
  return fetch(_url, { ...normalized, headers });
}
// Fresh server and default mock behavior before every test; tear down after.
beforeEach(async () => {
  mockListener.mockClear();
  consumeEventMock.mockClear();
  // Defaults: respond 'hello' and hand back an empty proxy event.
  // Individual tests override these implementations as needed.
  mockListener.mockImplementation((req, res) => {
    res.send('hello');
  });
  consumeEventMock.mockImplementation(() => ({}));
  server = createServerWithHelpers(mockListener, mockBridge);
  url = await listen(server);
});
afterEach(async () => {
  await server.close();
});
// The helper server must honor the bridge contract: pull the event payload
// via consumeEvent(reqId) and strip the internal request-id header before
// user code sees the request.
describe('contract with @vercel/node-bridge', () => {
  test('should call consumeEvent with the correct reqId', async () => {
    await fetchWithProxyReq(`${url}/`);
    // fetchWithProxyReq always sends x-now-bridge-request-id: '2'.
    expect(consumeEventMock).toHaveBeenLastCalledWith('2');
  });
  test('should not expose the request id header', async () => {
    await fetchWithProxyReq(`${url}/`, { headers: { 'x-test-header': 'ok' } });
    const [{ headers }] = mockListener.mock.calls[0];
    expect(headers['x-now-bridge-request-id']).toBeUndefined();
    expect(headers['x-test-header']).toBe('ok');
  });
});
// Exercises every injected helper: request-side helpers live on argument 0
// (req) and response-side helpers on argument 1 (res) of the listener.
describe('all helpers', () => {
  // [property name, listener-argument index it is attached to]
  const nowHelpers = [
    ['query', 0],
    ['cookies', 0],
    ['body', 0],
    ['status', 1],
    ['redirect', 1],
    ['send', 1],
    ['json', 1],
  ];
  test('should not recalculate req properties twice', async () => {
    const spy = jest.fn(() => {});
    const nowReqHelpers = nowHelpers.filter(([, i]) => i === 0);
    mockListener.mockImplementation((req, res) => {
      // Destructure the property name out of each [prop, idx] pair.
      // Previously this read `req[h]` with the whole pair, which coerces to
      // the string "query,0" — the spy only ever captured undefined and the
      // memoization check below passed vacuously.
      spy(...nowReqHelpers.map(([prop]) => req[prop]));
      spy(...nowReqHelpers.map(([prop]) => req[prop]));
      res.end();
    });
    await fetchWithProxyReq(`${url}/?who=bill`, {
      method: 'POST',
      body: JSON.stringify({ who: 'mike' }),
      headers: { 'content-type': 'application/json', cookie: 'who=jim' },
    });
    // The spy must be called twice with exactly the same (cached) values.
    for (let i = 0; i < 3; i += 1) {
      expect(spy.mock.calls[0][i]).toBe(spy.mock.calls[1][i]);
    }
  });
  test('should be able to overwrite request properties', async () => {
    const spy = jest.fn(() => {});
    mockListener.mockImplementation((...args) => {
      nowHelpers.forEach(([prop, n]) => {
        /* eslint-disable */
        args[n][prop] = 'ok';
        args[n][prop] = 'ok2';
        spy(args[n][prop]);
      });
      args[1].end();
    });
    await fetchWithProxyReq(url);
    nowHelpers.forEach((_, i) => expect(spy.mock.calls[i][0]).toBe('ok2'));
  });
  test('should be able to reconfig request properties', async () => {
    const spy = jest.fn(() => {});
    mockListener.mockImplementation((...args) => {
      nowHelpers.forEach(([prop, n]) => {
        // eslint-disable-next-line
        Object.defineProperty(args[n], prop, { value: 'ok' });
        Object.defineProperty(args[n], prop, { value: 'ok2' });
        spy(args[n][prop]);
      });
      args[1].end();
    });
    await fetchWithProxyReq(url);
    nowHelpers.forEach((_, i) => expect(spy.mock.calls[i][0]).toBe('ok2'));
  });
});
// req.query: querystring parsed lazily from the request URL.
describe('req.query', () => {
  test('req.query should reflect querystring in the url', async () => {
    await fetchWithProxyReq(`${url}/?who=bill&where=us`);
    expect(mockListener.mock.calls[0][0].query).toMatchObject({
      who: 'bill',
      where: 'us',
    });
  });
  test('req.query should turn multiple params with same name into an array', async () => {
    await fetchWithProxyReq(`${url}/?a=2&a=1`);
    // Repeated keys collapse into an array, preserving order of appearance.
    expect(mockListener.mock.calls[0][0].query).toMatchObject({
      a: ['2', '1'],
    });
  });
  test('req.query should be {} when there is no querystring', async () => {
    await fetchWithProxyReq(url);
    const [{ query }] = mockListener.mock.calls[0];
    expect(Object.keys(query).length).toBe(0);
  });
});
// req.cookies: parsed from the Cookie request header into a name→value map.
describe('req.cookies', () => {
  test('req.cookies should reflect req.cookie header', async () => {
    await fetchWithProxyReq(url, {
      headers: {
        cookie: 'who=bill; where=us',
      },
    });
    expect(mockListener.mock.calls[0][0].cookies).toMatchObject({
      who: 'bill',
      where: 'us',
    });
  });
});
// req.body: derived from the raw request according to the content-type
// header — undefined, string, Buffer, or parsed object.
describe('req.body', () => {
  test('req.body should be undefined by default', async () => {
    await fetchWithProxyReq(url);
    expect(mockListener.mock.calls[0][0].body).toBe(undefined);
  });
  test('req.body should be undefined if content-type is not defined', async () => {
    await fetchWithProxyReq(url, {
      method: 'POST',
      body: 'hello',
    });
    expect(mockListener.mock.calls[0][0].body).toBe(undefined);
  });
  test('req.body should be a string when content-type is `text/plain`', async () => {
    await fetchWithProxyReq(url, {
      method: 'POST',
      body: 'hello',
      headers: { 'content-type': 'text/plain' },
    });
    expect(mockListener.mock.calls[0][0].body).toBe('hello');
  });
  test('req.body should be a buffer when content-type is `application/octet-stream`', async () => {
    await fetchWithProxyReq(url, {
      method: 'POST',
      body: 'hello',
      headers: { 'content-type': 'application/octet-stream' },
    });
    const [{ body }] = mockListener.mock.calls[0];
    const str = body.toString();
    expect(Buffer.isBuffer(body)).toBe(true);
    expect(str).toBe('hello');
  });
  test('req.body should be an object when content-type is `application/x-www-form-urlencoded`', async () => {
    const obj = { who: 'mike' };
    await fetchWithProxyReq(url, {
      method: 'POST',
      body: qs.encode(obj),
      headers: { 'content-type': 'application/x-www-form-urlencoded' },
    });
    expect(mockListener.mock.calls[0][0].body).toMatchObject(obj);
  });
  test('req.body should be an object when content-type is `application/json`', async () => {
    const json = {
      who: 'bill',
      where: 'us',
    };
    await fetchWithProxyReq(url, {
      method: 'POST',
      body: JSON.stringify(json),
      headers: { 'content-type': 'application/json' },
    });
    expect(mockListener.mock.calls[0][0].body).toMatchObject(json);
  });
  test('should work when body is empty and content-type is `application/json`', async () => {
    mockListener.mockImplementation((req, res) => {
      console.log(req.body);
      res.end();
    });
    const res = await fetchWithProxyReq(url, {
      method: 'POST',
      body: '',
      headers: { 'content-type': 'application/json' },
    });
    expect(res.status).toBe(200);
    expect(res.body).toMatchObject({});
  });
  test('should be able to try/catch parse errors', async () => {
    const bodySpy = jest.fn(() => {});
    mockListener.mockImplementation((req, res) => {
      try {
        // Accessing req.body triggers parsing; malformed JSON should throw
        // a catchable error that carries a 400 statusCode.
        if (req.body === undefined) res.status(400);
      } catch (error) {
        bodySpy(error);
      } finally {
        res.end();
      }
    });
    await fetchWithProxyReq(url, {
      method: 'POST',
      body: '{"wrong":"json"',
      headers: { 'content-type': 'application/json' },
    });
    expect(bodySpy).toHaveBeenCalled();
    const [error] = bodySpy.mock.calls[0];
    expect(error.message).toMatch(/invalid json/i);
    expect(error.statusCode).toBe(400);
  });
});
// res.status(code): sets the status code and returns res for chaining.
describe('res.status', () => {
  test('res.status() should set the status code', async () => {
    mockListener.mockImplementation((req, res) => {
      res.status(404);
      res.end();
    });
    const res = await fetchWithProxyReq(url);
    expect(res.status).toBe(404);
  });
  test('res.status() should be chainable', async () => {
    const spy = jest.fn();
    mockListener.mockImplementation((req, res) => {
      spy(res, res.status(404));
      res.end();
    });
    await fetchWithProxyReq(url);
    // Both spy arguments must be the very same response object.
    const [a, b] = spy.mock.calls[0];
    expect(a).toBe(b);
  });
});
// res.redirect([status,] url): sets Location plus a redirect status code.
describe('res.redirect', () => {
  test('should redirect to login', async () => {
    mockListener.mockImplementation((req, res) => {
      res.redirect('/login');
      res.end();
    });
    const res = await fetchWithProxyReq(url, { redirect: 'manual' });
    // With no explicit code, 307 (temporary, method-preserving) is used.
    expect(res.status).toBe(307);
    expect(res.headers.get('location')).toBe(url + '/login');
  });
  test('should redirect with status code 301', async () => {
    mockListener.mockImplementation((req, res) => {
      res.redirect(301, '/login');
      res.end();
    });
    const res = await fetchWithProxyReq(url, { redirect: 'manual' });
    expect(res.status).toBe(301);
    expect(res.headers.get('location')).toBe(url + '/login');
  });
  test('should show friendly error for invalid redirect', async () => {
    let error;
    mockListener.mockImplementation((req, res) => {
      try {
        // A lone status code with no URL is an invalid call signature.
        res.redirect(307);
      } catch (err) {
        error = err;
      }
      res.end();
    });
    await fetchWithProxyReq(url, { redirect: 'manual' });
    expect(error.message).toBe(
      `Invalid redirect arguments. Please use a single argument URL, e.g. res.redirect('/destination') or use a status code and URL, e.g. res.redirect(307, '/destination').`
    );
  });
  test('should show friendly error in case of passing null as first argument redirect', async () => {
    let error;
    mockListener.mockImplementation((req, res) => {
      try {
        res.redirect(null);
      } catch (err) {
        error = err;
      }
      res.end();
    });
    await fetchWithProxyReq(url, { redirect: 'manual' });
    expect(error.message).toBe(
      `Invalid redirect arguments. Please use a single argument URL, e.g. res.redirect('/destination') or use a status code and URL, e.g. res.redirect(307, '/destination').`
    );
  });
});
// res.send(body): content-type/length/ETag negotiation based on body type.
// tests based on expressjs test suite
// see https://github.com/expressjs/express/blob/master/test/res.send.js
describe('res.send', () => {
  test('should be chainable', async () => {
    const spy = jest.fn();
    mockListener.mockImplementation((req, res) => {
      spy(res, res.send('hello'));
    });
    await fetchWithProxyReq(url);
    // res.send() must return the response object itself.
    const [a, b] = spy.mock.calls[0];
    expect(a).toBe(b);
  });
  describe('res.send()', () => {
    test('should set body to ""', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send();
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(await res.text()).toBe('');
    });
  });
  describe('.send(null)', () => {
    test('should set body to ""', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send(null);
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('content-length')).toBe('0');
      expect(await res.text()).toBe('');
    });
  });
  describe('.send(undefined)', () => {
    test('should set body to ""', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send(undefined);
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(await res.text()).toBe('');
    });
  });
  describe('.send(String)', () => {
    test('should send as html', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send('<p>hey</p>');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.headers.get('content-type')).toBe('text/html; charset=utf-8');
      expect(await res.text()).toBe('<p>hey</p>');
    });
    test('should set Content-Length', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send('½ + ¼ = ¾');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      // Content-Length counts UTF-8 bytes (12), not characters (9).
      expect(Number(res.headers.get('content-length'))).toBe(12);
      expect(await res.text()).toBe('½ + ¼ = ¾');
    });
    test('should set ETag', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send(Array(1000).join('-'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe(
        'W/"3e7-qPnkJ3CVdVhFJQvUBfF10TmVA7g"'
      );
    });
    test('should not override Content-Type', async () => {
      mockListener.mockImplementation((req, res) => {
        res.setHeader('Content-Type', 'text/plain');
        res.send('hey');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('Content-Type')).toBe('text/plain; charset=utf-8');
      expect(await res.text()).toBe('hey');
    });
    test('should override charset in Content-Type', async () => {
      mockListener.mockImplementation((req, res) => {
        res.setHeader('Content-Type', 'text/plain; charset=iso-8859-1');
        res.send('hey');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      // String bodies are always emitted as UTF-8, replacing prior charsets.
      expect(res.headers.get('Content-Type')).toBe('text/plain; charset=utf-8');
      expect(await res.text()).toBe('hey');
    });
  });
  describe('.send(Buffer)', () => {
    test('should keep charset in Content-Type', async () => {
      mockListener.mockImplementation((req, res) => {
        res.setHeader('Content-Type', 'text/plain; charset=iso-8859-1');
        res.send(Buffer.from('hi'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('Content-Type')).toBe(
        'text/plain; charset=iso-8859-1'
      );
      expect(await res.text()).toBe('hi');
    });
    test('should set Content-Length', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send(Buffer.from('½ + ¼ = ¾'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(Number(res.headers.get('content-length'))).toBe(12);
      expect(await res.text()).toBe('½ + ¼ = ¾');
    });
    test('should send as octet-stream', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send(Buffer.from('hello'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('Content-Type')).toBe('application/octet-stream');
      expect((await res.buffer()).toString('hex')).toBe(
        Buffer.from('hello').toString('hex')
      );
    });
    test('should set ETag', async () => {
      mockListener.mockImplementation((req, res) => {
        // 999 dashes — same payload bytes as Array(1000).join('-') above,
        // so the weak ETag matches the string-body case.
        res.send(Buffer.alloc(999, '-'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe(
        'W/"3e7-qPnkJ3CVdVhFJQvUBfF10TmVA7g"'
      );
    });
    test('should not override Content-Type', async () => {
      mockListener.mockImplementation((req, res) => {
        res.setHeader('Content-Type', 'text/plain; charset=utf-8');
        res.send(Buffer.from('hey'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('Content-Type')).toBe('text/plain; charset=utf-8');
      expect(await res.text()).toBe('hey');
    });
    test('should not override ETag', async () => {
      mockListener.mockImplementation((req, res) => {
        res.setHeader('ETag', '"foo"');
        res.send(Buffer.from('hey'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe('"foo"');
      expect(await res.text()).toBe('hey');
    });
  });
  describe('.send(Object)', () => {
    test('should send as application/json', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send({ name: 'tobi' });
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('Content-Type')).toBe(
        'application/json; charset=utf-8'
      );
      expect(await res.text()).toBe('{"name":"tobi"}');
    });
  });
  describe('when the request method is HEAD', () => {
    test('should ignore the body', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send('yay');
      });
      // TODO: fix this test
      // node-fetch is automatically ignoring the body so this test will never fail
      const res = await fetchWithProxyReq(url, { method: 'HEAD' });
      expect(res.status).toBe(200);
      expect((await res.buffer()).toString()).toBe('');
    });
  });
  describe('when .statusCode is 204', () => {
    test('should strip Content-* fields, Transfer-Encoding field, and body', async () => {
      mockListener.mockImplementation((req, res) => {
        res.statusCode = 204;
        res.setHeader('Transfer-Encoding', 'chunked');
        res.send('foo');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(204);
      expect(res.headers.get('Content-Type')).toBe(null);
      expect(res.headers.get('Content-Length')).toBe(null);
      expect(res.headers.get('Transfer-Encoding')).toBe(null);
      expect(await res.text()).toBe('');
    });
  });
  describe('when .statusCode is 304', () => {
    test('should strip Content-* fields, Transfer-Encoding field, and body', async () => {
      mockListener.mockImplementation((req, res) => {
        res.statusCode = 304;
        res.setHeader('Transfer-Encoding', 'chunked');
        res.send('foo');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(304);
      expect(res.headers.get('Content-Type')).toBe(null);
      expect(res.headers.get('Content-Length')).toBe(null);
      expect(res.headers.get('Transfer-Encoding')).toBe(null);
      expect(await res.text()).toBe('');
    });
  });
  // Freshness (If-None-Match → 304) tests are disabled; kept for reference.
  // test('should always check regardless of length', async () => {
  //   const etag = '"asdf"';
  //   mockListener.mockImplementation((req, res) => {
  //     res.setHeader('ETag', etag);
  //     res.send('hey');
  //   });
  //   const res = await fetchWithProxyReq(url, {
  //     headers: { 'If-None-Match': etag },
  //   });
  //   expect(res.status).toBe(304);
  // });
  // test('should respond with 304 Not Modified when fresh', async () => {
  //   const etag = '"asdf"';
  //   mockListener.mockImplementation((req, res) => {
  //     res.setHeader('ETag', etag);
  //     res.send(Array(1000).join('-'));
  //   });
  //   const res = await fetchWithProxyReq(url, {
  //     headers: { 'If-None-Match': etag },
  //   });
  //   expect(res.status).toBe(304);
  // });
  // test('should not perform freshness check unless 2xx or 304', async () => {
  //   const etag = '"asdf"';
  //   mockListener.mockImplementation((req, res) => {
  //     res.status(500);
  //     res.setHeader('ETag', etag);
  //     res.send('hey');
  //   });
  //   const res = await fetchWithProxyReq(url, {
  //     headers: { 'If-None-Match': etag },
  //   });
  //   expect(res.status).toBe(500);
  //   expect(await res.text()).toBe('hey');
  // });
  describe('etag', () => {
    test('should send ETag', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send('kajdslfkasdf');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe('W/"c-IgR/L5SF7CJQff4wxKGF/vfPuZ0"');
    });
    test('should send ETag for empty string response', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send('');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe('W/"0-2jmj7l5rSw0yVb/vlWAYkK/YBwk"');
    });
    test('should send ETag for long response', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send(Array(1000).join('-'));
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe(
        'W/"3e7-qPnkJ3CVdVhFJQvUBfF10TmVA7g"'
      );
    });
    test('should not override ETag when manually set', async () => {
      mockListener.mockImplementation((req, res) => {
        res.setHeader('etag', '"asdf"');
        res.send('hello');
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe('"asdf"');
    });
    test('should not send ETag for res.send()', async () => {
      mockListener.mockImplementation((req, res) => {
        res.send();
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('ETag')).toBe(null);
    });
  });
});
// res.json(body): JSON serialization plus content-type/length handling.
// tests based on expressjs test suite
// see https://github.com/expressjs/express/blob/master/test/res.json.js
describe('res.json', () => {
  test('should send be chainable', async () => {
    const spy = jest.fn();
    mockListener.mockImplementation((req, res) => {
      spy(res, res.json({ hello: 'world' }));
    });
    await fetchWithProxyReq(url);
    // res.json() must return the response object itself.
    const [a, b] = spy.mock.calls[0];
    expect(a).toBe(b);
  });
  test('res.json() should send an empty body', async () => {
    mockListener.mockImplementation((req, res) => {
      res.json();
    });
    // A single request suffices; the original issued a stray extra fetch
    // whose response was discarded.
    const res = await fetchWithProxyReq(url);
    expect(res.status).toBe(200);
    expect(res.headers.get('content-type')).toBe(
      'application/json; charset=utf-8'
    );
    expect(await res.text()).toBe('');
  });
  describe('.json(object)', () => {
    test('should not override previous Content-Types', async () => {
      mockListener.mockImplementation((req, res) => {
        res.setHeader('content-type', 'application/vnd.example+json');
        res.json({ hello: 'world' });
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('content-type')).toBe(
        'application/vnd.example+json; charset=utf-8'
      );
      expect(await res.text()).toBe('{"hello":"world"}');
    });
    test('should set Content-Length and Content-Type', async () => {
      mockListener.mockImplementation((req, res) => {
        res.json({ hello: '½ + ¼ = ¾' });
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('content-type')).toBe(
        'application/json; charset=utf-8'
      );
      // Byte length of the UTF-8 encoded JSON, not the character count.
      expect(Number(res.headers.get('content-length'))).toBe(24);
      expect(await res.text()).toBe('{"hello":"½ + ¼ = ¾"}');
    });
    describe('when given primitives', () => {
      test('should respond with json for null', async () => {
        mockListener.mockImplementation((req, res) => {
          res.json(null);
        });
        const res = await fetchWithProxyReq(url);
        expect(res.status).toBe(200);
        expect(res.headers.get('content-type')).toBe(
          'application/json; charset=utf-8'
        );
        expect(await res.text()).toBe('null');
      });
      test('should respond with json for Number', async () => {
        mockListener.mockImplementation((req, res) => {
          res.json(300);
        });
        const res = await fetchWithProxyReq(url);
        expect(res.status).toBe(200);
        expect(res.headers.get('content-type')).toBe(
          'application/json; charset=utf-8'
        );
        expect(await res.text()).toBe('300');
      });
      test('should respond with json for String', async () => {
        mockListener.mockImplementation((req, res) => {
          res.json('str');
        });
        const res = await fetchWithProxyReq(url);
        expect(res.status).toBe(200);
        expect(res.headers.get('content-type')).toBe(
          'application/json; charset=utf-8'
        );
        expect(await res.text()).toBe('"str"');
      });
    });
    test('should respond with json when given an array', async () => {
      mockListener.mockImplementation((req, res) => {
        res.json(['foo', 'bar', 'baz']);
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('content-type')).toBe(
        'application/json; charset=utf-8'
      );
      expect(await res.text()).toBe('["foo","bar","baz"]');
    });
    test('should respond with json when given an object', async () => {
      mockListener.mockImplementation((req, res) => {
        res.json({ name: 'tobi' });
      });
      const res = await fetchWithProxyReq(url);
      expect(res.status).toBe(200);
      expect(res.headers.get('content-type')).toBe(
        'application/json; charset=utf-8'
      );
      expect(await res.text()).toBe('{"name":"tobi"}');
    });
  });
});

View File

@@ -1,78 +0,0 @@
const { createServer } = require('net');
const { Server } = require('http');
const { Socket } = require('net');
const { URL } = require('url');
const crypto = require('crypto');
const listen = require('test-listen');
exports.runServer = async function runServer({ handler }) {
const server = new Server(handler);
const url = await listen(server);
return { url: new URL(url), close: getKillServer(server) };
};
/**
 * Tracks every live connection on `server` and returns a closer that stops
 * the server, destroys in-flight sockets, and resolves once close completes
 * (rejecting if server.close reports an error).
 */
function getKillServer(server) {
  let openSockets = new Set();
  server.on('connection', conn => {
    openSockets.add(conn);
    // Forget sockets that close on their own.
    conn.once('close', () => {
      openSockets.delete(conn);
    });
  });
  return function killServer() {
    return new Promise((resolve, reject) => {
      server.close(err => (err ? reject(err) : resolve()));
      // Destroy lingering sockets so close() can actually finish.
      for (const conn of openSockets) {
        conn.destroy();
      }
      openSockets = new Set();
    });
  };
}
exports.runTcpServer = async function runTcpServer({
effects,
httpServer,
cipherParams,
}) {
const server = createServer();
server.on('connection', connection => {
const socket = new Socket();
socket.connect(parseInt(httpServer.url.port, 10), httpServer.hostname);
const decipher = crypto.createDecipheriv(
cipherParams.cipher,
cipherParams.cipherKey,
cipherParams.cipherIV
);
decipher.pipe(socket);
const CRLF = Buffer.from('\r\n');
let accBuffer = Buffer.from([]);
connection.on('data', function onConnectionData(chunk) {
accBuffer = Buffer.concat([accBuffer, chunk]);
const idx = accBuffer.indexOf(CRLF);
if (idx !== -1) {
effects.callbackStream = accBuffer.slice(0, idx).toString();
connection.off('data', onConnectionData);
decipher.write(accBuffer.slice(idx + 2));
connection.pipe(decipher);
decipher.on('close', () => {
socket.end();
});
}
});
});
const url = await listen(server);
return { url: new URL(url), close: getKillServer(server) };
};

View File

@@ -1,16 +0,0 @@
{
"compilerOptions": {
"allowJs": true,
"checkJs": true,
"lib": ["ES2020"],
"noEmit": true,
"noImplicitReturns": true,
"strict": true,
"target": "ES2020",
"declaration": true,
"module": "commonjs",
"skipLibCheck": true
},
"include": ["helpers.ts", "bridge.js", "launcher.js"],
"exclude": ["node_modules"]
}

View File

@@ -1,9 +0,0 @@
{
"$schema": "https://turborepo.org/schema.json",
"extends": ["//"],
"pipeline": {
"build": {
"outputs": ["helpers.js", "source-map-support.js"]
}
}
}

View File

@@ -1,72 +0,0 @@
/// <reference types="node" />
import type { CipherCCMTypes } from 'crypto';
import type {
Server,
IncomingHttpHeaders,
OutgoingHttpHeaders,
ServerResponse,
IncomingMessage,
} from 'http';
/** Raw invocation envelope delivered to the launcher by an Invoke action. */
export interface VercelProxyEvent {
  Action: string;
  body: string;
}
/** Decoded proxy request parsed from a VercelProxyEvent body. */
export interface VercelProxyRequest {
  isApiGateway: boolean;
  method: string;
  path: string;
  headers: IncomingHttpHeaders;
  body: Buffer;
  encoding?: string;
  // Additional request payloads — NOTE(review): presumably for batched
  // invocations; confirm against the bridge implementation.
  payloads?: Array<VercelProxyRequest>;
  features?: Record<string, boolean>;
  // Cipher parameters for the encrypted streaming response callback; the IV
  // and key are base64-encoded strings.
  responseCallbackCipher?: CipherCCMTypes;
  responseCallbackCipherIV?: string;
  responseCallbackCipherKey?: string;
  // Stream identifier written at the head of the callback TCP stream.
  responseCallbackStream?: string;
  // TCP URL the streamed response is written to.
  responseCallbackUrl?: string;
}
/** Fully buffered (non-streaming) response shape. */
export interface VercelProxyResponse {
  statusCode: number;
  headers: OutgoingHttpHeaders;
  body: string;
  encoding: BufferEncoding;
}
/** Streaming invocations resolve with an empty object; data flows over the
 * response callback instead. */
export type VercelStreamProxyResponse = Record<string, never>;
/** Minimal duck-typed server contract the bridge can drive. */
export interface ServerLike {
  timeout?: number;
  listen: (
    opts: {
      host?: string;
      port?: number;
    },
    callback: (this: Server | null) => void
  ) => Server | void;
}
/** Paths and flags used to assemble the generated launcher entrypoint. */
export type LauncherConfiguration = {
  entrypointPath: string;
  bridgePath: string;
  helpersPath: string;
  sourcemapSupportPath: string;
  shouldAddHelpers?: boolean;
  shouldAddSourcemapSupport?: boolean;
  awsLambdaHandler?: string;
  useRequire?: boolean;
};
/** Parsed cookies keyed by cookie name. */
export type VercelRequestCookies = { [key: string]: string };
/** Parsed query string; a key repeated in the URL yields a string array. */
export type VercelRequestQuery = { [key: string]: string | string[] };
/** Parsed request body; runtime shape depends on the content-type header. */
export type VercelRequestBody = any;
/** IncomingMessage augmented with the request-side parsing helpers. */
export type VercelRequest = IncomingMessage & {
  query: VercelRequestQuery;
  cookies: VercelRequestCookies;
  body: VercelRequestBody;
};
/** ServerResponse augmented with chainable convenience helpers. */
export type VercelResponse = ServerResponse & {
  send: (body: any) => VercelResponse;
  json: (jsonBody: any) => VercelResponse;
  status: (statusCode: number) => VercelResponse;
  redirect: (statusOrUrl: string | number, url?: string) => VercelResponse;
};

View File

@@ -10,10 +10,6 @@ const setupFiles = async (entrypoint, shouldAddHelpers) => {
join(__dirname, '../dist/helpers.js'),
join(__dirname, 'lambda/helpers.js')
);
await fs.copyFile(
require.resolve('@vercel/node-bridge/bridge'),
join(__dirname, 'lambda/bridge.js')
);
await fs.copyFile(
join(process.cwd(), entrypoint),
join(__dirname, 'lambda/entrypoint.js')

View File

@@ -3,13 +3,6 @@ const fs = require('fs-extra');
const execa = require('execa');
const { join } = require('path');
async function copyToDist(sourcePath, outDir) {
return fs.copyFile(
join(__dirname, sourcePath),
join(outDir, 'edge-functions/edge-handler-template.js')
);
}
async function main() {
const srcDir = join(__dirname, 'src');
const outDir = join(__dirname, 'dist');
@@ -54,7 +47,15 @@ async function main() {
join(__dirname, 'test/fixtures/15-helpers/ts/types.d.ts')
);
await copyToDist('src/edge-functions/edge-handler-template.js', outDir);
await fs.copyFile(
join(__dirname, 'src/serverless-functions/dynamic-import.js'),
join(outDir, 'serverless-functions/dynamic-import.js')
);
await fs.copyFile(
join(__dirname, 'src/edge-functions/edge-handler-template.js'),
join(outDir, 'edge-functions/edge-handler-template.js')
);
}
main().catch(err => {

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/node",
"version": "2.11.0",
"version": "2.12.0",
"license": "Apache-2.0",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/runtimes#official-runtimes/node-js",
@@ -11,7 +11,7 @@
},
"scripts": {
"build": "node build",
"test": "jest --env node --verbose --bail --runInBand",
"test": "cross-env NODE_OPTIONS=--experimental-vm-modules jest --env node --verbose --bail --runInBand",
"test-unit": "pnpm test test/unit",
"test-e2e": "pnpm test test/integration"
},
@@ -22,9 +22,11 @@
"@edge-runtime/vm": "2.0.0",
"@types/node": "14.18.33",
"@vercel/build-utils": "6.7.1",
"@vercel/error-utils": "1.0.8",
"@vercel/node-bridge": "4.0.1",
"@vercel/static-config": "2.0.16",
"edge-runtime": "2.0.0",
"async-listen": "1.2.0",
"edge-runtime": "2.1.4",
"esbuild": "0.14.47",
"exit-hook": "2.2.1",
"node-fetch": "2.6.7",
@@ -48,6 +50,7 @@
"@vercel/nft": "0.22.5",
"content-type": "1.0.4",
"cookie": "0.4.0",
"cross-env": "7.0.3",
"etag": "1.8.1",
"execa": "3.2.0",
"fs-extra": "11.1.0",

View File

@@ -5,36 +5,24 @@ if (!entrypoint) {
throw new Error('`VERCEL_DEV_ENTRYPOINT` must be defined');
}
delete process.env.TS_NODE_TRANSPILE_ONLY;
delete process.env.TS_NODE_COMPILER_OPTIONS;
import { join } from 'path';
const useRequire = process.env.VERCEL_DEV_IS_ESM !== '1';
import { createServer, Server, IncomingMessage, ServerResponse } from 'http';
import { VercelProxyResponse } from '@vercel/node-bridge/types';
import type { Headers } from 'node-fetch';
import type { VercelProxyResponse } from './types';
import { Config } from '@vercel/build-utils';
import { createEdgeEventHandler } from './edge-functions/edge-handler';
import { createServer, IncomingMessage, ServerResponse } from 'http';
import { createServerlessEventHandler } from './serverless-functions/serverless-handler';
import { EdgeRuntimes, isEdgeRuntime, logError } from './utils';
import { getConfig } from '@vercel/static-config';
import { Project } from 'ts-morph';
import { EdgeRuntimes, isEdgeRuntime, logError } from './utils';
import { createEdgeEventHandler } from './edge-functions/edge-handler';
import { createServerlessEventHandler } from './serverless-functions/serverless-handler';
import listen from 'async-listen';
function listen(server: Server, port: number, host: string): Promise<void> {
return new Promise(resolve => {
server.listen(port, host, () => {
resolve();
});
});
}
const parseConfig = (entryPointPath: string) =>
getConfig(new Project(), entryPointPath);
function parseRuntime(
entrypoint: string,
entryPointPath: string
): string | undefined {
const project = new Project();
const staticConfig = getConfig(project, entryPointPath);
const runtime = staticConfig?.runtime;
function getRuntime(runtime: string | undefined, entrypoint: string) {
if (runtime && !isEdgeRuntime(runtime)) {
throw new Error(
`Invalid function runtime "${runtime}" for "${entrypoint}". Valid runtimes are: ${JSON.stringify(
@@ -42,7 +30,6 @@ function parseRuntime(
)}. Learn more: https://vercel.link/creating-edge-functions`
);
}
return runtime;
}
@@ -52,7 +39,8 @@ async function createEventHandler(
options: { shouldAddHelpers: boolean }
): Promise<(request: IncomingMessage) => Promise<VercelProxyResponse>> {
const entrypointPath = join(process.cwd(), entrypoint!);
const runtime = parseRuntime(entrypoint, entrypointPath);
const staticConfig = parseConfig(entrypointPath);
const runtime = getRuntime(staticConfig?.runtime, entrypoint);
// `middleware.js`/`middleware.ts` file is always run as
// an Edge Function, otherwise needs to be opted-in via
@@ -67,6 +55,7 @@ async function createEventHandler(
}
return createServerlessEventHandler(entrypointPath, {
mode: staticConfig?.supportsResponseStreaming ? 'streaming' : 'buffer',
shouldAddHelpers: options.shouldAddHelpers,
useRequire,
});
@@ -87,7 +76,7 @@ async function main() {
);
const proxyServer = createServer(onDevRequest);
await listen(proxyServer, 0, '127.0.0.1');
await listen(proxyServer, { host: '127.0.0.1', port: 0 });
try {
handleEvent = await createEventHandler(entrypoint!, config, {
@@ -124,14 +113,19 @@ export async function onDevRequest(
}
try {
const result = await handleEvent(req);
res.statusCode = result.statusCode;
for (const [key, value] of Object.entries(result.headers)) {
if (typeof value !== 'undefined') {
const { headers, body, status } = await handleEvent(req);
res.statusCode = status;
for (const [key, value] of headers as unknown as Headers) {
if (value !== undefined) {
res.setHeader(key, value);
}
}
res.end(Buffer.from(result.body, result.encoding));
if (body instanceof Buffer) {
res.end(body);
} else {
body.pipe(res);
}
} catch (error: any) {
res.statusCode = 500;
res.end(error.stack);

View File

@@ -1,49 +1,29 @@
// provided by the edge runtime:
/* global addEventListener */
function buildUrl(requestDetails) {
const host = requestDetails.headers['x-forwarded-host'] || '127.0.0.1';
const path = requestDetails.url || '/';
const allProtocols = requestDetails.headers['x-forwarded-proto'];
let proto;
if (allProtocols) {
// handle multi-protocol like: https,http://...
proto = allProtocols.split(/\b/).shift();
} else {
proto = 'http';
}
return `${proto}://${host}${path}`;
function getUrl(url, headers) {
const urlObj = new URL(url);
const protocol = headers.get('x-forwarded-proto');
if (protocol) urlObj.protocol = protocol.split(/\b/).shift();
urlObj.host = headers.get('x-forwarded-host');
urlObj.port = headers.get('x-forwarded-port');
return urlObj.toString();
}
async function respond(
userEdgeHandler,
requestDetails,
event,
options,
dependencies
) {
async function respond(userEdgeHandler, event, options, dependencies) {
const { Request, Response } = dependencies;
const { isMiddleware } = options;
let body;
if (requestDetails.method !== 'GET' && requestDetails.method !== 'HEAD') {
if (requestDetails.body) {
body = Uint8Array.from(atob(requestDetails.body), c => c.charCodeAt(0));
}
}
const request = new Request(buildUrl(requestDetails), {
headers: requestDetails.headers,
method: requestDetails.method,
body: body,
});
event.request = request;
let response = await userEdgeHandler(event.request, event);
event.request.headers.set(
'host',
event.request.headers.get('x-forwarded-host')
);
let response = await userEdgeHandler(
new Request(
getUrl(event.request.url, event.request.headers),
event.request
),
event
);
if (!response) {
if (isMiddleware) {
@@ -85,10 +65,8 @@ async function parseRequestEvent(event) {
function registerFetchListener(userEdgeHandler, options, dependencies) {
addEventListener('fetch', async event => {
try {
const requestDetails = await parseRequestEvent(event);
const response = await respond(
userEdgeHandler,
requestDetails,
event,
options,
dependencies
@@ -100,11 +78,10 @@ function registerFetchListener(userEdgeHandler, options, dependencies) {
});
}
// for testing:
module.exports = {
buildUrl,
respond,
toResponseError,
getUrl,
parseRequestEvent,
registerFetchListener,
respond,
toResponseError,
};

View File

@@ -1,18 +1,19 @@
import { IncomingMessage } from 'http';
import { VercelProxyResponse } from '@vercel/node-bridge/types';
import { streamToBuffer } from '@vercel/build-utils';
import exitHook from 'exit-hook';
import { EdgeRuntime, runServer } from 'edge-runtime';
import type { EdgeContext } from '@edge-runtime/vm';
import esbuild from 'esbuild';
import fetch from 'node-fetch';
import { createEdgeWasmPlugin, WasmAssets } from './edge-wasm-plugin';
import { entrypointToOutputPath, logError } from '../utils';
import { readFileSync } from 'fs';
import {
createNodeCompatPlugin,
NodeCompatBindings,
} from './edge-node-compat-plugin';
import { EdgeRuntime, runServer } from 'edge-runtime';
import fetch, { Headers } from 'node-fetch';
import { isError } from '@vercel/error-utils';
import { readFileSync } from 'fs';
import { serializeBody, entrypointToOutputPath, logError } from '../utils';
import esbuild from 'esbuild';
import exitHook from 'exit-hook';
import type { HeadersInit } from 'node-fetch';
import type { VercelProxyResponse } from '../types';
import type { IncomingMessage } from 'http';
import { pathToFileURL } from 'url';
const NODE_VERSION_MAJOR = process.version.match(/^v(\d+)\.\d+/)?.[1];
const NODE_VERSION_IDENTIFIER = `node${NODE_VERSION_MAJOR}`;
@@ -26,17 +27,6 @@ const edgeHandlerTemplate = readFileSync(
`${__dirname}/edge-handler-template.js`
);
async function serializeRequest(message: IncomingMessage) {
const bodyBuffer = await streamToBuffer(message);
const body = bodyBuffer.toString('base64');
return JSON.stringify({
url: message.url,
method: message.method,
headers: message.headers,
body,
});
}
async function compileUserCode(
entrypointFullPath: string,
entrypointRelativePath: string,
@@ -63,7 +53,23 @@ async function compileUserCode(
sourcemap: 'inline',
legalComments: 'none',
bundle: true,
plugins: [edgeWasmPlugin, nodeCompatPlugin.plugin],
plugins: [
edgeWasmPlugin,
nodeCompatPlugin.plugin,
{
name: 'import.meta.url',
setup({ onLoad }) {
onLoad({ filter: /\.[cm]?js$/, namespace: 'file' }, args => {
let code = readFileSync(args.path, 'utf8');
code = code.replace(
/\bimport\.meta\.url\b/g,
JSON.stringify(pathToFileURL(__filename))
);
return { contents: code };
});
},
},
],
entryPoints: [entrypointFullPath],
write: false, // operate in memory
format: 'cjs',
@@ -95,14 +101,8 @@ async function compileUserCode(
// edge handler
${edgeHandlerTemplate};
const dependencies = {
Request,
Response
};
const options = {
isMiddleware,
entrypointLabel
};
const dependencies = { Request, Response };
const options = { isMiddleware, entrypointLabel };
registerFetchListener(userEdgeHandler, options, dependencies);
`;
@@ -111,16 +111,16 @@ async function compileUserCode(
wasmAssets,
nodeCompatBindings: nodeCompatPlugin.bindings,
};
} catch (error) {
} catch (error: unknown) {
// We can't easily show a meaningful stack trace from ncc -> edge-runtime.
// So, stick with just the message for now.
console.error(`Failed to compile user code for edge runtime.`);
logError(error);
if (isError(error)) logError(error);
return undefined;
}
}
async function createEdgeRuntime(params?: {
async function createEdgeRuntimeServer(params?: {
userCode: string;
wasmAssets: WasmAssets;
nodeCompatBindings: NodeCompatBindings;
@@ -133,9 +133,9 @@ async function createEdgeRuntime(params?: {
const wasmBindings = await params.wasmAssets.getContext();
const nodeCompatBindings = params.nodeCompatBindings.getContext();
const edgeRuntime = new EdgeRuntime({
const runtime = new EdgeRuntime({
initialCode: params.userCode,
extend: (context: EdgeContext) => {
extend: context => {
Object.assign(context, {
// This is required for esbuild wrapping logic to resolve
module: {},
@@ -158,11 +158,10 @@ async function createEdgeRuntime(params?: {
},
});
const server = await runServer({ runtime: edgeRuntime });
exitHook(server.close);
const server = await runServer({ runtime });
exitHook(() => server.close());
return server;
} catch (error) {
} catch (error: any) {
// We can't easily show a meaningful stack trace from ncc -> edge-runtime.
// So, stick with just the message for now.
console.error('Failed to instantiate edge runtime.');
@@ -182,7 +181,7 @@ export async function createEdgeEventHandler(
entrypointRelativePath,
isMiddleware
);
const server = await createEdgeRuntime(userCode);
const server = await createEdgeRuntimeServer(userCode);
return async function (request: IncomingMessage) {
if (!server) {
@@ -192,17 +191,23 @@ export async function createEdgeEventHandler(
process.exit(1);
}
const response = await fetch(server.url, {
redirect: 'manual',
method: 'post',
body: await serializeRequest(request),
});
const headers = new Headers(request.headers as HeadersInit);
const body: Buffer | string | undefined = await serializeBody(request);
if (body !== undefined) headers.set('content-length', String(body.length));
const body = await response.text();
const url = new URL(request.url ?? '/', server.url);
const response = await fetch(url, {
body,
headers,
method: request.method,
redirect: 'manual',
});
const isUserError =
response.headers.get('x-vercel-failed') === 'edge-wrapper';
if (isUserError && response.status >= 500) {
const body = await response.text();
// We can't currently get a real stack trace from the Edge Function error,
// but we can fake a basic one that is still useful to the user.
const fakeStackTrace = ` at (${entrypointRelativePath})`;
@@ -210,6 +215,7 @@ export async function createEdgeEventHandler(
entrypointRelativePath,
isZeroConfig
);
console.log(
`Error from API Route ${requestPath}: ${body}\n${fakeStackTrace}`
);
@@ -220,9 +226,9 @@ export async function createEdgeEventHandler(
}
return {
statusCode: response.status,
headers: response.headers.raw(),
body,
status: response.status,
headers: response.headers,
body: response.body,
encoding: 'utf8',
};
};

View File

@@ -1,3 +1,4 @@
import { isErrnoException } from '@vercel/error-utils';
import url from 'url';
import { spawn } from 'child_process';
import {
@@ -247,13 +248,16 @@ async function compile(
fsCache.set(relPath, entry);
sourceCache.set(relPath, source);
return source;
} catch (e) {
if (e.code === 'ENOENT' || e.code === 'EISDIR') {
} catch (error: unknown) {
if (
isErrnoException(error) &&
(error.code === 'ENOENT' || error.code === 'EISDIR')
) {
// `null` represents a not found
sourceCache.set(relPath, null);
return null;
}
throw e;
throw error;
}
},
}
@@ -549,8 +553,8 @@ export const startDevServer: StartDevServer = async opts => {
filename: 'package.json',
});
const pkg = pathToPkg ? require_(pathToPkg) : {};
const isTypescript = ['.ts', '.tsx', '.mts', '.cts'].includes(ext);
const maybeTranspile = isTypescript || !['.cjs', '.mjs'].includes(ext);
const isTypeScript = ['.ts', '.tsx', '.mts', '.cts'].includes(ext);
const maybeTranspile = isTypeScript || !['.cjs', '.mjs'].includes(ext);
const isEsm =
ext === '.mjs' ||
ext === '.mts' ||
@@ -588,10 +592,10 @@ export const startDevServer: StartDevServer = async opts => {
if (pathToTsConfig) {
try {
tsConfig = ts.readConfigFile(pathToTsConfig, ts.sys.readFile).config;
} catch (err) {
if (err.code !== 'ENOENT') {
} catch (error: unknown) {
if (isErrnoException(error) && error.code !== 'ENOENT') {
console.error(`Error while parsing "${pathToTsConfig}"`);
throw err;
throw error;
}
}
}
@@ -616,7 +620,7 @@ export const startDevServer: StartDevServer = async opts => {
entrypoint,
require_,
isEsm,
isTypeScript: isTypescript,
isTypeScript,
maybeTranspile,
meta,
tsConfig,
@@ -627,7 +631,7 @@ export const startDevServer: StartDevServer = async opts => {
if (message.state === 'message') {
// "message" event
if (isTypescript) {
if (isTypeScript) {
// Invoke `tsc --noEmit` asynchronously in the background, so
// that the HTTP request is not blocked by the type checking.
doTypeCheck(opts, pathToTsConfig).catch((err: Error) => {
@@ -674,11 +678,8 @@ async function doTypeCheck(
const json = JSON.stringify(tsconfig, null, '\t');
await fsp.mkdir(entrypointCacheDir, { recursive: true });
await fsp.writeFile(tsconfigPath, json, { flag: 'wx' });
} catch (err) {
// Don't throw if the file already exists
if (err.code !== 'EEXIST') {
throw err;
}
} catch (error: unknown) {
if (isErrnoException(error) && error.code !== 'EEXIST') throw error;
}
const child = spawn(

View File

@@ -0,0 +1,13 @@
'use strict';
const { pathToFileURL } = require('url');
const { isAbsolute } = require('path');
function dynamicImport(filepath) {
const id = isAbsolute(filepath) ? pathToFileURL(filepath).href : filepath;
return import(id);
}
module.exports = {
dynamicImport,
};

View File

@@ -1,21 +1,36 @@
import type {
VercelRequest,
VercelResponse,
VercelRequestCookies,
VercelRequestQuery,
VercelRequestBody,
} from './types';
import { Server } from 'http';
import type { Bridge } from './bridge';
import type { ServerResponse, IncomingMessage } from 'http';
import { serializeBody } from '../utils';
import { PassThrough } from 'stream';
function getBodyParser(req: VercelRequest, body: Buffer) {
type VercelRequestCookies = { [key: string]: string };
type VercelRequestQuery = { [key: string]: string | string[] };
type VercelRequestBody = any;
export type VercelRequest = IncomingMessage & {
query: VercelRequestQuery;
cookies: VercelRequestCookies;
body: VercelRequestBody;
};
export type VercelResponse = ServerResponse & {
send: (body: any) => VercelResponse;
json: (jsonBody: any) => VercelResponse;
status: (statusCode: number) => VercelResponse;
redirect: (statusOrUrl: string | number, url?: string) => VercelResponse;
};
class ApiError extends Error {
readonly statusCode: number;
constructor(statusCode: number, message: string) {
super(message);
this.statusCode = statusCode;
}
}
function getBodyParser(body: Buffer, contentType: string | undefined) {
return function parseBody(): VercelRequestBody {
if (!req.headers['content-type']) {
return undefined;
}
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { parse: parseContentType } = require('content-type');
const { type } = parseContentType(req.headers['content-type']);
const { type } = parseContentType(contentType);
if (type === 'application/json') {
try {
@@ -26,43 +41,32 @@ function getBodyParser(req: VercelRequest, body: Buffer) {
}
}
if (type === 'application/octet-stream') {
return body;
}
if (type === 'application/octet-stream') return body;
if (type === 'application/x-www-form-urlencoded') {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { parse: parseQS } = require('querystring');
// note: querystring.parse does not produce an iterable object
// https://nodejs.org/api/querystring.html#querystring_querystring_parse_str_sep_eq_options
return parseQS(body.toString());
}
if (type === 'text/plain') {
return body.toString();
}
if (type === 'text/plain') return body.toString();
return undefined;
};
}
function getQueryParser({ url = '/' }: VercelRequest) {
function getQueryParser({ url = '/' }: IncomingMessage) {
return function parseQuery(): VercelRequestQuery {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { parse: parseURL } = require('url');
return parseURL(url, true).query;
};
}
function getCookieParser(req: VercelRequest) {
function getCookieParser(req: IncomingMessage) {
return function parseCookie(): VercelRequestCookies {
const header: undefined | string | string[] = req.headers.cookie;
if (!header) {
return {};
}
// eslint-disable-next-line @typescript-eslint/no-var-requires
if (!header) return {};
const { parse } = require('cookie');
return parse(Array.isArray(header) ? header.join(';') : header);
};
@@ -73,6 +77,13 @@ function status(res: VercelResponse, statusCode: number): VercelResponse {
return res;
}
function setCharset(type: string, charset: string) {
const { parse, format } = require('content-type');
const parsed = parse(type);
parsed.parameters.charset = charset;
return format(parsed);
}
function redirect(
res: VercelResponse,
statusOrUrl: string | number,
@@ -91,23 +102,42 @@ function redirect(
return res;
}
function setCharset(type: string, charset: string) {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { parse, format } = require('content-type');
const parsed = parse(type);
parsed.parameters.charset = charset;
return format(parsed);
function setLazyProp<T>(req: IncomingMessage, prop: string, getter: () => T) {
const opts = { configurable: true, enumerable: true };
const optsReset = { ...opts, writable: true };
Object.defineProperty(req, prop, {
...opts,
get: () => {
const value = getter();
// we set the property on the object to avoid recalculating it
Object.defineProperty(req, prop, { ...optsReset, value });
return value;
},
set: value => {
Object.defineProperty(req, prop, { ...optsReset, value });
},
});
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function createETag(body: any, encoding: 'utf8' | undefined) {
// eslint-disable-next-line @typescript-eslint/no-var-requires
const etag = require('etag');
const buf = !Buffer.isBuffer(body) ? Buffer.from(body, encoding) : body;
return etag(buf, { weak: true });
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function json(
req: VercelRequest,
res: VercelResponse,
jsonBody: any
): VercelResponse {
const body = JSON.stringify(jsonBody);
if (!res.getHeader('content-type')) {
res.setHeader('content-type', 'application/json; charset=utf-8');
}
return send(req, res, body);
}
function send(
req: VercelRequest,
res: VercelResponse,
@@ -209,103 +239,37 @@ function send(
return res;
}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
function json(
req: VercelRequest,
res: VercelResponse,
jsonBody: any
): VercelResponse {
const body = JSON.stringify(jsonBody);
// content-type
if (!res.getHeader('content-type')) {
res.setHeader('content-type', 'application/json; charset=utf-8');
}
return send(req, res, body);
function restoreBody(req: IncomingMessage, body: Buffer) {
const replicateBody = new PassThrough();
const on = replicateBody.on.bind(replicateBody);
const originalOn = req.on.bind(req);
req.read = replicateBody.read.bind(replicateBody);
req.on = req.addListener = (name, cb) =>
// @ts-expect-error
name === 'data' || name === 'end' ? on(name, cb) : originalOn(name, cb);
replicateBody.write(body);
replicateBody.end();
}
export class ApiError extends Error {
readonly statusCode: number;
constructor(statusCode: number, message: string) {
super(message);
this.statusCode = statusCode;
}
async function readBody(req: IncomingMessage) {
const body = (await serializeBody(req)) || Buffer.from('');
restoreBody(req, body);
return body;
}
export function sendError(
res: VercelResponse,
statusCode: number,
message: string
) {
res.statusCode = statusCode;
res.statusMessage = message;
res.end();
}
function setLazyProp<T>(req: VercelRequest, prop: string, getter: () => T) {
const opts = { configurable: true, enumerable: true };
const optsReset = { ...opts, writable: true };
Object.defineProperty(req, prop, {
...opts,
get: () => {
const value = getter();
// we set the property on the object to avoid recalculating it
Object.defineProperty(req, prop, { ...optsReset, value });
return value;
},
set: value => {
Object.defineProperty(req, prop, { ...optsReset, value });
},
});
}
export function createServerWithHelpers(
handler: (req: VercelRequest, res: VercelResponse) => void | Promise<void>,
bridge: Bridge
) {
const server = new Server(async (_req, _res) => {
const req = _req as VercelRequest;
const res = _res as VercelResponse;
try {
const reqId = req.headers['x-now-bridge-request-id'];
// don't expose this header to the client
delete req.headers['x-now-bridge-request-id'];
if (typeof reqId !== 'string') {
throw new ApiError(500, 'Internal Server Error');
}
const event = bridge.consumeEvent(reqId);
setLazyProp<VercelRequestCookies>(req, 'cookies', getCookieParser(req));
setLazyProp<VercelRequestQuery>(req, 'query', getQueryParser(req));
setLazyProp<VercelRequestBody>(
req,
'body',
getBodyParser(req, event.body)
);
res.status = statusCode => status(res, statusCode);
res.redirect = (statusOrUrl, url) => redirect(res, statusOrUrl, url);
res.send = body => send(req, res, body);
res.json = jsonBody => json(req, res, jsonBody);
try {
await handler(req, res);
} catch (err) {
console.log(`Error from API Route ${req.url}: ${err.stack}`);
process.exit(1);
}
} catch (err) {
console.log(`Error while handling ${req.url}: ${err.message}`);
process.exit(1);
}
});
return server;
export async function addHelpers(_req: IncomingMessage, _res: ServerResponse) {
const req = _req as VercelRequest;
const res = _res as VercelResponse;
setLazyProp<VercelRequestCookies>(req, 'cookies', getCookieParser(req));
setLazyProp<VercelRequestQuery>(req, 'query', getQueryParser(req));
const contentType = req.headers['content-type'];
const body =
contentType === undefined ? Buffer.from('') : await readBody(req);
setLazyProp<VercelRequestBody>(req, 'body', getBodyParser(body, contentType));
res.status = statusCode => status(res, statusCode);
res.redirect = (statusOrUrl, url) => redirect(res, statusOrUrl, url);
res.send = body => send(req, res, body);
res.json = jsonBody => json(req, res, jsonBody);
}

View File

@@ -1,58 +1,92 @@
import { IncomingMessage } from 'http';
import { Readable } from 'stream';
import type { Bridge } from '@vercel/node-bridge/bridge';
import { getVercelLauncher } from '@vercel/node-bridge/launcher.js';
import { VercelProxyResponse } from '@vercel/node-bridge/types';
import { addHelpers } from './helpers';
import { createServer } from 'http';
// @ts-expect-error
import { dynamicImport } from './dynamic-import.js';
import { serializeBody } from '../utils';
import { streamToBuffer } from '@vercel/build-utils';
import exitHook from 'exit-hook';
import fetch from 'node-fetch';
import listen from 'async-listen';
import type { HeadersInit } from 'node-fetch';
import type { ServerResponse, IncomingMessage } from 'http';
import type { VercelProxyResponse } from '../types';
import type { VercelRequest, VercelResponse } from './helpers';
function rawBody(readable: Readable): Promise<Buffer> {
return new Promise((resolve, reject) => {
let bytes = 0;
const chunks: Buffer[] = [];
readable.on('error', reject);
readable.on('data', chunk => {
chunks.push(chunk);
bytes += chunk.length;
});
readable.on('end', () => {
resolve(Buffer.concat(chunks, bytes));
});
type ServerlessServerOptions = {
shouldAddHelpers: boolean;
useRequire: boolean;
mode: 'streaming' | 'buffer';
};
type ServerlessFunctionSignature = (
req: IncomingMessage | VercelRequest,
res: ServerResponse | VercelResponse
) => void;
async function createServerlessServer(
userCode: ServerlessFunctionSignature,
options: ServerlessServerOptions
) {
const server = createServer(async (req, res) => {
if (options.shouldAddHelpers) await addHelpers(req, res);
return userCode(req, res);
});
exitHook(() => server.close());
return { url: await listen(server) };
}
async function compileUserCode(
entrypointPath: string,
options: ServerlessServerOptions
) {
let fn = options.useRequire
? require(entrypointPath)
: await dynamicImport(entrypointPath);
/**
* In some cases we might have nested default props due to TS => JS
*/
for (let i = 0; i < 5; i++) {
if (fn.default) fn = fn.default;
}
return fn;
}
export async function createServerlessEventHandler(
entrypoint: string,
options: {
shouldAddHelpers: boolean;
useRequire: boolean;
}
entrypointPath: string,
options: ServerlessServerOptions
): Promise<(request: IncomingMessage) => Promise<VercelProxyResponse>> {
const launcher = getVercelLauncher({
entrypointPath: entrypoint,
helpersPath: './helpers.js',
shouldAddHelpers: options.shouldAddHelpers,
useRequire: options.useRequire,
// not used
bridgePath: '',
sourcemapSupportPath: '',
});
const bridge: Bridge = launcher();
const userCode = await compileUserCode(entrypointPath, options);
const server = await createServerlessServer(userCode, options);
return async function (request: IncomingMessage) {
const body = await rawBody(request);
const event = {
Action: 'Invoke',
body: JSON.stringify({
method: request.method,
path: request.url,
headers: request.headers,
encoding: 'base64',
body: body.toString('base64'),
}),
};
return bridge.launcher(event, {
callbackWaitsForEmptyEventLoop: false,
const url = new URL(request.url ?? '/', server.url);
const response = await fetch(url, {
body: await serializeBody(request),
headers: {
...request.headers,
host: request.headers['x-forwarded-host'],
} as unknown as HeadersInit,
method: request.method,
redirect: 'manual',
});
let body;
if (options.mode === 'streaming') {
body = response.body;
} else {
body = await streamToBuffer(response.body);
response.headers.delete('transfer-encoding');
//@ts-expect-error
response.headers.set('content-length', body.length);
}
return {
status: response.status,
headers: response.headers,
body,
encoding: 'utf8',
};
};
}

View File

@@ -1,4 +1,5 @@
import { ServerResponse, IncomingMessage } from 'http';
import type { Headers } from 'node-fetch';
export type VercelRequestCookies = { [key: string]: string };
export type VercelRequestQuery = { [key: string]: string | string[] };
@@ -39,3 +40,10 @@ export type NowResponse = VercelResponse;
/** @deprecated Use VercelApiHandler instead. */
export type NowApiHandler = VercelApiHandler;
export interface VercelProxyResponse {
status: number;
headers: Headers;
body: Buffer | NodeJS.ReadableStream;
encoding: BufferEncoding;
}

View File

@@ -1,6 +1,7 @@
import { extname } from 'path';
import { debug, streamToBuffer } from '@vercel/build-utils';
import { pathToRegexp } from 'path-to-regexp';
import { debug } from '@vercel/build-utils';
import type { IncomingMessage } from 'http';
import { extname } from 'path';
export function getRegExpFromMatchers(matcherOrMatchers: unknown): string {
if (!matcherOrMatchers) {
@@ -79,3 +80,11 @@ export function isEdgeRuntime(runtime?: string): runtime is EdgeRuntimes {
Object.values(EdgeRuntimes).includes(runtime as EdgeRuntimes)
);
}
export async function serializeBody(
request: IncomingMessage
): Promise<Buffer | undefined> {
return request.method !== 'GET' && request.method !== 'HEAD'
? await streamToBuffer(request)
: undefined;
}

View File

@@ -1,6 +1,5 @@
{
"private": true,
"type": "module",
"packageManager": "yarn@1.22.19",
"scripts": {
"test": "rm -rf dist && tsc && cat dist/api/index.js"

View File

@@ -24,7 +24,6 @@ function testForkDevServer(entrypoint: string) {
test('runs an edge function that uses `buffer`', async () => {
const child = testForkDevServer('./edge-buffer.js');
try {
const result = await readMessage(child);
if (result.state !== 'message') {
@@ -63,12 +62,12 @@ test('runs a mjs endpoint', async () => {
);
expect({
status: response.status,
headers: response.headers.raw(),
headers: Object.fromEntries(response.headers),
text: await response.text(),
}).toEqual({
status: 200,
headers: expect.objectContaining({
'x-hello': ['world'],
'x-hello': 'world',
}),
text: 'Hello, world!',
});
@@ -96,12 +95,12 @@ test('runs a esm typescript endpoint', async () => {
);
expect({
status: response.status,
headers: response.headers.raw(),
headers: Object.fromEntries(response.headers),
text: await response.text(),
}).toEqual({
status: 200,
headers: expect.objectContaining({
'x-hello': ['world'],
'x-hello': 'world',
}),
text: 'Hello, world!',
});

View File

@@ -1,101 +0,0 @@
import { Response, Request } from 'node-fetch';
import {
buildUrl,
respond,
// @ts-ignore - this is a special patch file to allow importing from the template
} from '../../../src/edge-functions/edge-handler-template.js';
describe('edge-handler-template', () => {
describe('buildUrl()', () => {
test('works with basic proto', async () => {
const url = buildUrl({
url: '/api/add',
headers: {
'x-forwarded-proto': 'https',
'x-forwarded-host': 'somewhere.com',
},
});
expect(url).toBe('https://somewhere.com/api/add');
});
test('works with multi proto', async () => {
const url = buildUrl({
url: '/api/add',
headers: {
'x-forwarded-proto': 'https,http',
'x-forwarded-host': 'somewhere.com',
},
});
expect(url).toBe('https://somewhere.com/api/add');
});
test('url falls back to `/`', async () => {
const url = buildUrl({
// missing url
headers: {
'x-forwarded-proto': 'https',
'x-forwarded-host': 'somewhere.com',
},
});
expect(url).toBe('https://somewhere.com/');
});
test('host header falls back to `127.0.0.1`', async () => {
const url = buildUrl({
url: '/api/add',
headers: {
'x-forwarded-proto': 'https',
// missing 'x-forwarded-host'
},
});
expect(url).toBe('https://127.0.0.1/api/add');
});
test('proto header falls back to `http`', async () => {
const url = buildUrl({
url: '/api/add',
headers: {
// missing 'x-forwarded-proto'
'x-forwarded-host': 'somewhere.com',
},
});
expect(url).toBe('http://somewhere.com/api/add');
});
});
describe('respond()', () => {
test('works', async () => {
const request = {
url: '/api/add',
headers: {
'x-forwarded-proto': 'https',
'x-forwarded-host': 'somewhere.com',
},
};
function userEdgeHandler(req: Request) {
return new Response(`hello from: ${req.url}`);
}
const event = {};
const isMiddleware = false;
const entrypointLabel = 'api/add.js';
const response = await respond(
userEdgeHandler,
request,
event,
{
isMiddleware,
entrypointLabel,
},
{
Request,
Response,
}
);
expect(await response.text()).toBe(
'hello from: https://somewhere.com/api/add'
);
});
});
});

View File

@@ -0,0 +1,108 @@
import { Headers, Response, Request } from 'node-fetch';
import {
getUrl,
respond,
// @ts-ignore - this is a special patch file to allow importing from the template
} from '../../../src/edge-functions/edge-handler-template.js';
describe('edge-handler-template', () => {
describe('getUrl()', () => {
test('single `x-forwarded-proto` value', async () => {
expect(
getUrl(
'http://127.0.0.1:51126/api/add',
new Headers({
'x-forwarded-port': '',
'x-forwarded-proto': 'https',
'x-forwarded-host': 'somewhere.com',
})
)
).toBe('https://somewhere.com/api/add');
});
test('multiple `x-forwarded-proto` value', async () => {
expect(
getUrl(
'https://127.0.0.1:51126/api/add',
new Headers({
'x-forwarded-port': '',
'x-forwarded-proto': 'https,http',
'x-forwarded-host': 'somewhere.com',
})
)
).toBe('https://somewhere.com/api/add');
});
test('keep the path as part of the URL', async () => {
expect(
getUrl(
'https://127.0.0.1:51126/',
new Headers({
'x-forwarded-port': '',
'x-forwarded-proto': 'https,http',
'x-forwarded-host': 'somewhere.com',
})
)
).toBe('https://somewhere.com/');
});
test('respect `x-forwarded-host` with no `x-forwarded-proto`', async () => {
expect(
getUrl(
'https://127.0.0.1:51126/api/add',
new Headers({
'x-forwarded-host': 'somewhere.com',
'x-forwarded-port': '',
})
)
).toBe('https://somewhere.com/api/add');
});
});
describe('respond()', () => {
test("don't expose internal proxy details", async () => {
function userEdgeHandler(req: Request) {
return new Response(`hello from: ${req.url}`);
}
const event = {
request: new Request('http://127.0.0.1:60705/api/add', {
headers: {
accept: '*/*',
'accept-encoding': 'gzip,deflate',
connection: 'close',
host: '127.0.0.1:60705',
'user-agent': 'curl/7.86.0',
'x-forwarded-for': '::ffff:127.0.0.1',
'x-forwarded-host': 'somewhere.com',
'x-forwarded-port': '',
'x-forwarded-proto': 'https,http',
'x-real-ip': '::ffff:127.0.0.1',
'x-vercel-deployment-url': 'localhost:1337',
'x-vercel-forwarded-for': '::ffff:127.0.0.1',
'x-vercel-id': 'dev1::dev1::iaq68-1681934030421-110d3964f516',
},
}),
};
const isMiddleware = false;
const entrypointLabel = 'api/add.js';
const response = await respond(
userEdgeHandler,
event,
{
isMiddleware,
entrypointLabel,
},
{
Request,
Response,
}
);
expect(await response.text()).toBe(
'hello from: https://somewhere.com/api/add'
);
});
});
});

View File

@@ -0,0 +1,33 @@
// @ts-expect-error
import { dynamicImport } from '../../../src/serverless-functions/dynamic-import.js';
import { resolve } from 'path';
describe('dynamic-import', () => {
  test('load esm code', async () => {
    const entrypointPath = resolve(
      __dirname,
      '../../dev-fixtures/esm-module.mjs'
    );
    const fn = await dynamicImport(entrypointPath);

    // Minimal stand-ins for the Node.js response object, capturing what the
    // loaded handler writes so it can be asserted on afterwards.
    let buffer = '';
    const headers: Record<string, string> = {};
    const res = {
      send(data: string) {
        buffer = data;
        return res;
      },
      setHeader(key: string, value: string) {
        return (headers[key] = value);
      },
      end() {},
    };

    fn.default({}, res);
    expect(buffer).toBe('Hello, world!');
    expect(headers).toStrictEqual({ 'x-hello': 'world' });
  });
});

View File

@@ -1,6 +1,6 @@
{
"name": "@vercel/static-build",
"version": "1.3.24",
"version": "1.3.25",
"license": "Apache-2.0",
"main": "./dist/index",
"homepage": "https://vercel.com/docs/build-step",
@@ -19,8 +19,8 @@
"test-e2e": "pnpm test test/integration-*.test.js"
},
"dependencies": {
"@vercel/gatsby-plugin-vercel-analytics": "1.0.9",
"@vercel/gatsby-plugin-vercel-builder": "1.2.9"
"@vercel/gatsby-plugin-vercel-analytics": "1.0.10",
"@vercel/gatsby-plugin-vercel-builder": "1.2.10"
},
"devDependencies": {
"@types/aws-lambda": "8.10.64",

View File

@@ -403,7 +403,7 @@ export const build: BuildV2 = async ({
break;
default:
debug(
`No analytics plugin injected for framework ${framework.slug}`
`No Web Vitals plugin injected for framework ${framework.slug}`
);
break;
}

View File

@@ -7,11 +7,11 @@ import { DeepWriteable, readPackageJson, writePackageJson } from './_shared';
const ANALYTICS_PLUGIN_PACKAGE = '@nuxtjs/web-vitals';
export async function injectVercelAnalyticsPlugin(dir: string) {
// First update the `.nuxtrc` file to inject the analytics plugin.
// First update the `.nuxtrc` file to inject the Speed Insights (formerly Analytics) plugin.
// See: https://gist.github.com/pi0/23b5253ac19b4ed5a70add3b971545c9
const nuxtrcPath = join(dir, '.nuxtrc');
console.log(
`Injecting Nuxt.js analytics plugin "${ANALYTICS_PLUGIN_PACKAGE}" to \`${nuxtrcPath}\``
`Injecting Nuxt.js Speed Insights plugin "${ANALYTICS_PLUGIN_PACKAGE}" to \`${nuxtrcPath}\``
);
update(
{

106
pnpm-lock.yaml generated
View File

@@ -266,7 +266,7 @@ importers:
'@vercel-internals/get-package-json': '*'
'@vercel-internals/types': '*'
'@vercel/build-utils': 6.7.1
'@vercel/client': 12.4.10
'@vercel/client': 12.4.11
'@vercel/error-utils': 1.0.10
'@vercel/frameworks': 1.3.4
'@vercel/fs-detectors': 3.8.11
@@ -275,13 +275,13 @@ importers:
'@vercel/hydrogen': 0.0.63
'@vercel/ncc': 0.24.0
'@vercel/next': 3.7.5
'@vercel/node': 2.11.0
'@vercel/node': 2.12.0
'@vercel/python': 3.1.59
'@vercel/redwood': 1.1.14
'@vercel/remix-builder': 1.8.5
'@vercel/routing-utils': 2.2.0
'@vercel/ruby': 1.3.75
'@vercel/static-build': 1.3.24
'@vercel/static-build': 1.3.25
'@zeit/source-map-support': 0.6.2
ajv: 6.12.2
alpha-sort: 2.0.1
@@ -657,7 +657,7 @@ importers:
'@types/node': 14.18.33
'@types/react': 18.0.26
'@vercel/build-utils': 6.7.1
'@vercel/node': 2.11.0
'@vercel/node': 2.12.0
'@vercel/routing-utils': 2.2.0
esbuild: 0.14.47
etag: 1.8.1
@@ -837,13 +837,16 @@ importers:
'@types/node-fetch': ^2.6.1
'@types/test-listen': 1.1.0
'@vercel/build-utils': 6.7.1
'@vercel/error-utils': 1.0.8
'@vercel/ncc': 0.24.0
'@vercel/nft': 0.22.5
'@vercel/node-bridge': 4.0.1
'@vercel/static-config': 2.0.16
async-listen: 1.2.0
content-type: 1.0.4
cookie: 0.4.0
edge-runtime: 2.0.0
cross-env: 7.0.3
edge-runtime: 2.1.4
esbuild: 0.14.47
etag: 1.8.1
execa: 3.2.0
@@ -860,9 +863,11 @@ importers:
'@edge-runtime/vm': 2.0.0
'@types/node': 14.18.33
'@vercel/build-utils': link:../build-utils
'@vercel/node-bridge': link:../node-bridge
'@vercel/error-utils': 1.0.8
'@vercel/node-bridge': 4.0.1
'@vercel/static-config': link:../static-config
edge-runtime: 2.0.0
async-listen: 1.2.0
edge-runtime: 2.1.4
esbuild: 0.14.47
exit-hook: 2.2.1
node-fetch: 2.6.7
@@ -885,36 +890,13 @@ importers:
'@vercel/nft': 0.22.5
content-type: 1.0.4
cookie: 0.4.0
cross-env: 7.0.3
etag: 1.8.1
execa: 3.2.0
fs-extra: 11.1.0
source-map-support: 0.5.12
test-listen: 1.1.0
packages/node-bridge:
specifiers:
'@types/aws-lambda': 8.10.19
'@types/node': 14.18.33
content-type: 1.0.4
cookie: 0.4.0
etag: 1.8.1
execa: 3.2.0
fs-extra: 10.0.0
jsonlines: 0.1.1
test-listen: 1.1.0
typescript: 4.3.4
devDependencies:
'@types/aws-lambda': 8.10.19
'@types/node': 14.18.33
content-type: 1.0.4
cookie: 0.4.0
etag: 1.8.1
execa: 3.2.0
fs-extra: 10.0.0
jsonlines: 0.1.1
test-listen: 1.1.0
typescript: 4.3.4
packages/python:
specifiers:
'@types/execa': ^0.9.0
@@ -1054,8 +1036,8 @@ importers:
'@vercel/build-utils': 6.7.1
'@vercel/frameworks': 1.3.4
'@vercel/fs-detectors': 3.8.11
'@vercel/gatsby-plugin-vercel-analytics': 1.0.9
'@vercel/gatsby-plugin-vercel-builder': 1.2.9
'@vercel/gatsby-plugin-vercel-analytics': 1.0.10
'@vercel/gatsby-plugin-vercel-builder': 1.2.10
'@vercel/ncc': 0.24.0
'@vercel/routing-utils': 2.2.0
'@vercel/static-config': 2.0.16
@@ -2475,8 +2457,9 @@ packages:
dependencies:
'@jridgewell/trace-mapping': 0.3.9
/@edge-runtime/format/1.1.0:
resolution: {integrity: sha512-MkLDDtPhXZIMx83NykdFmOpF7gVWIdd6GBHYb8V/E+PKWvD2pK/qWx9B30oN1iDJ2XBm0SGDjz02S8nDHI9lMQ==}
/@edge-runtime/format/2.0.1:
resolution: {integrity: sha512-aE+9DtBvQyg349srixtXEUNauWtIv5HTKPy8Q9dvG1NvpldVIvvhcDBI+SuvDVM8kQl8phbYnp2NTNloBCn/Yg==}
engines: {node: '>=14'}
dev: false
/@edge-runtime/jest-environment/2.0.0:
@@ -2493,11 +2476,23 @@ packages:
/@edge-runtime/primitives/2.0.0:
resolution: {integrity: sha512-AXqUq1zruTJAICrllUvZcgciIcEGHdF6KJ3r6FM0n4k8LpFxZ62tPWVIJ9HKm+xt+ncTBUZxwgUaQ73QMUQEKw==}
/@edge-runtime/primitives/2.1.2:
resolution: {integrity: sha512-SR04SMDybALlhIYIi0hiuEUwIl0b7Sn+RKwQkX6hydg4+AKMzBNDFhj2nqHDD1+xkHArV9EhmJIb6iGjShwSzg==}
engines: {node: '>=14'}
dev: false
/@edge-runtime/vm/2.0.0:
resolution: {integrity: sha512-BOLrAX8IWHRXu1siZocwLguKJPEUv7cr+rG8tI4hvHgMdIsBWHJlLeB8EjuUVnIURFrUiM49lVKn8DRrECmngw==}
dependencies:
'@edge-runtime/primitives': 2.0.0
/@edge-runtime/vm/2.1.2:
resolution: {integrity: sha512-j4H5S26NJhYOyjVMN8T/YJuwwslfnEX1P0j6N2Rq1FaubgNowdYunA9nlO7lg8Rgjv6dqJ2zKuM7GD1HFtNSGw==}
engines: {node: '>=14'}
dependencies:
'@edge-runtime/primitives': 2.1.2
dev: false
/@emotion/hash/0.9.0:
resolution: {integrity: sha512-14FtKiHhy2QoPIzdTcvh//8OyBlknNs2nXRwIhG904opCby3l+9Xaf/wuPvICBF0rc1ZCNBd3nKe9cd2mecVkQ==}
dev: false
@@ -6442,6 +6437,10 @@ packages:
resolution: {integrity: sha512-wUYa8eUyTg1jPGRCrjpIxJm1r6hQE7ccbECWzDCAikuWG4iadS2zWrF7bsAcuUj7fTMf8sNFhmsknTJgyN3B3g==}
dev: false
/@vercel/error-utils/1.0.8:
resolution: {integrity: sha512-s+f7jP2oH1koICbQ8e3K9hOpOeUct7rbCnF9qsNwXemq850wAh2e90tp9R6oYBM0BNpiLRRm+oG5zD2sCIm3HQ==}
dev: false
/@vercel/frameworks/1.3.0:
resolution: {integrity: sha512-guXALpQLhL0bCvIjUhHbYFyS8XusZQ6RtjqCTq0eJM6p8QLun4DI1TToqbIah/o7DY3s+RAyC2OUyOAY91qH4w==}
dependencies:
@@ -6501,6 +6500,10 @@ packages:
- encoding
- supports-color
/@vercel/node-bridge/4.0.1:
resolution: {integrity: sha512-XEfKfnLGzlIBpad7eGNPql1HnMhoSTv9q3uDNC4axdaAC/kI5yvl8kXjuCPAXYvpbJnVQPpcSUC5/r5ap8F3jA==}
dev: false
/@vercel/remix-run-dev/1.15.0_@types+node@14.18.33:
resolution: {integrity: sha512-pQTM5WmOzrvhpPSHFDShwqX71YnLaTUxffhnly4MxVNKJ2WKV9zqx8bGQ/7cLfpEu9JfY2c+pVjYYb3wAMBt+Q==}
engines: {node: '>=14'}
@@ -7219,7 +7222,11 @@ packages:
/async-listen/1.2.0:
resolution: {integrity: sha512-CcEtRh/oc9Jc4uWeUwdpG/+Mb2YUHKmdaTf0gUr7Wa+bfp4xx70HOb3RuSTJMvqKNB1TkdTfjLdrcz2X4rkkZA==}
dev: true
/async-listen/2.0.3:
resolution: {integrity: sha512-WVLi/FGIQaXyfYyNvmkwKT1RZbkzszLLnmW/gFCc5lbVvN/0QQCWpBwRBk2OWSdkkmKRBc8yD6BrKsjA3XKaSw==}
engines: {node: '>= 14'}
dev: false
/async-retry/1.1.3:
resolution: {integrity: sha512-fiAB2uaoAoUS5Ua75XFGoMKF4hmQ5H4u4gsINUjwPNof5dygJS1zyL9mh0SOmIkzAwGijwG4ybLNc8yG2OGpEQ==}
@@ -8449,6 +8456,14 @@ packages:
prompts: 2.4.2
dev: true
/cross-env/7.0.3:
resolution: {integrity: sha512-+/HKd6EgcQCJGh2PSjZuUitQBQynKor4wrFbRg4DtAgS1aWO+gU52xpH7M9ScGgXSYmAVS9bIJ8EzuaGw0oNAw==}
engines: {node: '>=10.14', npm: '>=6', yarn: '>=1'}
hasBin: true
dependencies:
cross-spawn: 7.0.3
dev: true
/cross-spawn/5.1.0:
resolution: {integrity: sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==}
dependencies:
@@ -8957,13 +8972,15 @@ packages:
safer-buffer: 2.1.2
dev: true
/edge-runtime/2.0.0:
resolution: {integrity: sha512-TmRJhKi4mlM1e+zgF4CSzVU5gJ1sWj7ia+XhVgZ8PYyYUxk4PPjJU8qScpSLsAbdSxoBghLxdMuwuCzdYLd1sQ==}
/edge-runtime/2.1.4:
resolution: {integrity: sha512-SertKByzAmjm+MkLbFl1q0ko+/90V24dhZgQM8fcdguQaDYVEVtb6okEBGeg8IQgL1/JUP8oSlUIxSI/bvsVRQ==}
engines: {node: '>=14'}
hasBin: true
dependencies:
'@edge-runtime/format': 1.1.0
'@edge-runtime/vm': 2.0.0
'@edge-runtime/format': 2.0.1
'@edge-runtime/vm': 2.1.2
async-listen: 2.0.3
exit-hook: 2.2.1
http-status: 1.5.3
mri: 1.2.0
picocolors: 1.0.0
pretty-bytes: 5.6.0
@@ -10148,6 +10165,7 @@ packages:
/esprima/4.0.1:
resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==}
engines: {node: '>=4'}
hasBin: true
/esquery/1.4.0:
resolution: {integrity: sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==}
@@ -11564,11 +11582,6 @@ packages:
sshpk: 1.17.0
dev: true
/http-status/1.5.3:
resolution: {integrity: sha512-jCClqdnnwigYslmtfb28vPplOgoiZ0siP2Z8C5Ua+3UKbx410v+c+jT+jh1bbI4TvcEySuX0vd/CfFZFbDkJeQ==}
engines: {node: '>= 0.4.0'}
dev: false
/http2-wrapper/1.0.3:
resolution: {integrity: sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==}
engines: {node: '>=10.19.0'}
@@ -18412,6 +18425,7 @@ packages:
/which/2.0.2:
resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==}
engines: {node: '>= 8'}
hasBin: true
dependencies:
isexe: 2.0.0

1
utils/run.js vendored
View File

@@ -10,7 +10,6 @@ const allPackages = [
'static-config',
'client',
'next',
'node-bridge',
'node',
'go',
'python',