Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-24 03:39:11 +00:00)

Compare commits: @now/node-… ... @now/next@… (55 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | c8d225522d |  |
|  | 8ee5063669 |  |
|  | 9372e70747 |  |
|  | 4a4bd550a1 |  |
|  | f53343d547 |  |
|  | e4ed811b53 |  |
|  | e9935dee31 |  |
|  | 2e1e6bb131 |  |
|  | 4a01ac4bd0 |  |
|  | bd1a7c428f |  |
|  | 9a4a3dac47 |  |
|  | 4f2c35a0ee |  |
|  | 672df5d026 |  |
|  | 8cb648abc4 |  |
|  | 74f658c634 |  |
|  | efbb54a232 |  |
|  | 3e2bd03e01 |  |
|  | 8dc92b70b9 |  |
|  | 4267be4e5a |  |
|  | 43ba6459eb |  |
|  | 8c5638915d |  |
|  | 3fab247c15 |  |
|  | 6ab0e2e9ab |  |
|  | 34369148d7 |  |
|  | 662ad1ed3a |  |
|  | 890cd74ee5 |  |
|  | 7ef616b31e |  |
|  | bebcfa4bb5 |  |
|  | 25100c53aa |  |
|  | fe20da87e7 |  |
|  | 18cb147c86 |  |
|  | 9c9e18586f |  |
|  | 0cd7192740 |  |
|  | a2d9c4fb4b |  |
|  | 02fafd2ebc |  |
|  | 42577c915c |  |
|  | 73db9e11dd |  |
|  | 3125125c16 |  |
|  | 5335291408 |  |
|  | 36620559f9 |  |
|  | 360ea3a609 |  |
|  | 1cd362126c |  |
|  | ae19fe95f6 |  |
|  | 3e34d402a2 |  |
|  | cc7b97fbbb |  |
|  | c1049985af |  |
|  | 214388ccf3 |  |
|  | b1d6b7bfc0 |  |
|  | ece3564dfd |  |
|  | a88af1f077 |  |
|  | d92f7b26c0 |  |
|  | 52198af750 |  |
|  | d58bff2453 |  |
|  | 8c0a144ae4 |  |
|  | 106e4d5f36 |  |
.prettierrc.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
"singleQuote": true
}
@@ -21,7 +21,7 @@
"pre-commit": "lint-staged",
"lint-staged": {
"*.js": [
"prettier --write --single-quote",
"prettier --write",
"eslint --fix",
"git add"
]
@@ -12,9 +12,10 @@ exports.config = {

exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;

exports.build = async ({ files, entrypoint, config }) => {
exports.build = async ({
workPath, files, entrypoint, config,
}) => {
const srcDir = await getWritableDirectory();
const workDir = await getWritableDirectory();

console.log('downloading files...');
await download(files, srcDir);
@@ -24,7 +25,7 @@ exports.build = async ({ files, entrypoint, config }) => {
return o;
}, {});

const IMPORT_CACHE = `${workDir}/.import-cache`;
const IMPORT_CACHE = `${workPath}/.import-cache`;
const env = Object.assign({}, process.env, configEnv, {
PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
IMPORT_CACHE,
@@ -37,12 +38,12 @@ exports.build = async ({ files, entrypoint, config }) => {

await execa(builderPath, [entrypoint], {
env,
cwd: workDir,
cwd: workPath,
stdio: 'inherit',
});

const lambda = await createLambda({
files: await glob('**', workDir),
files: await glob('**', workPath),
handler: entrypoint, // not actually used in `bootstrap`
runtime: 'provided',
environment: Object.assign({}, configEnv, {
@@ -1,6 +1,6 @@
{
"name": "@now/bash",
"version": "0.1.2",
"version": "0.1.4-canary.0",
"description": "Now 2.0 builder for HTTP endpoints written in Bash",
"main": "index.js",
"author": "Nathan Rajlich <nate@zeit.co>",
@@ -54,9 +54,10 @@ _lambda_runtime_next() {
local stdin
stdin="$(mktemp -u)"
mkfifo "$stdin"
_lambda_runtime_body "$event" > "$stdin" &
_lambda_runtime_body < "$event" > "$stdin" &

handler "$event" < "$stdin" > "$body" || exit_code="$?"

rm -f "$event" "$stdin"

if [ "$exit_code" -eq 0 ]; then
@@ -74,12 +75,14 @@ _lambda_runtime_next() {
}

_lambda_runtime_body() {
if [ "$(jq --raw-output '.body | type' < "$1")" = "string" ]; then
if [ "$(jq --raw-output '.encoding' < "$1")" = "base64" ]; then
jq --raw-output '.body' < "$1" | base64 -d
local event
event="$(cat)"
if [ "$(jq --raw-output '.body | type' <<< "$event")" = "string" ]; then
if [ "$(jq --raw-output '.encoding' <<< "$event")" = "base64" ]; then
jq --raw-output '.body' <<< "$event" | base64 --decode
else
# assume plain-text body
jq --raw-output '.body' < "$1"
jq --raw-output '.body' <<< "$event"
fi
fi
}
@@ -1,12 +1,10 @@
const path = require('path');
const fs = require('fs-extra');

const prod = process.env.AWS_EXECUTION_ENV || process.env.X_GOOGLE_CODE_LOCATION;
const TMP_PATH = prod ? '/tmp' : path.join(__dirname, 'tmp');
const { join } = require('path');
const { tmpdir } = require('os');
const { mkdirp } = require('fs-extra');

module.exports = async function getWritableDirectory() {
const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
const directory = path.join(TMP_PATH, name);
await fs.mkdirp(directory);
const directory = join(tmpdir(), name);
await mkdirp(directory);
return directory;
};
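For orientation, a minimal usage sketch of the rewritten helper. The require path below is an assumption (it mirrors the `@now/build-utils/fs/...` paths used elsewhere in this diff); the rest is illustrative:

```js
// Hypothetical usage: each call creates a fresh directory under os.tmpdir()
// (e.g. /tmp/3fa1b2c on Lambda), so a builder can keep sources and output apart.
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js'); // assumed path

async function demo() {
  const srcDir = await getWritableDirectory();
  const outDir = await getWritableDirectory();
  console.log(srcDir, outDir, srcDir !== outDir); // two distinct random-hex directories
}

demo();
```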
@@ -13,9 +13,18 @@ function spawnAsync(command, args, cwd) {
});
}

async function chmodPlusX(fsPath) {
const s = await fs.stat(fsPath);
const newMode = s.mode | 64 | 8 | 1; // eslint-disable-line no-bitwise
if (s.mode === newMode) return;
const base8 = newMode.toString(8).slice(-3);
await fs.chmod(fsPath, base8);
}

async function runShellScript(fsPath) {
assert(path.isAbsolute(fsPath));
const destPath = path.dirname(fsPath);
await chmodPlusX(fsPath);
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
return true;
}
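The bit arithmetic in `chmodPlusX` above is easy to misread: `64 | 8 | 1` is `0o100 | 0o010 | 0o001`, i.e. the execute bit for owner, group and other (0o111). A minimal sketch of the same idea, assuming only Node's built-in `fs` promises API:

```js
const fs = require('fs').promises;

// Sketch of chmodPlusX: OR the three execute bits (0o111) into the current mode,
// then keep only the permission bits when calling chmod.
async function makeExecutable(fsPath) {
  const { mode } = await fs.stat(fsPath);
  const newMode = mode | 0o111; // 64 | 8 | 1 === 0o111
  if (mode === newMode) return; // already executable for user, group and other
  await fs.chmod(fsPath, newMode & 0o777); // same effect as newMode.toString(8).slice(-3)
}
```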
@@ -1,6 +1,6 @@
{
"name": "@now/build-utils",
"version": "0.4.35-canary.2",
"version": "0.4.37-canary.0",
"license": "MIT",
"repository": {
"type": "git",
@@ -68,6 +68,7 @@ async function createGo(
platform = process.platform,
arch = process.arch,
opts = {},
goMod = false,
) {
const env = {
...process.env,
@@ -76,6 +77,10 @@ async function createGo(
...opts.env,
};

if (goMod) {
env.GO111MODULE = 'on';
}

function go(...args) {
debug('Exec %o', `go ${args.join(' ')}`);
return execa('go', args, { stdio: 'inherit', ...opts, env });
@@ -90,7 +95,7 @@ async function createGo(

async function downloadGo(
dir = GO_DIR,
version = '1.11.5',
version = '1.12',
platform = process.platform,
arch = process.arch,
) {
@@ -1,5 +1,7 @@
const { join, dirname } = require('path');
const { readFile, writeFile } = require('fs-extra');
const {
readFile, writeFile, pathExists, move,
} = require('fs-extra');

const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
@@ -23,9 +25,9 @@ async function build({ files, entrypoint }) {
const downloadedFiles = await download(files, srcPath);

console.log(`Parsing AST for "${entrypoint}"`);
let handlerFunctionName;
let parseFunctionName;
try {
handlerFunctionName = await getExportedFunctionName(
parseFunctionName = await getExportedFunctionName(
downloadedFiles[entrypoint].fsPath,
);
} catch (err) {
@@ -33,7 +35,7 @@ async function build({ files, entrypoint }) {
throw err;
}

if (!handlerFunctionName) {
if (!parseFunctionName) {
const err = new Error(
`Could not find an exported function in "${entrypoint}"`,
);
@@ -41,53 +43,144 @@ async function build({ files, entrypoint }) {
throw err;
}

const handlerFunctionName = parseFunctionName.split(',')[0];

console.log(
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`,
);

const origianlMainGoContents = await readFile(
join(__dirname, 'main.go'),
'utf8',
);
const mainGoContents = origianlMainGoContents.replace(
'__NOW_HANDLER_FUNC_NAME',
handlerFunctionName,
);
// in order to allow the user to have `main.go`, we need our `main.go` to be called something else
const mainGoFileName = 'main__now__go__.go';

// we need `main.go` in the same dir as the entrypoint,
// otherwise `go build` will refuse to build
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);

// Go doesn't like to build files in different directories,
// so now we place `main.go` together with the user code
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
// check if package name other than main
const packageName = parseFunctionName.split(',')[1];
const isGoModExist = await pathExists(`${entrypointDirname}/go.mod`);
if (packageName !== 'main') {
const go = await createGo(
goPath,
process.platform,
process.arch,
{
cwd: entrypointDirname,
},
true,
);
if (!isGoModExist) {
try {
go('mod', 'init', packageName);
} catch (err) {
console.log(`failed to \`go mod init ${packageName}\``);
throw err;
}
}

const go = await createGo(goPath, process.platform, process.arch, {
cwd: entrypointDirname,
});
const mainModGoFileName = 'main__mod__.go';
const modMainGoContents = await readFile(
join(__dirname, mainModGoFileName),
'utf8',
);

// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
// and download any packages that aren't part of the stdlib
try {
await go.get();
} catch (err) {
console.log('failed to `go get`');
throw err;
}
let goPackageName = `${packageName}/${packageName}`;
const goFuncName = `${packageName}.${handlerFunctionName}`;

console.log('Running `go build`...');
const destPath = join(outDir, 'handler');
try {
const src = [
join(entrypointDirname, mainGoFileName),
downloadedFiles[entrypoint].fsPath,
];
await go.build({ src, dest: destPath });
} catch (err) {
console.log('failed to `go build`');
throw err;
if (isGoModExist) {
const goModContents = await readFile(
`${entrypointDirname}/go.mod`,
'utf8',
);
goPackageName = `${
goModContents.split('\n')[0].split(' ')[1]
}/${packageName}`;
}

const mainModGoContents = modMainGoContents
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);

// write main__mod__.go
await writeFile(
join(entrypointDirname, mainModGoFileName),
mainModGoContents,
);

// move user go file to folder
try {
await move(
downloadedFiles[entrypoint].fsPath,
`${join(entrypointDirname, packageName, entrypoint)}`,
);
} catch (err) {
console.log('failed to move entry to package folder');
throw err;
}

console.log('tidy go.mod file');
try {
// ensure go.mod up-to-date
await go('mod', 'tidy');
} catch (err) {
console.log('failed to `go mod tidy`');
throw err;
}

console.log('Running `go build`...');
const destPath = join(outDir, 'handler');
try {
const src = [join(entrypointDirname, mainModGoFileName)];
await go.build({ src, dest: destPath });
} catch (err) {
console.log('failed to `go build`');
throw err;
}
} else {
const go = await createGo(
goPath,
process.platform,
process.arch,
{
cwd: entrypointDirname,
},
false,
);
const origianlMainGoContents = await readFile(
join(__dirname, 'main.go'),
'utf8',
);
const mainGoContents = origianlMainGoContents.replace(
'__NOW_HANDLER_FUNC_NAME',
handlerFunctionName,
);

// in order to allow the user to have `main.go`,
// we need our `main.go` to be called something else
const mainGoFileName = 'main__now__go__.go';

// Go doesn't like to build files in different directories,
// so now we place `main.go` together with the user code
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);

// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
// and download any packages that aren't part of the stdlib
try {
await go.get();
} catch (err) {
console.log('failed to `go get`');
throw err;
}

console.log('Running `go build`...');
const destPath = join(outDir, 'handler');
try {
const src = [
join(entrypointDirname, mainGoFileName),
downloadedFiles[entrypoint].fsPath,
];
await go.build({ src, dest: destPath });
} catch (err) {
console.log('failed to `go build`');
throw err;
}
}

const lambda = await createLambda({
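One step in the branch above is easy to miss: when the entrypoint's package is not `main` and a `go.mod` already exists, the handler's import path is taken from the first line of that file (`module <path>`) and joined with the package name before the `main__mod__.go` placeholders are filled in. A small illustrative sketch of that substitution (the module path, package and function names here are made up):

```js
// Illustrative only: mirrors the goPackageName / goFuncName derivation above.
const goModContents = 'module github.com/example/project\n\ngo 1.12\n';
const packageName = 'handler'; // second value emitted by get-exported-function-name
const handlerFunctionName = 'Index'; // first value emitted by get-exported-function-name

// "module github.com/example/project" -> "github.com/example/project/handler"
const goPackageName = `${goModContents.split('\n')[0].split(' ')[1]}/${packageName}`;
const goFuncName = `${packageName}.${handlerFunctionName}`; // "handler.Index"

const template = 'import ("__NOW_HANDLER_PACKAGE_NAME") // now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))';
console.log(
  template
    .replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
    .replace('__NOW_HANDLER_FUNC_NAME', goFuncName),
);
```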
packages/now-go/main__mod__.go (new file, 12 lines)
@@ -0,0 +1,12 @@
package main

import (
"net/http"
"__NOW_HANDLER_PACKAGE_NAME"

now "github.com/zeit/now-builders/utils/go/bridge"
)

func main() {
now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))
}
@@ -1,6 +1,6 @@
{
"name": "@now/go",
"version": "0.2.13-canary.1",
"version": "0.3.1-canary.0",
"license": "MIT",
"repository": {
"type": "git",
@@ -13,6 +13,7 @@
"files": [
"*.js",
"main.go",
"main__mod__.go",
"util"
],
"dependencies": {
@@ -34,7 +34,7 @@ func main() {
if fn.Name.IsExported() == true {
// we found the first exported function
// we're done!
fmt.Print(fn.Name.Name)
fmt.Print(fn.Name.Name, ",", parsed.Name.Name)
os.Exit(0)
}
}
@@ -3,12 +3,20 @@ const download = require('@now/build-utils/fs/download.js'); // eslint-disable-l
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
const FileBlob = require('@now/build-utils/file-blob'); // eslint-disable-line import/no-extraneous-dependencies
const path = require('path');
const { readFile, writeFile, unlink } = require('fs.promised');
const {
runNpmInstall,
runPackageJsonScript,
} = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
const {
readFile,
writeFile,
unlink: unlinkFile,
remove: removePath,
mkdirp,
rename: renamePath,
pathExists,
} = require('fs-extra');
const semver = require('semver');
const nextLegacyVersions = require('./legacy-versions');
const {
@@ -17,6 +25,7 @@ const {
includeOnlyEntryDirectory,
normalizePackageJson,
onlyStaticDirectory,
getNextConfig,
} = require('./utils');

/** @typedef { import('@now/build-utils/file-ref').Files } Files */
@@ -68,6 +77,36 @@ async function writeNpmRc(workPath, token) {
);
}

function getNextVersion(packageJson) {
let nextVersion;
if (packageJson.dependencies && packageJson.dependencies.next) {
nextVersion = packageJson.dependencies.next;
} else if (packageJson.devDependencies && packageJson.devDependencies.next) {
nextVersion = packageJson.devDependencies.next;
}
return nextVersion;
}

function isLegacyNext(nextVersion) {
// If version is using the dist-tag instead of a version range
if (nextVersion === 'canary' || nextVersion === 'latest') {
return false;
}

// If the version is an exact match with the legacy versions
if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
return true;
}

const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
// When the version can't be matched with legacy versions, so it must be a newer version
if (maxSatisfying === null) {
return false;
}

return true;
}

exports.config = {
maxLambdaSize: '5mb',
};
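A short sketch of how the extracted `isLegacyNext` helper resolves typical version ranges. The legacy list below is a made-up subset for illustration; the real one comes from `./legacy-versions`:

```js
const semver = require('semver');

// Illustrative subset, not the real legacy-versions list.
const nextLegacyVersions = ['7.0.0', '7.0.1', '7.0.2'];

// Same decision procedure as isLegacyNext above.
const isLegacy = range => range !== 'canary'
  && range !== 'latest'
  && (nextLegacyVersions.indexOf(range) !== -1
    || semver.maxSatisfying(nextLegacyVersions, range) !== null);

console.log(isLegacy('^7.0.0')); // true  -> legacy build path
console.log(isLegacy('^8.0.0')); // false -> serverless build path
console.log(isLegacy('canary')); // false -> dist-tags are never treated as legacy
```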
@@ -84,52 +123,34 @@ exports.build = async ({ files, workPath, entrypoint }) => {
await download(files, workPath);
const entryPath = path.join(workPath, entryDirectory);

const pkg = await readPackageJson(entryPath);

let nextVersion;
if (pkg.dependencies && pkg.dependencies.next) {
nextVersion = pkg.dependencies.next;
} else if (pkg.devDependencies && pkg.devDependencies.next) {
nextVersion = pkg.devDependencies.next;
if (await pathExists(path.join(entryPath, '.next'))) {
console.warn(
'WARNING: You should probably not upload the `.next` directory. See https://zeit.co/docs/v2/deployments/official-builders/next-js-now-next/ for more information.',
);
}

const pkg = await readPackageJson(entryPath);

const nextVersion = getNextVersion(pkg);
if (!nextVersion) {
throw new Error(
'No Next.js version could be detected in "package.json". Make sure `"next"` is installed in "dependencies" or "devDependencies"',
);
}

const isLegacy = (() => {
// If version is using the dist-tag instead of a version range
if (nextVersion === 'canary' || nextVersion === 'latest') {
return false;
}

// If the version is an exact match with the legacy versions
if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
return true;
}

const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
// When the version can't be matched with legacy versions, so it must be a newer version
if (maxSatisfying === null) {
return false;
}

return true;
})();
const isLegacy = isLegacyNext(nextVersion);

console.log(`MODE: ${isLegacy ? 'legacy' : 'serverless'}`);

if (isLegacy) {
try {
await unlink(path.join(entryPath, 'yarn.lock'));
await unlinkFile(path.join(entryPath, 'yarn.lock'));
} catch (err) {
console.log('no yarn.lock removed');
}

try {
await unlink(path.join(entryPath, 'package-lock.json'));
await unlinkFile(path.join(entryPath, 'package-lock.json'));
} catch (err) {
console.log('no package-lock.json removed');
}
@@ -170,7 +191,7 @@ exports.build = async ({ files, workPath, entrypoint }) => {
}

if (process.env.NPM_AUTH_TOKEN) {
await unlink(path.join(entryPath, '.npmrc'));
await unlinkFile(path.join(entryPath, '.npmrc'));
}

const lambdas = {};
@@ -273,11 +294,31 @@ exports.build = async ({ files, workPath, entrypoint }) => {
const pageKeys = Object.keys(pages);

if (pageKeys.length === 0) {
const nextConfig = await getNextConfig(workPath, entryPath);

if (nextConfig != null) {
console.info('Found next.config.js:');
console.info(nextConfig);
console.info();
}

throw new Error(
'No serverless pages were built. https://err.sh/zeit/now-builders/now-next-no-serverless-pages-built',
);
}

// An optional assets folder that is placed alongside every page entrypoint
const assets = await glob(
'assets/**',
path.join(entryPath, '.next', 'serverless'),
);

const assetKeys = Object.keys(assets);
if (assetKeys.length > 0) {
console.log('detected assets to be bundled with lambda:');
assetKeys.forEach(assetFile => console.log(`\t${assetFile}`));
}

await Promise.all(
pageKeys.map(async (page) => {
// These default pages don't have to be handled as they'd always 404
@@ -291,6 +332,7 @@ exports.build = async ({ files, workPath, entrypoint }) => {
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
files: {
...launcherFiles,
...assets,
'page.js': pages[page],
},
handler: 'now__launcher.launcher',
@@ -313,16 +355,46 @@ exports.build = async ({ files, workPath, entrypoint }) => {
{},
);

const nextStaticDirectory = onlyStaticDirectory(
const staticDirectoryFiles = onlyStaticDirectory(
includeOnlyEntryDirectory(files, entryDirectory),
);
const staticDirectoryFiles = Object.keys(nextStaticDirectory).reduce(
(mappedFiles, file) => ({
...mappedFiles,
[path.join(entryDirectory, file)]: nextStaticDirectory[file],
}),
{},
entryDirectory,
);

return { ...lambdas, ...staticFiles, ...staticDirectoryFiles };
};

exports.prepareCache = async ({ cachePath, workPath, entrypoint }) => {
console.log('preparing cache ...');

const entryDirectory = path.dirname(entrypoint);
const entryPath = path.join(workPath, entryDirectory);
const cacheEntryPath = path.join(cachePath, entryDirectory);

const pkg = await readPackageJson(entryPath);
const nextVersion = getNextVersion(pkg);
const isLegacy = isLegacyNext(nextVersion);

if (isLegacy) {
// skip caching legacy mode (swapping deps between all and production can get bug-prone)
return {};
}

console.log('clearing old cache ...');
await removePath(cacheEntryPath);
await mkdirp(cacheEntryPath);

console.log('copying build files for cache ...');
await renamePath(entryPath, cacheEntryPath);

console.log('producing cache file manifest ...');

const cacheEntrypoint = path.relative(cachePath, cacheEntryPath);
return {
...(await glob(
path.join(cacheEntrypoint, 'node_modules/{**,!.*,.yarn*}'),
cachePath,
)),
...(await glob(path.join(cacheEntrypoint, 'package-lock.json'), cachePath)),
...(await glob(path.join(cacheEntrypoint, 'yarn.lock'), cachePath)),
};
};
@@ -1,6 +1,6 @@
{
"name": "@now/next",
"version": "0.0.85-canary.6",
"version": "0.1.3-canary.0",
"license": "MIT",
"repository": {
"type": "git",
@@ -8,9 +8,9 @@
"directory": "packages/now-next"
},
"dependencies": {
"@now/node-bridge": "1.0.0-canary.2",
"@now/node-bridge": "^1.0.0",
"execa": "^1.0.0",
"fs.promised": "^3.0.0",
"fs-extra": "^7.0.0",
"semver": "^5.6.0"
}
}
@@ -1,3 +1,6 @@
const fs = require('fs-extra');
const path = require('path');

/** @typedef { import('@now/build-utils/file-ref') } FileRef */
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
@@ -79,13 +82,13 @@ function excludeLockFiles(files) {
}

/**
* Exclude the static directory from files
* Include the static directory from files
* @param {Files} files
* @returns {Files}
*/
function onlyStaticDirectory(files) {
function onlyStaticDirectory(files, entryDir) {
function matcher(filePath) {
return !filePath.startsWith('static');
return !filePath.startsWith(path.join(entryDir, 'static'));
}

return excludeFiles(files, matcher);
@@ -136,6 +139,20 @@ function normalizePackageJson(defaultPackageJson = {}) {
};
}

async function getNextConfig(workPath, entryPath) {
const entryConfig = path.join(entryPath, './next.config.js');
if (await fs.pathExists(entryConfig)) {
return fs.readFile(entryConfig, 'utf8');
}

const workConfig = path.join(workPath, './next.config.js');
if (await fs.pathExists(workConfig)) {
return fs.readFile(workConfig, 'utf8');
}

return null;
}

module.exports = {
excludeFiles,
validateEntrypoint,
@@ -143,4 +160,5 @@ module.exports = {
excludeLockFiles,
normalizePackageJson,
onlyStaticDirectory,
getNextConfig,
};
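The `onlyStaticDirectory` change matters for monorepos: the `static` directory is now matched relative to the entry directory instead of the repository root. A small sketch of the resulting keep/drop behaviour (the file paths and helper name are illustrative):

```js
const path = require('path');

// Illustrative: keep only files under "<entryDir>/static", as the updated matcher does.
function staticFilesFor(files, entryDir) {
  const prefix = path.join(entryDir, 'static');
  return Object.keys(files).filter(filePath => filePath.startsWith(prefix));
}

const files = {
  'www/static/test.txt': true,
  'www/pages/index.js': true,
  'static/other.txt': true,
};
console.log(staticFilesFor(files, 'www')); // [ 'www/static/test.txt' ]
console.log(staticFilesFor(files, '.'));   // [ 'static/other.txt' ]
```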
@@ -1,6 +1,6 @@
{
"name": "@now/node-bridge",
"version": "1.0.0-canary.2",
"version": "1.0.1-canary.0",
"license": "MIT",
"main": "./index.js",
"repository": {
@@ -48,7 +48,7 @@ async function downloadInstallAndBundle(
data: JSON.stringify({
license: 'UNLICENSED',
dependencies: {
'@zeit/ncc': '0.15.2',
'@zeit/ncc': '0.16.0',
},
}),
}),
@@ -1,6 +1,6 @@
{
"name": "@now/node-server",
"version": "0.5.0-canary.3",
"version": "0.5.2-canary.1",
"license": "MIT",
"repository": {
"type": "git",
@@ -8,7 +8,7 @@
"directory": "packages/now-node-server"
},
"dependencies": {
"@now/node-bridge": "1.0.0-canary.2",
"@now/node-bridge": "^1.0.1-canary.0",
"fs-extra": "7.0.1"
},
"scripts": {
@@ -1,6 +1,6 @@
{
"name": "@now/node",
"version": "0.5.0-canary.5",
"version": "0.5.2-canary.1",
"license": "MIT",
"main": "./dist/index",
"repository": {
@@ -9,7 +9,7 @@
"directory": "packages/now-node"
},
"dependencies": {
"@now/node-bridge": "1.0.0-canary.2",
"@now/node-bridge": "^1.0.1-canary.0",
"fs-extra": "7.0.1"
},
"scripts": {
@@ -46,7 +46,7 @@ async function downloadInstallAndBundle(
data: JSON.stringify({
license: 'UNLICENSED',
dependencies: {
'@zeit/ncc': '0.15.2',
'@zeit/ncc': '0.16.0',
}
})
})
@@ -64,7 +64,8 @@ async function transformFromAwsRequest({
const { pathname, search, query: queryString } = parseUrl(path);
let requestUri = pathname + (search || '');

let filename = pathJoin('/var/task/user', pathname);
let filename = pathJoin('/var/task/user',
process.env.NOW_ENTRYPOINT || pathname);
if (await isDirectory(filename)) {
if (!filename.endsWith('/')) {
filename += '/';
@@ -1,6 +1,6 @@
{
"name": "@now/php-bridge",
"version": "0.4.14-canary.0",
"version": "0.4.16-canary.0",
"license": "MIT",
"repository": {
"type": "git",
@@ -20,6 +20,9 @@ exports.build = async ({ files, entrypoint }) => {
files: { ...userFiles, ...bridgeFiles },
handler: 'launcher.launcher',
runtime: 'nodejs8.10',
environment: {
NOW_ENTRYPOINT: entrypoint,
},
});

return { [entrypoint]: lambda };
@@ -1,6 +1,6 @@
{
"name": "@now/php",
"version": "0.4.14-canary.1",
"version": "0.4.16-canary.0",
"license": "MIT",
"repository": {
"type": "git",
@@ -8,7 +8,7 @@
"directory": "packages/now-php"
},
"dependencies": {
"@now/php-bridge": "^0.4.14-canary.0"
"@now/php-bridge": "^0.4.16-canary.0"
},
"scripts": {
"test": "jest"
packages/now-php/test/fixtures/19-routes/index.php (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
<?php
print('cow:RANDOMNESS_PLACEHOLDER:' . $_SERVER['REQUEST_URI']);
packages/now-php/test/fixtures/19-routes/now.json (new file, vendored, 13 lines)
@@ -0,0 +1,13 @@
{
"version": 2,
"builds": [
{ "src": "index.php", "use": "@now/php" }
],
"routes": [
{ "src": "/(.*)", "dest": "index.php" }
],
"probes": [
{ "path": "/any", "mustContain": "cow:RANDOMNESS_PLACEHOLDER:/any" },
{ "path": "/any?type=some", "mustContain": "cow:RANDOMNESS_PLACEHOLDER:/any?type=some" }
]
}
@@ -5,13 +5,20 @@ const toml = require('@iarna/toml');
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
const { runShellScript } = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
const installRustAndGCC = require('./download-install-rust-toolchain.js');
const FileRef = require('@now/build-utils/file-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
const installRust = require('./install-rust.js');

exports.config = {
maxLambdaSize: '25mb',
};

const codegenFlags = [
'-C', 'target-cpu=ivybridge',
'-C', 'target-feature=-aes,-avx,+fxsr,-popcnt,+sse,+sse2,-sse3,-sse4.1,-sse4.2,-ssse3,-xsave,-xsaveopt',
];

async function inferCargoBinaries(config) {
try {
const { stdout: manifestStr } = await execa(
@@ -45,7 +52,7 @@ async function buildWholeProject({
const { debug } = config;
console.log('running `cargo build`...');
try {
await execa('cargo', ['build'].concat(debug ? [] : ['--release']), {
await execa('cargo', ['build', '--verbose'].concat(debug ? [] : ['--release']), {
env: rustEnv,
cwd: entrypointDirname,
stdio: 'inherit',
@@ -85,6 +92,17 @@ async function buildWholeProject({
return lambdas;
}

async function runUserScripts(entrypoint) {
const entryDir = path.dirname(entrypoint);
const buildScriptPath = path.join(entryDir, 'build.sh');
const buildScriptExists = await fs.exists(buildScriptPath);

if (buildScriptExists) {
console.log('running `build.sh`...');
await runShellScript(buildScriptPath);
}
}

async function cargoLocateProject(config) {
try {
const { stdout: projectDescriptionStr } = await execa(
@@ -171,7 +189,7 @@ async function buildSingleFile({
try {
await execa(
'cargo',
['build', '--bin', binName].concat(debug ? [] : ['--release']),
['build', '--bin', binName, '--verbose'].concat(debug ? [] : ['--release']),
{
env: rustEnv,
cwd: entrypointDirname,
@@ -208,14 +226,16 @@ exports.build = async (m) => {
console.log('downloading files');
const downloadedFiles = await download(files, workPath);

const { PATH: toolchainPath, ...otherEnv } = await installRustAndGCC();
await installRust();
const { PATH, HOME } = process.env;
const rustEnv = {
...process.env,
...otherEnv,
PATH: `${path.join(HOME, '.cargo/bin')}:${toolchainPath}:${PATH}`,
PATH: `${path.join(HOME, '.cargo/bin')}:${PATH}`,
RUSTFLAGS: [process.env.RUSTFLAGS, ...codegenFlags].filter(Boolean).join(' '),
};

await runUserScripts(downloadedFiles[entrypoint].fsPath);

const newM = Object.assign(m, { downloadedFiles, rustEnv });
if (path.extname(entrypoint) === '.toml') {
return buildWholeProject(newM);
@@ -234,6 +254,7 @@ exports.prepareCache = async ({ cachePath, entrypoint, workPath }) => {
const rustEnv = {
...process.env,
PATH: `${path.join(HOME, '.cargo/bin')}:${PATH}`,
RUSTFLAGS: [process.env.RUSTFLAGS, ...codegenFlags].filter(Boolean).join(' '),
};
const entrypointDirname = path.dirname(path.join(workPath, entrypoint));
const cargoTomlFile = await cargoLocateProject({
@@ -264,7 +285,52 @@ exports.prepareCache = async ({ cachePath, entrypoint, workPath }) => {
path.join(cacheEntrypointDirname, 'target'),
);

return {
...(await glob('**/**', path.join(cachePath))),
};
const cacheFiles = await glob('**/**', cachePath);

// eslint-disable-next-line no-restricted-syntax
for (const f of Object.keys(cacheFiles)) {
const accept = (/(?:^|\/)target\/release\/\.fingerprint\//.test(f))
|| (/(?:^|\/)target\/release\/build\//.test(f))
|| (/(?:^|\/)target\/release\/deps\//.test(f))
|| (/(?:^|\/)target\/debug\/\.fingerprint\//.test(f))
|| (/(?:^|\/)target\/debug\/build\//.test(f))
|| (/(?:^|\/)target\/debug\/deps\//.test(f));
if (!accept) {
delete cacheFiles[f];
}
}

return cacheFiles;
};

function findCargoToml(files, entrypoint) {
let currentPath = path.dirname(entrypoint);
let cargoTomlPath;

// eslint-disable-next-line no-constant-condition
while (true) {
cargoTomlPath = path.join(currentPath, 'Cargo.toml');
if (files[cargoTomlPath]) break;
const newPath = path.dirname(currentPath);
if (currentPath === newPath) break;
currentPath = newPath;
}

return cargoTomlPath;
}

/*
console.log(findCargoToml({
'rust/src/main.rs': true,
'rust/Cargo.toml': true,
'Cargo.toml': true
}, 'rust/src/main.rs'));
*/

exports.getDefaultCache = ({ files, entrypoint }) => {
const cargoTomlPath = findCargoToml(files, entrypoint);
if (!cargoTomlPath) return undefined;
const targetFolderDir = path.dirname(cargoTomlPath);
const defaultCacheRef = new FileRef({ digest: 'sha:204e0c840c43473bbd130d7bc704fe5588b4eab43cda9bc940f10b2a0ae14b16' });
return { [targetFolderDir]: defaultCacheRef };
};
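The new `prepareCache` logic keeps only Cargo's incremental-compilation artifacts. A short sketch of what the accept regexes above match, condensed into one expression for illustration (the paths are made up):

```js
// Equivalent, condensed form of the six accept regexes in prepareCache above.
const accept = f => /(?:^|\/)target\/(release|debug)\/(\.fingerprint|build|deps)\//.test(f);

console.log(accept('target/release/deps/libfoo.rlib'));   // true  -> kept in the cache
console.log(accept('api/target/debug/build/foo/output')); // true  -> nested crates too
console.log(accept('target/release/my-binary'));           // false -> final binaries are dropped
console.log(accept('src/main.rs'));                        // false
```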
@@ -3,7 +3,6 @@ const fetch = require('node-fetch');
const execa = require('execa');

const rustUrl = 'https://dmmcy0pwk6bqi.cloudfront.net/rust.tar.gz';
const ccUrl = 'https://dmmcy0pwk6bqi.cloudfront.net/gcc-4.8.5.tgz';

async function downloadRustToolchain() {
console.log('downloading the rust toolchain');
@@ -22,38 +21,6 @@ async function downloadRustToolchain() {
});
}

async function downloadGCC() {
console.log('downloading GCC');
const res = await fetch(ccUrl);

if (!res.ok) {
throw new Error(`Failed to download: ${ccUrl}`);
}

return new Promise((resolve, reject) => {
res.body
.on('error', reject)
// NOTE(anmonteiro): We pipe GCC into `/tmp` instead of getting a writable
// directory from `@now/build-utils` because the GCC distribution that we
// use is specifically packaged for AWS Lambda (where `/tmp` is writable)
// and contains several hardcoded symlinks to paths in `/tmp`.
.pipe(tar.extract({ gzip: true, cwd: '/tmp' }))
.on('finish', async () => {
const { LD_LIBRARY_PATH } = process.env;
// Set the environment variables as per
// https://github.com/lambci/lambci/blob/e6c9c7/home/init/gcc#L14-L17
const newEnv = {
PATH: '/tmp/bin:/tmp/sbin',
LD_LIBRARY_PATH: `/tmp/lib:/tmp/lib64:${LD_LIBRARY_PATH}`,
CPATH: '/tmp/include',
LIBRARY_PATH: '/tmp/lib',
};

return resolve(newEnv);
});
});
}

async function installOpenSSL() {
console.log('installing openssl-devel...');
try {
@@ -77,8 +44,5 @@ async function installOpenSSL() {

module.exports = async () => {
await downloadRustToolchain();
const newEnv = await downloadGCC();
await installOpenSSL();

return newEnv;
};
@@ -1,6 +1,6 @@
{
"name": "@now/rust",
"version": "0.0.3-canary.2",
"version": "0.1.2-canary.0",
"license": "MIT",
"repository": {
"type": "git",
@@ -9,7 +9,7 @@
},
"files": [
"index.js",
"download-install-rust-toolchain.js",
"install-rust.js",
"launcher.rs"
],
"dependencies": {
@@ -109,7 +109,7 @@ impl<'a> From<NowRequest<'a>> for HttpRequest<Body> {
// todo: document failure behavior
Body::from(::base64::decode(b.as_ref()).unwrap_or_default())
}
(Some(b), Some(_)) => Body::from(b.into_owned()),
(Some(b), _) => Body::from(b.into_owned()),
_ => Body::from(()),
})
.expect("failed to build request");
@@ -1,6 +1,6 @@
{
"name": "@now/wordpress",
"version": "0.4.15-canary.0",
"version": "0.4.16-canary.0",
"license": "MIT",
"repository": {
"type": "git",
@@ -8,7 +8,7 @@
"directory": "packages/now-wordpress"
},
"dependencies": {
"@now/php-bridge": "^0.4.14-canary.0",
"@now/php-bridge": "^0.4.16-canary.0",
"node-fetch": "2.3.0",
"yauzl": "2.10.0"
},
@@ -27,6 +27,7 @@ it(
path.join(__dirname, 'monorepo'),
);
expect(buildResult['www/index']).toBeDefined();
expect(buildResult['www/static/test.txt']).toBeDefined();
const filePaths = Object.keys(buildResult);
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
@@ -75,7 +76,7 @@ it('Should throw when package.json or next.config.js is not the "src"', async ()
});

it(
'Should build the static-files test',
'Should build the static-files test on legacy',
async () => {
const { buildResult } = await runBuildLambda(
path.join(__dirname, 'legacy-static-files'),
@@ -84,3 +85,14 @@ it(
},
FOUR_MINUTES,
);

it(
'Should build the static-files test',
async () => {
const { buildResult } = await runBuildLambda(
path.join(__dirname, 'static-files'),
);
expect(buildResult['static/test.txt']).toBeDefined();
},
FOUR_MINUTES,
);
test/integration/now-next/monorepo/www/static/test.txt (new file, 1 line)
@@ -0,0 +1 @@
hello world
test/integration/now-next/static-files/next.config.js (new file, 3 lines)
@@ -0,0 +1,3 @@
module.exports = {
target: 'serverless',
};
test/integration/now-next/static-files/now.json (new file, 6 lines)
@@ -0,0 +1,6 @@
{
"version": 2,
"builds": [
{"src": "next.config.js", "use": "@now/next"}
]
}
test/integration/now-next/static-files/package.json (new file, 10 lines)
@@ -0,0 +1,10 @@
{
"scripts": {
"now-build": "next build"
},
"dependencies": {
"next": "8",
"react": "16",
"react-dom": "16"
}
}
test/integration/now-next/static-files/pages/index.js (new file, 1 line)
@@ -0,0 +1 @@
export default () => 'Index page';
test/integration/now-next/static-files/static/test.txt (new file, 1 line)
@@ -0,0 +1 @@
hello world
@@ -22,7 +22,11 @@ async function packAndDeploy (builderPath) {

const RANDOMNESS_PLACEHOLDER_STRING = 'RANDOMNESS_PLACEHOLDER';

async function testDeployment ({ builderUrl, buildUtilsUrl }, fixturePath, buildDelegate) {
async function testDeployment (
{ builderUrl, buildUtilsUrl },
fixturePath,
buildDelegate
) {
console.log('testDeployment', fixturePath);
const globResult = await glob(`${fixturePath}/**`, { nodir: true });
const bodies = globResult.reduce((b, f) => {
@@ -77,7 +81,7 @@ async function testDeployment ({ builderUrl, buildUtilsUrl }, fixturePath, build
for (const probe of nowJson.probes || []) {
console.log('testing', JSON.stringify(probe));
const probeUrl = `https://${deploymentUrl}${probe.path}`;
const text = await fetchDeploymentUrl(probeUrl, {
const { text, resp } = await fetchDeploymentUrl(probeUrl, {
method: probe.method,
body: probe.body ? JSON.stringify(probe.body) : undefined,
headers: {
@@ -87,9 +91,13 @@ async function testDeployment ({ builderUrl, buildUtilsUrl }, fixturePath, build
if (probe.mustContain) {
if (!text.includes(probe.mustContain)) {
await fs.writeFile(path.join(__dirname, 'failed-page.txt'), text);
const headers = Array.from(resp.headers.entries())
.map(([ k, v ]) => ` ${k}=${v}`)
.join('\n');
throw new Error(
`Fetched page ${probeUrl} does not contain ${probe.mustContain}.`
+ ` Instead it contains ${text.slice(0, 60)}`
+ ` Instead it contains ${text.slice(0, 60)}`
+ ` Response headers:\n ${headers}`
);
}
} else {
@@ -118,9 +126,12 @@ async function fetchDeploymentUrl (url, opts) {
for (let i = 0; i < 500; i += 1) {
const resp = await fetch(url, opts);
const text = await resp.text();
if (text && !text.includes('Join Free')
&& !text.includes('The page could not be found')) {
return text;
if (
text
&& !text.includes('Join Free')
&& !text.includes('The page could not be found')
) {
return { resp, text };
}

await new Promise((r) => setTimeout(r, 1000));
test/unit/now-next/__snapshots__/utils.test.js.snap (new file, 13 lines)
@@ -0,0 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`getNextConfig return null on nothing 1`] = `null`;

exports[`getNextConfig should find entry file 1`] = `
"module.exports = {};
"
`;

exports[`getNextConfig should find work file second 1`] = `
"module.exports = { target: 'serverless' };
"
`;
test/unit/now-next/fixtures/entry/next.config.js (new file, 1 line)
@@ -0,0 +1 @@
module.exports = {};
test/unit/now-next/fixtures/next.config.js (new file, 1 line)
@@ -0,0 +1 @@
module.exports = { target: 'serverless' };
@@ -1,11 +1,33 @@
const path = require('path');
const {
excludeFiles,
validateEntrypoint,
includeOnlyEntryDirectory,
normalizePackageJson,
getNextConfig,
} = require('@now/next/utils');
const FileRef = require('@now/build-utils/file-ref'); // eslint-disable-line import/no-extraneous-dependencies

describe('getNextConfig', () => {
const workPath = path.join(__dirname, 'fixtures');
const entryPath = path.join(__dirname, 'fixtures', 'entry');

it('should find entry file', async () => {
const file = await getNextConfig(workPath, entryPath);
expect(file).toMatchSnapshot();
});

it('should find work file second', async () => {
const file = await getNextConfig(workPath, '/');
expect(file).toMatchSnapshot();
});

it('return null on nothing', async () => {
const file = await getNextConfig('/', '/');
expect(file).toMatchSnapshot();
});
});

describe('excludeFiles', () => {
it('should exclude files', () => {
const files = {