mirror of
https://github.com/LukeHagar/vercel.git
synced 2025-12-24 19:00:03 +00:00
Compare commits
13 Commits
@now/pytho
...
@now/html-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b7c47fa587 | ||
|
|
f3bcefe822 | ||
|
|
7a481c9515 | ||
|
|
af275c60bf | ||
|
|
ded377dfc2 | ||
|
|
0daaa1cf6a | ||
|
|
6e928d1a68 | ||
|
|
791c26fdfe | ||
|
|
c14451c2c5 | ||
|
|
846b229511 | ||
|
|
d832658226 | ||
|
|
2df09c7742 | ||
|
|
e55b84b646 |
@@ -78,6 +78,8 @@ workflows:
|
||||
filters:
|
||||
tags:
|
||||
only: /^.*(\d+\.)(\d+\.)(\*|\d+)$/
|
||||
branches:
|
||||
ignore: /.*/
|
||||
# - publish-canary:
|
||||
# requires:
|
||||
# - build
|
||||
|
||||
16
.eslintrc
16
.eslintrc
@@ -5,6 +5,20 @@
|
||||
"import/no-unresolved": 0,
|
||||
"import/no-dynamic-require": 0,
|
||||
"global-require": 0
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"files": ["test/**"],
|
||||
"rules": {
|
||||
"import/no-extraneous-dependencies": 0
|
||||
},
|
||||
"globals": {
|
||||
"describe": true,
|
||||
"it": true,
|
||||
"test": true,
|
||||
"expect": true
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
module.exports = {
|
||||
testEnvironment: 'node',
|
||||
rootDir: 'test',
|
||||
collectCoverageFrom: ['packages/**/*.{js,jsx}', '!**/node_modules/**'],
|
||||
};
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
"publish-stable": "lerna version",
|
||||
"publish-canary": "lerna version prerelease --preid canary",
|
||||
"lint": "tsc && eslint .",
|
||||
"test": "jest",
|
||||
"test": "jest --runInBand",
|
||||
"lint-staged": "lint-staged"
|
||||
},
|
||||
"pre-commit": "lint-staged",
|
||||
@@ -26,6 +26,8 @@
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/fs-extra": "^5.0.4",
|
||||
"@types/glob": "^7.1.1",
|
||||
"@types/multistream": "^2.1.1",
|
||||
"@types/node": "^10.12.8",
|
||||
"eslint": "^5.9.0",
|
||||
|
||||
@@ -1,20 +1,38 @@
|
||||
const assert = require('assert');
|
||||
const fs = require('fs-extra');
|
||||
const MultiStream = require('multistream');
|
||||
const multiStream = require('multistream');
|
||||
const path = require('path');
|
||||
const Sema = require('async-sema');
|
||||
|
||||
/** @typedef {{[filePath: string]: FileFsRef}} FsFiles */
|
||||
|
||||
const semaToPreventEMFILE = new Sema(30);
|
||||
|
||||
/**
|
||||
* @constructor
|
||||
* @argument {Object} options
|
||||
* @argument {number} [options.mode=0o100644]
|
||||
* @argument {string} options.fsPath
|
||||
*/
|
||||
class FileFsRef {
|
||||
constructor({ mode = 0o100644, fsPath }) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof fsPath === 'string');
|
||||
/** @type {string} */
|
||||
this.type = 'FileFsRef';
|
||||
/** @type {number} */
|
||||
this.mode = mode;
|
||||
/** @type {string} */
|
||||
this.fsPath = fsPath;
|
||||
}
|
||||
|
||||
/**
|
||||
* @argument {Object} options
|
||||
* @argument {number} [options.mode=0o100644]
|
||||
* @argument {NodeJS.ReadableStream} options.stream
|
||||
* @argument {string} options.fsPath
|
||||
* @returns {Promise<FileFsRef>}
|
||||
*/
|
||||
static async fromStream({ mode = 0o100644, stream, fsPath }) {
|
||||
assert(typeof mode === 'number');
|
||||
assert(typeof stream.pipe === 'function'); // is-stream
|
||||
@@ -24,37 +42,45 @@ class FileFsRef {
|
||||
await new Promise((resolve, reject) => {
|
||||
const dest = fs.createWriteStream(fsPath);
|
||||
stream.pipe(dest);
|
||||
stream.on('error', error => reject(error));
|
||||
dest.on('finish', () => resolve());
|
||||
dest.on('error', error => reject(error));
|
||||
stream.on('error', reject);
|
||||
dest.on('finish', resolve);
|
||||
dest.on('error', reject);
|
||||
});
|
||||
|
||||
await fs.chmod(fsPath, mode.toString(8).slice(-3));
|
||||
return new FileFsRef({ mode, fsPath });
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {Promise<NodeJS.ReadableStream>}
|
||||
*/
|
||||
async toStreamAsync() {
|
||||
await semaToPreventEMFILE.acquire();
|
||||
const release = () => semaToPreventEMFILE.release();
|
||||
const stream = fs.createReadStream(this.fsPath);
|
||||
stream.on('end', release);
|
||||
stream.on('close', release);
|
||||
stream.on('error', release);
|
||||
return stream;
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns {NodeJS.ReadableStream}
|
||||
*/
|
||||
toStream() {
|
||||
let flag;
|
||||
|
||||
// eslint-disable-next-line consistent-return
|
||||
return new MultiStream((cb) => {
|
||||
if (flag) return cb();
|
||||
return multiStream((cb) => {
|
||||
if (flag) return cb(null, null);
|
||||
flag = true;
|
||||
|
||||
this.toStreamAsync().then((stream) => {
|
||||
cb(undefined, stream);
|
||||
}).catch((error) => {
|
||||
cb(error);
|
||||
});
|
||||
this.toStreamAsync()
|
||||
.then((stream) => {
|
||||
cb(null, stream);
|
||||
})
|
||||
.catch((error) => {
|
||||
cb(error, null);
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,8 @@ const multiStream = require('multistream');
|
||||
const retry = require('async-retry');
|
||||
const Sema = require('async-sema');
|
||||
|
||||
/** @typedef {{[filePath: string]: FileRef}} Files */
|
||||
|
||||
const semaToDownloadFromS3 = new Sema(10);
|
||||
|
||||
class BailableError extends Error {
|
||||
|
||||
@@ -1,20 +1,38 @@
|
||||
const path = require('path');
|
||||
const FileFsRef = require('../file-fs-ref.js');
|
||||
|
||||
/** @typedef {import('../file-ref')} FileRef */
|
||||
/** @typedef {import('../file-fs-ref')} FileFsRef */
|
||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
||||
/** @typedef {{[filePath: string]: FileFsRef}|{}} DownloadedFiles */
|
||||
|
||||
/**
|
||||
* @param {FileRef|FileFsRef} file
|
||||
* @param {string} fsPath
|
||||
* @returns {Promise<FileFsRef>}
|
||||
*/
|
||||
async function downloadFile(file, fsPath) {
|
||||
const { mode } = file;
|
||||
const stream = file.toStream();
|
||||
return FileFsRef.fromStream({ mode, stream, fsPath });
|
||||
}
|
||||
|
||||
/**
|
||||
* Download files to disk
|
||||
* @argument {Files} files
|
||||
* @argument {string} basePath
|
||||
* @returns {Promise<DownloadedFiles>}
|
||||
*/
|
||||
module.exports = async function download(files, basePath) {
|
||||
const files2 = {};
|
||||
|
||||
await Promise.all(Object.keys(files).map(async (name) => {
|
||||
const file = files[name];
|
||||
const fsPath = path.join(basePath, name);
|
||||
files2[name] = await downloadFile(file, fsPath);
|
||||
}));
|
||||
await Promise.all(
|
||||
Object.keys(files).map(async (name) => {
|
||||
const file = files[name];
|
||||
const fsPath = path.join(basePath, name);
|
||||
files2[name] = await downloadFile(file, fsPath);
|
||||
}),
|
||||
);
|
||||
|
||||
return files2;
|
||||
};
|
||||
|
||||
@@ -3,15 +3,30 @@ const path = require('path');
|
||||
const vanillaGlob = require('glob');
|
||||
const FileFsRef = require('../file-fs-ref.js');
|
||||
|
||||
/** @typedef {import('fs').Stats} Stats */
|
||||
/** @typedef {import('glob').IOptions} GlobOptions */
|
||||
/** @typedef {import('../file-fs-ref').FsFiles|{}} GlobFiles */
|
||||
|
||||
/**
|
||||
* @argument {string} pattern
|
||||
* @argument {GlobOptions|string} opts
|
||||
* @argument {string} [mountpoint]
|
||||
* @returns {Promise<GlobFiles>}
|
||||
*/
|
||||
module.exports = function glob(pattern, opts = {}, mountpoint) {
|
||||
return new Promise((resolve, reject) => {
|
||||
let options = opts;
|
||||
if (typeof options === 'string') {
|
||||
options = { cwd: options };
|
||||
/** @type {GlobOptions} */
|
||||
let options;
|
||||
if (typeof opts === 'string') {
|
||||
options = { cwd: opts };
|
||||
} else {
|
||||
options = opts;
|
||||
}
|
||||
|
||||
if (!options.cwd) {
|
||||
throw new Error('Second argument (basePath) must be specified for names of resulting files');
|
||||
throw new Error(
|
||||
'Second argument (basePath) must be specified for names of resulting files',
|
||||
);
|
||||
}
|
||||
|
||||
if (!path.isAbsolute(options.cwd)) {
|
||||
@@ -26,21 +41,27 @@ module.exports = function glob(pattern, opts = {}, mountpoint) {
|
||||
vanillaGlob(pattern, options, (error, files) => {
|
||||
if (error) return reject(error);
|
||||
|
||||
resolve(files.reduce((files2, relativePath) => {
|
||||
const fsPath = path.join(options.cwd, relativePath);
|
||||
const stat = options.statCache[fsPath];
|
||||
assert(stat, `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`);
|
||||
if (stat.isFile()) {
|
||||
let finalPath = relativePath;
|
||||
if (mountpoint) finalPath = path.join(mountpoint, finalPath);
|
||||
return {
|
||||
...files2,
|
||||
[finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
|
||||
};
|
||||
}
|
||||
resolve(
|
||||
files.reduce((files2, relativePath) => {
|
||||
const fsPath = path.join(options.cwd, relativePath);
|
||||
/** @type {Stats|any} */
|
||||
const stat = options.statCache[fsPath];
|
||||
assert(
|
||||
stat,
|
||||
`statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
|
||||
);
|
||||
if (stat && stat.isFile()) {
|
||||
let finalPath = relativePath;
|
||||
if (mountpoint) finalPath = path.join(mountpoint, finalPath);
|
||||
return {
|
||||
...files2,
|
||||
[finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
|
||||
};
|
||||
}
|
||||
|
||||
return files2;
|
||||
}, {}));
|
||||
return files2;
|
||||
}, {}),
|
||||
);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
@@ -1,6 +1,25 @@
|
||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
||||
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
|
||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
||||
|
||||
/**
|
||||
* @callback delegate
|
||||
* @argument {string} name
|
||||
* @returns {string}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Rename files using delegate function
|
||||
* @argument {Files} files
|
||||
* @argument {delegate} delegate
|
||||
* @returns {Files}
|
||||
*/
|
||||
module.exports = function rename(files, delegate) {
|
||||
return Object.keys(files).reduce((newFiles, name) => ({
|
||||
...newFiles,
|
||||
[delegate(name)]: files[name],
|
||||
}), {});
|
||||
return Object.keys(files).reduce(
|
||||
(newFiles, name) => ({
|
||||
...newFiles,
|
||||
[delegate(name)]: files[name],
|
||||
}),
|
||||
{},
|
||||
);
|
||||
};
|
||||
|
||||
@@ -6,7 +6,9 @@ function spawnAsync(command, args, cwd) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const child = spawn(command, args, { stdio: 'inherit', cwd });
|
||||
child.on('error', reject);
|
||||
child.on('close', (code, signal) => (code !== 0 ? reject(new Error(`Exited with ${code || signal}`)) : resolve()));
|
||||
child.on('close', (code, signal) => (code !== 0
|
||||
? reject(new Error(`Exited with ${code || signal}`))
|
||||
: resolve()));
|
||||
});
|
||||
}
|
||||
|
||||
@@ -43,8 +45,10 @@ async function runNpmInstall(destPath, args = []) {
|
||||
if (await shouldUseNpm(destPath)) {
|
||||
commandArgs = args.filter(a => a !== '--prefer-offline');
|
||||
await spawnAsync('npm', ['install'].concat(commandArgs), destPath);
|
||||
await spawnAsync('npm', ['cache', 'clean', '--force'], destPath);
|
||||
} else {
|
||||
await spawnAsync('yarn', ['--cwd', destPath].concat(commandArgs), destPath);
|
||||
await spawnAsync('yarn', ['cache', 'clean'], destPath);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -55,7 +59,11 @@ async function runPackageJsonScript(destPath, scriptName) {
|
||||
await spawnAsync('npm', ['run', scriptName], destPath);
|
||||
} else {
|
||||
console.log(`running "yarn run ${scriptName}"`);
|
||||
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath);
|
||||
await spawnAsync(
|
||||
'yarn',
|
||||
['--cwd', destPath, 'run', scriptName],
|
||||
destPath,
|
||||
);
|
||||
}
|
||||
} catch (error) {
|
||||
console.log(error.message);
|
||||
|
||||
@@ -25,16 +25,21 @@ async function createLambda({
|
||||
assert(typeof environment === 'object', '"environment" is not an object');
|
||||
const zipFile = new ZipFile();
|
||||
|
||||
Object.keys(files).sort().forEach((name) => {
|
||||
const file = files[name];
|
||||
const stream = file.toStream();
|
||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
||||
});
|
||||
Object.keys(files)
|
||||
.sort()
|
||||
.forEach((name) => {
|
||||
const file = files[name];
|
||||
const stream = file.toStream();
|
||||
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
|
||||
});
|
||||
|
||||
zipFile.end();
|
||||
const zipBuffer = await streamToBuffer(zipFile.outputStream);
|
||||
return new Lambda({
|
||||
zipBuffer, handler, runtime, environment,
|
||||
zipBuffer,
|
||||
handler,
|
||||
runtime,
|
||||
environment,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/build-utils",
|
||||
"version": "0.4.29-canary.0",
|
||||
"version": "0.4.29",
|
||||
"dependencies": {
|
||||
"async-retry": "1.2.3",
|
||||
"async-sema": "2.1.4",
|
||||
|
||||
@@ -9,34 +9,34 @@ const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory
|
||||
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
console.log('downloading files...');
|
||||
const srcDir = await getWritableDirectory();
|
||||
const outDir = await getWritableDirectory();
|
||||
console.log('downloading files...');
|
||||
const srcDir = await getWritableDirectory();
|
||||
const outDir = await getWritableDirectory();
|
||||
|
||||
await download(files, srcDir);
|
||||
await download(files, srcDir);
|
||||
|
||||
const handlerPath = path.join(__dirname, 'handler');
|
||||
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
||||
const handlerPath = path.join(__dirname, 'handler');
|
||||
await copyFile(handlerPath, path.join(outDir, 'handler'));
|
||||
|
||||
const entrypointOutDir = path.join(outDir, path.dirname(entrypoint));
|
||||
await mkdirp(entrypointOutDir);
|
||||
const entrypointOutDir = path.join(outDir, path.dirname(entrypoint));
|
||||
await mkdirp(entrypointOutDir);
|
||||
|
||||
// For now only the entrypoint file is copied into the lambda
|
||||
await copyFile(
|
||||
path.join(srcDir, entrypoint),
|
||||
path.join(outDir, entrypoint)
|
||||
);
|
||||
// For now only the entrypoint file is copied into the lambda
|
||||
await copyFile(
|
||||
path.join(srcDir, entrypoint),
|
||||
path.join(outDir, entrypoint),
|
||||
);
|
||||
|
||||
const lambda = await createLambda({
|
||||
files: await glob('**', outDir),
|
||||
handler: 'handler',
|
||||
runtime: 'go1.x',
|
||||
environment: {
|
||||
SCRIPT_FILENAME: entrypoint
|
||||
}
|
||||
});
|
||||
const lambda = await createLambda({
|
||||
files: await glob('**', outDir),
|
||||
handler: 'handler',
|
||||
runtime: 'go1.x',
|
||||
environment: {
|
||||
SCRIPT_FILENAME: entrypoint,
|
||||
},
|
||||
});
|
||||
|
||||
return {
|
||||
[entrypoint]: lambda
|
||||
};
|
||||
return {
|
||||
[entrypoint]: lambda,
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/cgi",
|
||||
"version": "0.0.12-canary.0",
|
||||
"version": "0.0.12",
|
||||
"scripts": {
|
||||
"test": "best -I test/*.js",
|
||||
"prepublish": "./build.sh"
|
||||
|
||||
@@ -18,7 +18,7 @@ async function createGoPathTree(goPath) {
|
||||
}
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '10mb'
|
||||
maxLambdaSize: '10mb',
|
||||
};
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
@@ -61,15 +61,25 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
}
|
||||
|
||||
if (handlerFunctionName === '') {
|
||||
const e = new Error(`Could not find an exported function on "${entrypoint}"`);
|
||||
const e = new Error(
|
||||
`Could not find an exported function on "${entrypoint}"`,
|
||||
);
|
||||
console.log(e.message);
|
||||
throw e;
|
||||
}
|
||||
|
||||
console.log(`Found exported function "${handlerFunctionName}" on "${entrypoint}"`);
|
||||
console.log(
|
||||
`Found exported function "${handlerFunctionName}" on "${entrypoint}"`,
|
||||
);
|
||||
|
||||
const origianlMainGoContents = await readFile(path.join(__dirname, 'main.go'), 'utf8');
|
||||
const mainGoContents = origianlMainGoContents.replace('__NOW_HANDLER_FUNC_NAME', handlerFunctionName);
|
||||
const origianlMainGoContents = await readFile(
|
||||
path.join(__dirname, 'main.go'),
|
||||
'utf8',
|
||||
);
|
||||
const mainGoContents = origianlMainGoContents.replace(
|
||||
'__NOW_HANDLER_FUNC_NAME',
|
||||
handlerFunctionName,
|
||||
);
|
||||
// in order to allow the user to have `main.go`, we need our `main.go` to be called something else
|
||||
const mainGoFileName = 'main__now__go__.go';
|
||||
|
||||
@@ -81,12 +91,15 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
// so now we place `main.go` together with the user code
|
||||
await writeFile(path.join(entrypointDirname, mainGoFileName), mainGoContents);
|
||||
|
||||
|
||||
console.log('installing dependencies');
|
||||
// `go get` will look at `*.go` (note we set `cwd`), parse
|
||||
// the `import`s and download any packages that aren't part of the stdlib
|
||||
try {
|
||||
await execa(goBin, ['get'], { env: goEnv, cwd: entrypointDirname, stdio: 'inherit' });
|
||||
await execa(goBin, ['get'], {
|
||||
env: goEnv,
|
||||
cwd: entrypointDirname,
|
||||
stdio: 'inherit',
|
||||
});
|
||||
} catch (err) {
|
||||
console.log('failed to `go get`');
|
||||
throw err;
|
||||
@@ -94,11 +107,17 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
|
||||
console.log('running go build...');
|
||||
try {
|
||||
await execa(goBin, [
|
||||
'build',
|
||||
'-o', path.join(outDir, 'handler'),
|
||||
path.join(entrypointDirname, mainGoFileName), downloadedFiles[entrypoint].fsPath,
|
||||
], { env: goEnv, cwd: entrypointDirname, stdio: 'inherit' });
|
||||
await execa(
|
||||
goBin,
|
||||
[
|
||||
'build',
|
||||
'-o',
|
||||
path.join(outDir, 'handler'),
|
||||
path.join(entrypointDirname, mainGoFileName),
|
||||
downloadedFiles[entrypoint].fsPath,
|
||||
],
|
||||
{ env: goEnv, cwd: entrypointDirname, stdio: 'inherit' },
|
||||
);
|
||||
} catch (err) {
|
||||
console.log('failed to `go build`');
|
||||
throw err;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/go",
|
||||
"version": "0.2.9-canary.0",
|
||||
"version": "0.2.9",
|
||||
"scripts": {
|
||||
"test": "best -I test/*.js",
|
||||
"prepublish": "./build.sh"
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/html-minifier",
|
||||
"version": "1.0.4",
|
||||
"version": "1.0.5",
|
||||
"dependencies": {
|
||||
"html-minifier": "3.5.21"
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/lambda",
|
||||
"version": "0.4.5",
|
||||
"version": "0.4.6",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
|
||||
@@ -22,12 +22,17 @@ exports.build = async ({ files, entrypoint, config }) => {
|
||||
.use(markdown)
|
||||
.use(remark2rehype)
|
||||
.use(doc, {
|
||||
title, language, meta, css,
|
||||
title,
|
||||
language,
|
||||
meta,
|
||||
css,
|
||||
})
|
||||
.use(format)
|
||||
.use(html);
|
||||
|
||||
const result = await FileBlob.fromStream({ stream: stream.pipe(unifiedStream(processor)) });
|
||||
const result = await FileBlob.fromStream({
|
||||
stream: stream.pipe(unifiedStream(processor)),
|
||||
});
|
||||
|
||||
console.log(result.data.toString());
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/md",
|
||||
"version": "0.4.5",
|
||||
"version": "0.4.6",
|
||||
"dependencies": {
|
||||
"rehype-document": "^2.2.0",
|
||||
"rehype-format": "^2.3.0",
|
||||
|
||||
@@ -19,13 +19,19 @@ exports.build = async ({ files, entrypoint, workPath }) => {
|
||||
await writeFile(packageJsonPath, JSON.stringify(packageJson));
|
||||
console.log('running npm install...');
|
||||
process.env.PUPPETEER_SKIP_CHROMIUM_DOWNLOAD = '1'; // TODO opts argument for runNpmInstall
|
||||
await runNpmInstall(path.dirname(packageJsonPath), ['--prod', '--prefer-offline']);
|
||||
await runNpmInstall(path.dirname(packageJsonPath), [
|
||||
'--prod',
|
||||
'--prefer-offline',
|
||||
]);
|
||||
console.log('building...');
|
||||
const outDir = await getWritableDirectory();
|
||||
const entrypointFsPath = downloadedFiles[entrypoint].fsPath;
|
||||
const mountpoint = path.dirname(entrypoint);
|
||||
|
||||
const build = require(path.join(workPath, 'node_modules/mdx-deck/lib/build.js'));
|
||||
const build = require(path.join(
|
||||
workPath,
|
||||
'node_modules/mdx-deck/lib/build.js',
|
||||
));
|
||||
|
||||
await build({
|
||||
html: true,
|
||||
@@ -48,8 +54,8 @@ exports.prepareCache = async ({ cachePath }) => {
|
||||
await runNpmInstall(path.dirname(packageJsonPath), ['--prod']);
|
||||
|
||||
return {
|
||||
...await glob('node_modules/**', cachePath),
|
||||
...await glob('package-lock.json', cachePath),
|
||||
...await glob('yarn.lock', cachePath),
|
||||
...(await glob('node_modules/**', cachePath)),
|
||||
...(await glob('package-lock.json', cachePath)),
|
||||
...(await glob('yarn.lock', cachePath)),
|
||||
};
|
||||
};
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/mdx-deck",
|
||||
"version": "0.4.14",
|
||||
"version": "0.4.15",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
|
||||
@@ -4,128 +4,100 @@ const FileFsRef = require('@now/build-utils/file-fs-ref.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob');
|
||||
const path = require('path');
|
||||
const { readFile, writeFile, unlink } = require('fs.promised');
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
const {
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
moveEntryDirectoryToRoot,
|
||||
excludeLockFiles,
|
||||
normalizePackageJson,
|
||||
excludeStaticDirectory,
|
||||
} = require('./utils');
|
||||
|
||||
// Exclude certain files from the files object
|
||||
function excludeFiles(files, matchFn) {
|
||||
return Object.keys(files).reduce((newFiles, fileName) => {
|
||||
if (matchFn(fileName)) {
|
||||
return newFiles;
|
||||
}
|
||||
return {
|
||||
...newFiles,
|
||||
[fileName]: files[fileName],
|
||||
};
|
||||
}, {});
|
||||
/** @typedef { import('@now/build-utils/file-ref').Files } Files */
|
||||
/** @typedef { import('@now/build-utils/fs/download').DownloadedFiles } DownloadedFiles */
|
||||
|
||||
/**
|
||||
* @typedef {Object} BuildParamsType
|
||||
* @property {Files} files - Files object
|
||||
* @property {string} entrypoint - Entrypoint specified for the builder
|
||||
* @property {string} workPath - Working directory for this build
|
||||
*/
|
||||
|
||||
/**
|
||||
* Read package.json from files
|
||||
* @param {DownloadedFiles} files
|
||||
*/
|
||||
async function readPackageJson(files) {
|
||||
if (!files['package.json']) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const packageJsonPath = files['package.json'].fsPath;
|
||||
return JSON.parse(await readFile(packageJsonPath, 'utf8'));
|
||||
}
|
||||
|
||||
function shouldExcludeFile(entryDirectory) {
|
||||
return (file) => {
|
||||
// If the file is not in the entry directory
|
||||
if (entryDirectory !== '.' && !file.startsWith(entryDirectory)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Exclude static directory
|
||||
if (file.startsWith(path.join(entryDirectory, 'static'))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (file === 'package-lock.json') {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (file === 'yarn.lock') {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
}
|
||||
|
||||
exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
if (
|
||||
!/package\.json$/.exec(entrypoint)
|
||||
&& !/next\.config\.js$/.exec(entrypoint)
|
||||
) {
|
||||
throw new Error(
|
||||
'Specified "src" for "@now/next" has to be "package.json" or "next.config.js"',
|
||||
);
|
||||
}
|
||||
|
||||
console.log('downloading user files...');
|
||||
const entryDirectory = path.dirname(entrypoint);
|
||||
const filesToDownload = excludeFiles(
|
||||
files,
|
||||
shouldExcludeFile(entryDirectory),
|
||||
);
|
||||
const entrypointHandledFilesToDownload = rename(filesToDownload, (file) => {
|
||||
if (entryDirectory !== '.') {
|
||||
return file.replace(new RegExp(`^${entryDirectory}/`), '');
|
||||
}
|
||||
return file;
|
||||
});
|
||||
let downloadedFiles = await download(
|
||||
entrypointHandledFilesToDownload,
|
||||
workPath,
|
||||
);
|
||||
|
||||
let packageJson = {};
|
||||
if (downloadedFiles['package.json']) {
|
||||
console.log('found package.json, overwriting');
|
||||
const packageJsonPath = downloadedFiles['package.json'].fsPath;
|
||||
packageJson = JSON.parse(await readFile(packageJsonPath, 'utf8'));
|
||||
}
|
||||
|
||||
packageJson = {
|
||||
...packageJson,
|
||||
dependencies: {
|
||||
...packageJson.dependencies,
|
||||
'next-server': 'canary',
|
||||
},
|
||||
devDependencies: {
|
||||
...packageJson.devDependencies,
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
...packageJson.scripts,
|
||||
'now-build': 'next build',
|
||||
},
|
||||
};
|
||||
|
||||
if (!packageJson.dependencies.react) {
|
||||
console.log(
|
||||
'"react" not found in dependencies, adding to "package.json" "dependencies"',
|
||||
);
|
||||
packageJson.dependencies.react = 'latest';
|
||||
}
|
||||
if (!packageJson.dependencies['react-dom']) {
|
||||
console.log(
|
||||
'"react-dom" not found in dependencies, adding to "package.json" "dependencies"',
|
||||
);
|
||||
packageJson.dependencies['react-dom'] = 'latest';
|
||||
}
|
||||
|
||||
// in case the user has `next` on their `dependencies`, we remove it
|
||||
delete packageJson.dependencies.next;
|
||||
|
||||
/**
|
||||
* Write package.json
|
||||
* @param {string} workPath
|
||||
* @param {Object} packageJson
|
||||
*/
|
||||
async function writePackageJson(workPath, packageJson) {
|
||||
await writeFile(
|
||||
path.join(workPath, 'package.json'),
|
||||
JSON.stringify(packageJson, null, 2),
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Write .npmrc with npm auth token
|
||||
* @param {string} workPath
|
||||
* @param {string} token
|
||||
*/
|
||||
async function writeNpmRc(workPath, token) {
|
||||
await writeFile(
|
||||
path.join(workPath, '.npmrc'),
|
||||
`//registry.npmjs.org/:_authToken=${token}`,
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* @param {BuildParamsType} buildParams
|
||||
* @returns {Promise<Files>}
|
||||
*/
|
||||
exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
validateEntrypoint(entrypoint);
|
||||
|
||||
console.log('downloading user files...');
|
||||
const entryDirectory = path.dirname(entrypoint);
|
||||
const filesOnlyEntryDirectory = includeOnlyEntryDirectory(
|
||||
files,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithEntryDirectoryRoot = moveEntryDirectoryToRoot(
|
||||
filesOnlyEntryDirectory,
|
||||
entryDirectory,
|
||||
);
|
||||
const filesWithoutLockfiles = excludeLockFiles(filesWithEntryDirectoryRoot);
|
||||
const filesWithoutStaticDirectory = excludeStaticDirectory(
|
||||
filesWithoutLockfiles,
|
||||
);
|
||||
let downloadedFiles = await download(filesWithoutStaticDirectory, workPath);
|
||||
|
||||
console.log('normalizing package.json');
|
||||
const packageJson = normalizePackageJson(readPackageJson(downloadedFiles));
|
||||
await writePackageJson(workPath, packageJson);
|
||||
|
||||
if (process.env.NPM_AUTH_TOKEN) {
|
||||
console.log('found NPM_AUTH_TOKEN in environment, creating .npmrc');
|
||||
await writeFile(
|
||||
path.join(workPath, '.npmrc'),
|
||||
`//registry.npmjs.org/:_authToken=${process.env.NPM_AUTH_TOKEN}`,
|
||||
);
|
||||
await writeNpmRc(workPath, process.env.NPM_AUTH_TOKEN);
|
||||
}
|
||||
|
||||
downloadedFiles = await glob('**', workPath);
|
||||
|
||||
console.log('running npm install...');
|
||||
@@ -137,6 +109,7 @@ exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
if (process.env.NPM_AUTH_TOKEN) {
|
||||
await unlink(path.join(workPath, '.npmrc'));
|
||||
}
|
||||
|
||||
downloadedFiles = await glob('**', workPath);
|
||||
|
||||
console.log('preparing lambda files...');
|
||||
@@ -203,6 +176,7 @@ exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
],
|
||||
};
|
||||
|
||||
console.log(`Creating lambda for page: "${page}"...`);
|
||||
lambdas[path.join(entryDirectory, pathname)] = await createLambda({
|
||||
files: {
|
||||
...nextFiles,
|
||||
@@ -212,6 +186,7 @@ exports.build = async ({ files, workPath, entrypoint }) => {
|
||||
handler: 'now__launcher.launcher',
|
||||
runtime: 'nodejs8.10',
|
||||
});
|
||||
console.log(`Created lambda for page: "${page}"`);
|
||||
}),
|
||||
);
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/next",
|
||||
"version": "0.0.78-canary.0",
|
||||
"version": "0.0.78",
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "0.1.4",
|
||||
"execa": "^1.0.0",
|
||||
|
||||
151
packages/now-next/utils.js
Normal file
151
packages/now-next/utils.js
Normal file
@@ -0,0 +1,151 @@
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
|
||||
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
|
||||
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
|
||||
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
|
||||
|
||||
/**
|
||||
* Validate if the entrypoint is allowed to be used
|
||||
* @param {string} entrypoint
|
||||
* @throws {Error}
|
||||
*/
|
||||
function validateEntrypoint(entrypoint) {
|
||||
if (
|
||||
!/package\.json$/.exec(entrypoint)
|
||||
&& !/next\.config\.js$/.exec(entrypoint)
|
||||
) {
|
||||
throw new Error(
|
||||
'Specified "src" for "@now/next" has to be "package.json" or "next.config.js"',
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* This callback type is called `requestCallback` and is displayed as a global symbol.
|
||||
*
|
||||
* @callback matcher
|
||||
* @param {string} filePath
|
||||
* @returns {boolean}
|
||||
*/
|
||||
|
||||
/**
|
||||
* Exclude certain files from the files object
|
||||
* @param {Files} files
|
||||
* @param {matcher} matcher
|
||||
* @returns {Files}
|
||||
*/
|
||||
function excludeFiles(files, matcher) {
|
||||
return Object.keys(files).reduce((newFiles, filePath) => {
|
||||
if (matcher(filePath)) {
|
||||
return newFiles;
|
||||
}
|
||||
return {
|
||||
...newFiles,
|
||||
[filePath]: files[filePath],
|
||||
};
|
||||
}, {});
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new Files object holding only the entrypoint files
|
||||
* @param {Files} files
|
||||
* @param {string} entryDirectory
|
||||
* @returns {Files}
|
||||
*/
|
||||
function includeOnlyEntryDirectory(files, entryDirectory) {
|
||||
if (entryDirectory === '.') {
|
||||
return files;
|
||||
}
|
||||
|
||||
function matcher(filePath) {
|
||||
return !filePath.startsWith(entryDirectory);
|
||||
}
|
||||
|
||||
return excludeFiles(files, matcher);
|
||||
}
|
||||
|
||||
/**
|
||||
* Moves all files under the entry directory to the root directory
|
||||
* @param {Files} files
|
||||
* @param {string} entryDirectory
|
||||
* @returns {Files}
|
||||
*/
|
||||
function moveEntryDirectoryToRoot(files, entryDirectory) {
|
||||
if (entryDirectory === '.') {
|
||||
return files;
|
||||
}
|
||||
|
||||
function delegate(filePath) {
|
||||
return filePath.replace(new RegExp(`^${entryDirectory}/`), '');
|
||||
}
|
||||
|
||||
return rename(files, delegate);
|
||||
}
|
||||
|
||||
/**
|
||||
* Exclude package manager lockfiles from files
|
||||
* @param {Files} files
|
||||
* @returns {Files}
|
||||
*/
|
||||
function excludeLockFiles(files) {
|
||||
const newFiles = files;
|
||||
if (newFiles['package-lock.json']) {
|
||||
delete newFiles['package-lock.json'];
|
||||
}
|
||||
if (newFiles['yarn.lock']) {
|
||||
delete newFiles['yarn.lock'];
|
||||
}
|
||||
return files;
|
||||
}
|
||||
|
||||
/**
 * Exclude the top-level static directory from files.
 * @param {Files} files - map of file path -> file reference
 * @returns {Files} a new Files object without anything under "static/"
 */
function excludeStaticDirectory(files) {
  const newFiles = {};
  Object.keys(files).forEach((filePath) => {
    // Match the directory segment exactly: a bare startsWith('static')
    // would also drop unrelated paths such as "static-assets/logo.png".
    if (!filePath.startsWith('static/')) {
      newFiles[filePath] = files[filePath];
    }
  });
  return newFiles;
}
|
||||
|
||||
/**
 * Enforce specific package.json configuration for smallest possible lambda.
 *
 * - react / react-dom default to "latest" but may be overridden by the user
 * - next-server is forced to "canary" in dependencies (and removed from
 *   devDependencies)
 * - next is forced to "canary" in devDependencies (and removed from
 *   dependencies)
 * - the "now-build" script is forced to "next build"
 *
 * @param {Object} defaultPackageJson - the user's package.json contents
 * @returns {Object} the normalized package.json contents
 */
function normalizePackageJson(defaultPackageJson = {}) {
  // Destructure out the keys that must not survive in their section, so the
  // result carries no leftover `next: undefined` own properties (the old
  // `next: undefined` spread kept the key visible to `in` / Object.keys).
  const { next, ...dependencies } = defaultPackageJson.dependencies || {};
  const {
    'next-server': nextServer,
    ...devDependencies
  } = defaultPackageJson.devDependencies || {};

  return {
    ...defaultPackageJson,
    dependencies: {
      // react and react-dom can be overwritten
      react: 'latest',
      'react-dom': 'latest',
      ...dependencies,
      // next-server is forced to canary
      'next-server': 'canary',
    },
    devDependencies: {
      ...devDependencies,
      // next is forced to canary
      next: 'canary',
    },
    scripts: {
      ...defaultPackageJson.scripts,
      'now-build': 'next build',
    },
  };
}
|
||||
|
||||
module.exports = {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
moveEntryDirectoryToRoot,
|
||||
excludeLockFiles,
|
||||
normalizePackageJson,
|
||||
excludeStaticDirectory,
|
||||
};
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/node-bridge",
|
||||
"version": "0.1.8-canary.0",
|
||||
"version": "0.1.8",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@ const { createLambda } = require('@now/build-utils/lambda.js');
|
||||
const download = require('@now/build-utils/fs/download.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob.js');
|
||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js');
|
||||
const fs = require('fs');
|
||||
const fs = require('fs-extra');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const path = require('path');
|
||||
const { promisify } = require('util');
|
||||
@@ -37,7 +37,7 @@ async function commonForTwo({
|
||||
'package.json': new FileBlob({
|
||||
data: JSON.stringify({
|
||||
dependencies: {
|
||||
'@zeit/ncc': '0.1.3-webpack',
|
||||
'@zeit/ncc': '0.1.4-webpack',
|
||||
},
|
||||
}),
|
||||
}),
|
||||
@@ -101,7 +101,10 @@ exports.build = async ({ files, entrypoint, workPath }) => {
|
||||
return { [entrypoint]: lambda };
|
||||
};
|
||||
|
||||
exports.prepareCache = async ({ files, entrypoint, cachePath }) => {
|
||||
exports.prepareCache = async ({
|
||||
files, entrypoint, workPath, cachePath,
|
||||
}) => {
|
||||
await fs.remove(workPath);
|
||||
await commonForTwo({ files, entrypoint, cachePath });
|
||||
|
||||
return {
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
{
|
||||
"name": "@now/node-server",
|
||||
"version": "0.4.23-canary.1",
|
||||
"version": "0.4.23",
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "^0.1.8-canary.0"
|
||||
"@now/node-bridge": "^0.1.8",
|
||||
"fs-extra": "7.0.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
|
||||
@@ -2,6 +2,7 @@ const { createLambda } = require('@now/build-utils/lambda.js');
|
||||
const download = require('@now/build-utils/fs/download.js');
|
||||
const FileBlob = require('@now/build-utils/file-blob.js');
|
||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js');
|
||||
const fsExtra = require('fs-extra');
|
||||
const fs = require('fs');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const path = require('path');
|
||||
@@ -30,7 +31,7 @@ const readFile = promisify(fs.readFile);
|
||||
*/
|
||||
async function downloadInstallAndBundle(
|
||||
{ files, entrypoint, workPath },
|
||||
{ npmArguments = [] },
|
||||
{ npmArguments = [] } = {},
|
||||
) {
|
||||
const userPath = path.join(workPath, 'user');
|
||||
const nccPath = path.join(workPath, 'ncc');
|
||||
@@ -48,7 +49,7 @@ async function downloadInstallAndBundle(
|
||||
'package.json': new FileBlob({
|
||||
data: JSON.stringify({
|
||||
dependencies: {
|
||||
'@zeit/ncc': '0.1.3-webpack',
|
||||
'@zeit/ncc': '0.1.4-webpack',
|
||||
},
|
||||
}),
|
||||
}),
|
||||
@@ -119,7 +120,10 @@ exports.build = async ({ files, entrypoint, workPath }) => {
|
||||
return { [entrypoint]: lambda };
|
||||
};
|
||||
|
||||
exports.prepareCache = async ({ files, entrypoint, cachePath }) => {
|
||||
exports.prepareCache = async ({
|
||||
files, entrypoint, workPath, cachePath,
|
||||
}) => {
|
||||
await fsExtra.remove(workPath);
|
||||
await downloadInstallAndBundle({ files, entrypoint, workPath: cachePath });
|
||||
|
||||
return {
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
{
|
||||
"name": "@now/node",
|
||||
"version": "0.4.25-canary.1",
|
||||
"version": "0.4.25",
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "^0.1.8-canary.0"
|
||||
"@now/node-bridge": "^0.1.8",
|
||||
"fs-extra": "7.0.1"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
|
||||
@@ -6,7 +6,10 @@ exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
const optimizer = new OptiPng(['-o9']);
|
||||
const stream = pipe(files[entrypoint].toStream(), optimizer);
|
||||
const stream = pipe(
|
||||
files[entrypoint].toStream(),
|
||||
optimizer,
|
||||
);
|
||||
const result = await FileBlob.fromStream({ stream });
|
||||
return { [entrypoint]: result };
|
||||
};
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/optipng",
|
||||
"version": "0.4.5",
|
||||
"version": "0.4.6",
|
||||
"dependencies": {
|
||||
"multipipe": "2.0.3",
|
||||
"optipng": "1.1.0"
|
||||
|
||||
@@ -4,7 +4,7 @@ const path = require('path');
|
||||
const rename = require('@now/build-utils/fs/rename.js');
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '10mb'
|
||||
maxLambdaSize: '10mb',
|
||||
};
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/php",
|
||||
"version": "0.4.10-canary.0",
|
||||
"version": "0.4.10",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
|
||||
@@ -37,7 +37,9 @@ async function downloadAndInstallPip() {
|
||||
// installed to. `--user` will assume `~` if this
|
||||
// is not set, and `~` is not writeable on AWS Lambda.
|
||||
// let's refuse to proceed
|
||||
throw new Error('Could not install "pip": "PYTHONUSERBASE" env var is not set');
|
||||
throw new Error(
|
||||
'Could not install "pip": "PYTHONUSERBASE" env var is not set',
|
||||
);
|
||||
}
|
||||
const getPipFilePath = await downloadGetPipScript();
|
||||
|
||||
|
||||
@@ -10,15 +10,9 @@ const downloadAndInstallPip = require('./download-and-install-pip');
|
||||
async function pipInstall(pipPath, srcDir, ...args) {
|
||||
console.log(`running "pip install -t ${srcDir} ${args.join(' ')}"...`);
|
||||
try {
|
||||
await execa(
|
||||
pipPath,
|
||||
[
|
||||
'install',
|
||||
'-t', srcDir,
|
||||
...args,
|
||||
],
|
||||
{ stdio: 'inherit' },
|
||||
);
|
||||
await execa(pipPath, ['install', '-t', srcDir, ...args], {
|
||||
stdio: 'inherit',
|
||||
});
|
||||
} catch (err) {
|
||||
console.log(`failed to run "pip install -t ${srcDir} ${args.join(' ')}"`);
|
||||
throw err;
|
||||
@@ -26,7 +20,7 @@ async function pipInstall(pipPath, srcDir, ...args) {
|
||||
}
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '5mb'
|
||||
maxLambdaSize: '5mb',
|
||||
};
|
||||
|
||||
exports.build = async ({ files, entrypoint }) => {
|
||||
@@ -53,11 +47,16 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
await pipInstall(pipPath, srcDir, '-r', requirementsTxtPath);
|
||||
}
|
||||
|
||||
const originalNowHandlerPyContents = await readFile(path.join(__dirname, 'now_handler.py'), 'utf8');
|
||||
const originalNowHandlerPyContents = await readFile(
|
||||
path.join(__dirname, 'now_handler.py'),
|
||||
'utf8',
|
||||
);
|
||||
// will be used on `from $here import handler`
|
||||
// for example, `from api.users import handler`
|
||||
console.log('entrypoint is', entrypoint);
|
||||
const userHandlerFilePath = entrypoint.replace(/\//g, '.').replace(/\.py$/, '');
|
||||
const userHandlerFilePath = entrypoint
|
||||
.replace(/\//g, '.')
|
||||
.replace(/\.py$/, '');
|
||||
const nowHandlerPyContents = originalNowHandlerPyContents.replace(
|
||||
'__NOW_HANDLER_FILENAME',
|
||||
userHandlerFilePath,
|
||||
@@ -67,7 +66,10 @@ exports.build = async ({ files, entrypoint }) => {
|
||||
// somethig else
|
||||
const nowHandlerPyFilename = 'now__handler__python';
|
||||
|
||||
await writeFile(path.join(srcDir, `${nowHandlerPyFilename}.py`), nowHandlerPyContents);
|
||||
await writeFile(
|
||||
path.join(srcDir, `${nowHandlerPyFilename}.py`),
|
||||
nowHandlerPyContents,
|
||||
);
|
||||
|
||||
const lambda = await createLambda({
|
||||
files: await glob('**', srcDir),
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/python",
|
||||
"version": "0.0.39-canary.0",
|
||||
"version": "0.0.39",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
|
||||
@@ -2,7 +2,9 @@ const download = require('@now/build-utils/fs/download.js');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const path = require('path');
|
||||
const {
|
||||
runNpmInstall, runPackageJsonScript, runShellScript,
|
||||
runNpmInstall,
|
||||
runPackageJsonScript,
|
||||
runShellScript,
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
|
||||
exports.build = async ({
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/static-build",
|
||||
"version": "0.4.13",
|
||||
"version": "0.4.14",
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
|
||||
292
test/unit/now-next/utils.test.js
Normal file
292
test/unit/now-next/utils.test.js
Normal file
@@ -0,0 +1,292 @@
|
||||
const {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
moveEntryDirectoryToRoot,
|
||||
excludeLockFiles,
|
||||
normalizePackageJson,
|
||||
excludeStaticDirectory,
|
||||
} = require('@now/next/utils');
|
||||
const FileRef = require('@now/build-utils/file-ref');
|
||||
|
||||
describe('excludeFiles', () => {
|
||||
it('should exclude files', () => {
|
||||
const files = {
|
||||
'pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'package-lock.json': new FileRef({ digest: 'package-lock' }),
|
||||
};
|
||||
const result = excludeFiles(
|
||||
files,
|
||||
filePath => filePath === 'package-lock.json',
|
||||
);
|
||||
expect(result['pages/index.js']).toBeDefined();
|
||||
expect(result['package.json']).toBeDefined();
|
||||
expect(result['package-lock.json']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('validateEntrypoint', () => {
|
||||
it('should allow package.json', () => {
|
||||
expect(validateEntrypoint('package.json')).toBeUndefined();
|
||||
});
|
||||
it('should allow nested package.json', () => {
|
||||
expect(validateEntrypoint('frontend/package.json')).toBeUndefined();
|
||||
});
|
||||
it('should allow next.config.js', () => {
|
||||
expect(validateEntrypoint('next.config.js')).toBeUndefined();
|
||||
});
|
||||
it('should allow nested next.config.js', () => {
|
||||
expect(validateEntrypoint('frontend/next.config.js')).toBeUndefined();
|
||||
});
|
||||
it('should not allow pages/index.js', () => {
|
||||
expect(() => validateEntrypoint('pages/index.js')).toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
describe('includeOnlyEntryDirectory', () => {
|
||||
it('should exclude files outside entry directory', () => {
|
||||
const entryDirectory = 'frontend';
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'package-lock.json': new FileRef({ digest: 'package-lock' }),
|
||||
};
|
||||
const result = includeOnlyEntryDirectory(files, entryDirectory);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
expect(result['package.json']).toBeUndefined();
|
||||
expect(result['package-lock.json']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should handle entry directory being dot', () => {
|
||||
const entryDirectory = '.';
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'package-lock.json': new FileRef({ digest: 'package-lock' }),
|
||||
};
|
||||
const result = includeOnlyEntryDirectory(files, entryDirectory);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
expect(result['package.json']).toBeDefined();
|
||||
expect(result['package-lock.json']).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('moveEntryDirectoryToRoot', () => {
|
||||
it('should move entrydirectory files to the root', () => {
|
||||
const entryDirectory = 'frontend';
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
};
|
||||
const result = moveEntryDirectoryToRoot(files, entryDirectory);
|
||||
expect(result['pages/index.js']).toBeDefined();
|
||||
});
|
||||
|
||||
it('should work with deep nested subdirectories', () => {
|
||||
const entryDirectory = 'frontend/my/app';
|
||||
const files = {
|
||||
'frontend/my/app/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
};
|
||||
const result = moveEntryDirectoryToRoot(files, entryDirectory);
|
||||
expect(result['pages/index.js']).toBeDefined();
|
||||
});
|
||||
|
||||
it('should do nothing when entry directory is dot', () => {
|
||||
const entryDirectory = '.';
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
};
|
||||
const result = moveEntryDirectoryToRoot(files, entryDirectory);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('excludeLockFiles', () => {
|
||||
it('should remove package-lock.json', () => {
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'package-lock.json': new FileRef({ digest: 'package-lock' }),
|
||||
};
|
||||
const result = excludeLockFiles(files);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
expect(result['package-lock.json']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should remove yarn.lock', () => {
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'yarn.lock': new FileRef({ digest: 'yarn-lock' }),
|
||||
};
|
||||
const result = excludeLockFiles(files);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
expect(result['yarn.lock']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should remove both package-lock.json and yarn.lock', () => {
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'yarn.lock': new FileRef({ digest: 'yarn-lock' }),
|
||||
'package-lock.json': new FileRef({ digest: 'package-lock' }),
|
||||
};
|
||||
const result = excludeLockFiles(files);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
expect(result['yarn.lock']).toBeUndefined();
|
||||
expect(result['package-lock.json']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('excludeStaticDirectory', () => {
|
||||
it('should remove the /static directory files', () => {
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'yarn.lock': new FileRef({ digest: 'yarn-lock' }),
|
||||
'package-lock.json': new FileRef({ digest: 'package-lock' }),
|
||||
'static/image.png': new FileRef({ digest: 'image' }),
|
||||
};
|
||||
const result = excludeStaticDirectory(files);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
expect(result['yarn.lock']).toBeDefined();
|
||||
expect(result['package-lock.json']).toBeDefined();
|
||||
expect(result['static/image.png']).toBeUndefined();
|
||||
});
|
||||
|
||||
it('should remove the nested /static directory files', () => {
|
||||
const files = {
|
||||
'frontend/pages/index.js': new FileRef({ digest: 'index' }),
|
||||
'package.json': new FileRef({ digest: 'package' }),
|
||||
'yarn.lock': new FileRef({ digest: 'yarn-lock' }),
|
||||
'package-lock.json': new FileRef({ digest: 'package-lock' }),
|
||||
'static/images/png/image.png': new FileRef({ digest: 'image' }),
|
||||
};
|
||||
const result = excludeStaticDirectory(files);
|
||||
expect(result['frontend/pages/index.js']).toBeDefined();
|
||||
expect(result['yarn.lock']).toBeDefined();
|
||||
expect(result['package-lock.json']).toBeDefined();
|
||||
expect(result['static/images/png/image.png']).toBeUndefined();
|
||||
});
|
||||
});
|
||||
|
||||
describe('normalizePackageJson', () => {
|
||||
it('should work without a package.json being supplied', () => {
|
||||
const result = normalizePackageJson();
|
||||
expect(result).toEqual({
|
||||
dependencies: {
|
||||
'next-server': 'canary',
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
},
|
||||
devDependencies: {
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
'now-build': 'next build',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with a package.json being supplied', () => {
|
||||
const defaultPackage = {
|
||||
dependencies: {
|
||||
'next-server': 'canary',
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
},
|
||||
devDependencies: {
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
'now-build': 'next build',
|
||||
},
|
||||
};
|
||||
const result = normalizePackageJson(defaultPackage);
|
||||
expect(result).toEqual({
|
||||
dependencies: {
|
||||
'next-server': 'canary',
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
},
|
||||
devDependencies: {
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
'now-build': 'next build',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should force next@canary to be a devDependency', () => {
|
||||
const defaultPackage = {
|
||||
dependencies: {
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
next: 'latest',
|
||||
},
|
||||
};
|
||||
const result = normalizePackageJson(defaultPackage);
|
||||
expect(result).toEqual({
|
||||
dependencies: {
|
||||
'next-server': 'canary',
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
},
|
||||
devDependencies: {
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
'now-build': 'next build',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should force next-server@canary to be a dependency', () => {
|
||||
const defaultPackage = {
|
||||
dependencies: {
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
next: 'latest',
|
||||
},
|
||||
};
|
||||
const result = normalizePackageJson(defaultPackage);
|
||||
expect(result).toEqual({
|
||||
dependencies: {
|
||||
'next-server': 'canary',
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
},
|
||||
devDependencies: {
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
'now-build': 'next build',
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it('should force now-build script', () => {
|
||||
const defaultPackage = {
|
||||
dependencies: {
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
next: 'latest',
|
||||
},
|
||||
};
|
||||
const result = normalizePackageJson(defaultPackage);
|
||||
expect(result).toEqual({
|
||||
dependencies: {
|
||||
'next-server': 'canary',
|
||||
react: 'latest',
|
||||
'react-dom': 'latest',
|
||||
},
|
||||
devDependencies: {
|
||||
next: 'canary',
|
||||
},
|
||||
scripts: {
|
||||
'now-build': 'next build',
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -12,6 +12,14 @@
|
||||
},
|
||||
"include": [
|
||||
"./packages/now-node/index.js",
|
||||
"./packages/now-build-utils/file-ref.js"
|
||||
"./packages/now-build-utils/file-ref.js",
|
||||
"./packages/now-build-utils/file-fs-ref.js",
|
||||
"./packages/now-build-utils/fs/rename.js",
|
||||
"./packages/now-build-utils/fs/download.js",
|
||||
"./packages/now-build-utils/fs/glob.js",
|
||||
"./packages/now-next"
|
||||
],
|
||||
"exclude": [
|
||||
"./packages/now-next/launcher.js"
|
||||
]
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user