Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-23 01:49:13 +00:00)

Compare commits: @now/node-... → @now/node-... (4 commits)

Commits: 334aca3222, 5cfc350b91, eb1ae073bb, 3d5f67c620
@@ -6,8 +6,18 @@ jobs:
working_directory: ~/repo
steps:
- checkout
- run: yarn install
- run: yarn bootstrap
- run:
name: Updating apt-get list
command: sudo apt-get update
- run:
name: Installing the latest version of Go
command: sudo apt-get install golang-go
- run:
name: Installing dependencies
command: yarn install
- run:
name: Bootstrapping
command: yarn bootstrap
- run:
name: Saving Authentication Information
command: echo "//registry.npmjs.org/:_authToken=$NPM_TOKEN" >> ~/.npmrc
@@ -49,4 +59,4 @@ workflows:
# tags:
# only: /^(\d+\.)?(\d+\.)?(\*|\d+)$/
# branches:
# ignore: /.*/
# ignore: /.*/
.eslintignore (new file, 2 lines)
@@ -0,0 +1,2 @@
/node_modules/*
/**/node_modules/*
.eslintrc (new file, 10 lines)
@@ -0,0 +1,10 @@
{
  "extends": ["prettier", "airbnb-base"],
  "rules": {
    "no-console": 0,
    "import/no-unresolved": 0,
    "import/no-dynamic-require": 0,
    "global-require": 0
  }
}
README.md (16 changed lines)
@@ -3,3 +3,19 @@
This is the full list of official Builders provided by the ZEIT team.

More details here: http://zeit.co/docs

### Publishing to npm

Run the following command to publish modified builders to npm:

For the stable channel use:

```
yarn publish-stable
```

For the canary channel use:

```
yarn publish-canary
```
@@ -11,6 +11,13 @@
"scripts": {
"lerna": "lerna",
"bootstrap": "lerna bootstrap",
"publish-canary": "lerna version prerelease --preid canary"
"publish-canary": "lerna version prerelease --preid canary",
"lint": "eslint ."
},
"devDependencies": {
"eslint": "^5.9.0",
"eslint-config-airbnb-base": "^13.1.0",
"eslint-config-prettier": "^3.1.0",
"eslint-plugin-import": "^2.14.0"
}
}
@@ -2,7 +2,7 @@ const assert = require('assert');
const intoStream = require('into-stream');

class FileBlob {
constructor ({ mode = 0o100644, data }) {
constructor({ mode = 0o100644, data }) {
assert(typeof mode === 'number');
assert(typeof data === 'string' || Buffer.isBuffer(data));
this.type = 'FileBlob';
@@ -10,14 +10,14 @@ class FileBlob {
this.data = data;
}

static async fromStream ({ mode = 0o100644, stream }) {
static async fromStream({ mode = 0o100644, stream }) {
assert(typeof mode === 'number');
assert(typeof stream.pipe === 'function'); // is-stream
const chunks = [];

await new Promise((resolve, reject) => {
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
stream.on('error', (error) => reject(error));
stream.on('data', chunk => chunks.push(Buffer.from(chunk)));
stream.on('error', error => reject(error));
stream.on('end', () => resolve());
});

@@ -25,7 +25,7 @@ class FileBlob {
return new FileBlob({ mode, data });
}

toStream () {
toStream() {
return intoStream(this.data);
}
}
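For context, a minimal usage sketch of the `FileBlob` API shown above; this is an illustration of the interface, not part of the commit, and the calling function is hypothetical:

```js
const FileBlob = require('@now/build-utils/file-blob.js');

async function example(someReadableStream) {
  // Collect a stream into an in-memory blob (default mode 0o100644)...
  const blob = await FileBlob.fromStream({ stream: someReadableStream });
  // ...and turn the buffered data back into a stream for a consumer.
  return blob.toStream();
}
```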
@@ -7,7 +7,7 @@ const Sema = require('async-sema');
const semaToPreventEMFILE = new Sema(30);

class FileFsRef {
constructor ({ mode = 0o100644, fsPath }) {
constructor({ mode = 0o100644, fsPath }) {
assert(typeof mode === 'number');
assert(typeof fsPath === 'string');
this.type = 'FileFsRef';
@@ -15,7 +15,7 @@ class FileFsRef {
this.fsPath = fsPath;
}

static async fromStream ({ mode = 0o100644, stream, fsPath }) {
static async fromStream({ mode = 0o100644, stream, fsPath }) {
assert(typeof mode === 'number');
assert(typeof stream.pipe === 'function'); // is-stream
assert(typeof fsPath === 'string');
@@ -24,16 +24,16 @@ class FileFsRef {
await new Promise((resolve, reject) => {
const dest = fs.createWriteStream(fsPath);
stream.pipe(dest);
stream.on('error', (error) => reject(error));
stream.on('error', error => reject(error));
dest.on('finish', () => resolve());
dest.on('error', (error) => reject(error));
dest.on('error', error => reject(error));
});

await fs.chmod(fsPath, mode.toString(8).slice(-3));
return new FileFsRef({ mode, fsPath });
}

async toStreamAsync () {
async toStreamAsync() {
await semaToPreventEMFILE.acquire();
const release = () => semaToPreventEMFILE.release();
const stream = fs.createReadStream(this.fsPath);
@@ -42,9 +42,10 @@ class FileFsRef {
return stream;
}

toStream () {
toStream() {
let flag;

// eslint-disable-next-line consistent-return
return new MultiStream((cb) => {
if (flag) return cb();
flag = true;
@@ -7,7 +7,7 @@ const Sema = require('async-sema');
const semaToDownloadFromS3 = new Sema(10);

class FileRef {
constructor ({ mode = 0o100644, digest }) {
constructor({ mode = 0o100644, digest }) {
assert(typeof mode === 'number');
assert(typeof digest === 'string');
this.type = 'FileRef';
@@ -15,7 +15,7 @@ class FileRef {
this.digest = digest;
}

async toStreamAsync () {
async toStreamAsync() {
let url;
// sha:24be087eef9fac01d61b30a725c1a10d7b45a256
const digestParts = this.digest.split(':');
@@ -44,9 +44,10 @@ class FileRef {
}
}

toStream () {
toStream() {
let flag;

// eslint-disable-next-line consistent-return
return new MultiStream((cb) => {
if (flag) return cb();
flag = true;
@@ -1,13 +1,13 @@
const FileFsRef = require('../file-fs-ref.js');
const path = require('path');
const FileFsRef = require('../file-fs-ref.js');

async function downloadFile (file, fsPath) {
async function downloadFile(file, fsPath) {
const { mode } = file;
const stream = file.toStream();
return await FileFsRef.fromStream({ mode, stream, fsPath });
return FileFsRef.fromStream({ mode, stream, fsPath });
}

module.exports = async function download (files, basePath) {
module.exports = async function download(files, basePath) {
const files2 = {};

await Promise.all(Object.keys(files).map(async (name) => {
@@ -4,7 +4,7 @@ const fs = require('fs-extra');
const dev = !process.env.AWS_EXECUTION_ENV;
const TMP_PATH = dev ? path.join(process.cwd(), 'tmp') : '/tmp';

module.exports = async function getWritableDirectory () {
module.exports = async function getWritableDirectory() {
const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
const directory = path.join(TMP_PATH, name);
await fs.mkdirp(directory);
@@ -1,37 +1,42 @@
const assert = require('assert');
const FileFsRef = require('../file-fs-ref.js');
const path = require('path');
const vanillaGlob = require('glob');
const FileFsRef = require('../file-fs-ref.js');

module.exports = function glob (pattern, opts = {}, mountpoint) {
module.exports = function glob(pattern, opts = {}, mountpoint) {
return new Promise((resolve, reject) => {
if (typeof opts === 'string') {
opts = { cwd: opts };
let options = opts;
if (typeof options === 'string') {
options = { cwd: options };
}

if (!opts.cwd) {
if (!options.cwd) {
throw new Error('Second argument (basePath) must be specified for names of resulting files');
}

if (!path.isAbsolute(opts.cwd)) {
throw new Error(`basePath/cwd must be an absolute path (${opts.cwd})`);
if (!path.isAbsolute(options.cwd)) {
throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`);
}

opts.statCache = {};
opts.stat = true;
opts.dot = true;
options.statCache = {};
options.stat = true;
options.dot = true;

vanillaGlob(pattern, opts, (error, files) => {
// eslint-disable-next-line consistent-return
vanillaGlob(pattern, options, (error, files) => {
if (error) return reject(error);

resolve(files.reduce((files2, relativePath) => {
const fsPath = path.join(opts.cwd, relativePath);
const stat = opts.statCache[fsPath];
const fsPath = path.join(options.cwd, relativePath);
const stat = options.statCache[fsPath];
assert(stat, `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`);
if (stat.isFile()) {
let finalPath = relativePath;
if (mountpoint) finalPath = path.join(mountpoint, finalPath);
files2[finalPath] = new FileFsRef({ mode: stat.mode, fsPath });
return {
...files2,
[finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
};
}

return files2;
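A hedged sketch of how this `glob` helper is called after the change (the pattern, directory, and mountpoint below are illustrative, not from the diff): the second argument may be an options object or a `cwd` string, `cwd` must be absolute, and `mountpoint` prefixes the keys of the returned map.

```js
const glob = require('@now/build-utils/fs/glob.js');

async function example() {
  // Result is a map of relative path -> FileFsRef, keys prefixed with 'public'.
  const files = await glob('**', { cwd: '/tmp/some-build-output' }, 'public');
  return files;
}
```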
@@ -1,9 +1,6 @@
module.exports = function rename (files, delegate) {
const files2 = {};

for (const name in files) {
files2[delegate(name)] = files[name];
}

return files2;
module.exports = function rename(files, delegate) {
return Object.keys(files).reduce((newFiles, name) => ({
...newFiles,
[delegate(name)]: files[name],
}), {});
};
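For illustration, the refactored `rename` keeps the same contract as before; a minimal sketch, where `files` is assumed to be a map of name -> file reference:

```js
const rename = require('@now/build-utils/fs/rename.js');

// Prefix every key in a files map without touching the values.
// e.g. { 'index.js': ref } -> { 'api/index.js': ref }
const prefixed = rename(files, name => `api/${name}`);
```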
@@ -2,48 +2,60 @@ const fs = require('fs-extra');
const path = require('path');
const { spawn } = require('child_process');

async function runShellScript (fsPath) {
function spawnAsync(command, args, cwd) {
return new Promise((resolve, reject) => {
const child = spawn(command, args, { stdio: 'inherit', cwd });
child.on('error', reject);
child.on('close', (code, signal) => (code !== 0 ? reject(new Error(`Exited with ${code || signal}`)) : resolve()));
});
}

async function runShellScript(fsPath) {
const destPath = path.dirname(fsPath);
await spawnAsync('./' + path.basename(fsPath), [], destPath);
await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
return true;
}

async function shouldUseNpm (destPath) {
async function shouldUseNpm(destPath) {
let currentDestPath = destPath;
// eslint-disable-next-line no-constant-condition
while (true) {
if (await fs.exists(path.join(destPath, 'package.json'))) {
if (await fs.exists(path.join(destPath, 'package-lock.json'))) {
// eslint-disable-next-line no-await-in-loop
if (await fs.exists(path.join(currentDestPath, 'package.json'))) {
// eslint-disable-next-line no-await-in-loop
if (await fs.exists(path.join(currentDestPath, 'package-lock.json'))) {
return true;
} else {
return false;
}
return false;
}

const newDestPath = path.dirname(destPath);
if (destPath === newDestPath) break;
destPath = newDestPath;
const newDestPath = path.dirname(currentDestPath);
if (currentDestPath === newDestPath) break;
currentDestPath = newDestPath;
}

return false;
}

async function runNpmInstall (destPath, args = []) {
async function runNpmInstall(destPath, args = []) {
let commandArgs = args;
console.log(`installing to ${destPath}`);
if (await shouldUseNpm(destPath)) {
args = args.filter((a) => a !== '--prefer-offline');
await spawnAsync('npm', [ 'install' ].concat(args), destPath);
commandArgs = args.filter(a => a !== '--prefer-offline');
await spawnAsync('npm', ['install'].concat(commandArgs), destPath);
} else {
await spawnAsync('yarn', [ '--cwd', destPath ].concat(args), destPath);
await spawnAsync('yarn', ['--cwd', destPath].concat(commandArgs), destPath);
}
}

async function runPackageJsonScript (destPath, scriptName) {
async function runPackageJsonScript(destPath, scriptName) {
try {
if (await shouldUseNpm(destPath)) {
console.log(`running "npm run ${scriptName}"`);
await spawnAsync('npm', [ 'run', scriptName ], destPath);
await spawnAsync('npm', ['run', scriptName], destPath);
} else {
console.log(`running "yarn run ${scriptName}"`);
await spawnAsync('yarn', [ '--cwd', destPath, 'run', scriptName ], destPath);
await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath);
}
} catch (error) {
console.log(error.message);
@@ -53,19 +65,8 @@ async function runPackageJsonScript (destPath, scriptName) {
return true;
}

function spawnAsync (command, args, cwd) {
return new Promise((resolve, reject) => {
const child = spawn(command, args, { stdio: 'inherit', cwd });
child.on('error', reject);
child.on('close', (code, signal) => {
if (code !== 0) return reject(new Error(`Exited with ${code || signal}`));
resolve();
});
});
}

module.exports = {
runShellScript,
runNpmInstall,
runPackageJsonScript
runPackageJsonScript,
};
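A sketch of how builders elsewhere in this diff call these helpers (the `workPath` value is a placeholder); `runNpmInstall` picks npm or yarn depending on whether `shouldUseNpm` finds a `package-lock.json` above the target directory:

```js
const { runNpmInstall, runPackageJsonScript } = require('@now/build-utils/fs/run-user-scripts.js');

async function example(workPath) {
  await runNpmInstall(workPath, ['--prefer-offline']);
  await runPackageJsonScript(workPath, 'now-build');
}
```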
@@ -1,10 +1,4 @@
const streamToBuffer = require('fast-stream-to-buffer');
const fastStreamToBuffer = require('fast-stream-to-buffer');
const { promisify } = require('util');

module.exports = async function (stream) {
return await new Promise((resolve, reject) => {
streamToBuffer(stream, function (error, buffer) {
if (error) return reject(error);
resolve(buffer);
});
});
};
module.exports = promisify(fastStreamToBuffer);
@@ -3,7 +3,9 @@ const { ZipFile } = require('yazl');
const streamToBuffer = require('./fs/stream-to-buffer.js');

class Lambda {
constructor ({ zipBuffer, handler, runtime, environment }) {
constructor({
zipBuffer, handler, runtime, environment,
}) {
this.type = 'Lambda';
this.zipBuffer = zipBuffer;
this.handler = handler;
@@ -14,26 +16,29 @@ class Lambda {

const mtime = new Date(1540000000000);

async function createLambda ({ files, handler, runtime, environment }) {
assert(typeof files === 'object');
assert(typeof handler === 'string');
assert(typeof runtime === 'string');
if (environment === undefined) environment = {};
assert(typeof environment === 'object');
async function createLambda({
files, handler, runtime, environment = {},
}) {
assert(typeof files === 'object', '"files" must be an object');
assert(typeof handler === 'string', '"handler" is not a string');
assert(typeof runtime === 'string', '"runtime" is not a string');
assert(typeof environment === 'object', '"environment" is not an object');
const zipFile = new ZipFile();

for (const name of Object.keys(files).sort()) {
Object.keys(files).sort().forEach((name) => {
const file = files[name];
const stream = file.toStream();
zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
}
});

zipFile.end();
const zipBuffer = await streamToBuffer(zipFile.outputStream);
return new Lambda({ zipBuffer, handler, runtime, environment });
return new Lambda({
zipBuffer, handler, runtime, environment,
});
}

module.exports = {
Lambda,
createLambda
createLambda,
};
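A minimal sketch of the `createLambda` call shape after this change (handler and runtime values mirror the builders in this diff; `files` is assumed to be a map of name -> FileBlob/FileFsRef/FileRef):

```js
const { createLambda } = require('@now/build-utils/lambda.js');

async function example(files) {
  // environment now defaults to {} when omitted
  return createLambda({
    files,
    handler: 'launcher.launcher',
    runtime: 'nodejs8.10',
  });
}
```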
@@ -1,6 +1,6 @@
{
"name": "@now/build-utils",
"version": "0.4.27-canary.3",
"version": "0.4.27-canary.4",
"dependencies": {
"async-retry": "1.2.3",
"async-sema": "2.1.4",
@@ -1,14 +1,14 @@
const path = require('path')
const path = require('path');

const fetch = require('node-fetch')
const tar = require('tar')
const fetch = require('node-fetch');
const tar = require('tar');
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');

const url = 'https://dl.google.com/go/go1.11.1.linux-amd64.tar.gz'
const url = 'https://dl.google.com/go/go1.11.1.linux-amd64.tar.gz';

module.exports = async () => {
const res = await fetch(url)
const dir = await getWritableDirectory()
const res = await fetch(url);
const dir = await getWritableDirectory();

if (!res.ok) {
throw new Error(`Failed to download: ${url}`);
@@ -19,6 +19,5 @@ module.exports = async () => {
.on('error', reject)
.pipe(tar.extract({ cwd: dir, strip: 1 }))
.on('finish', () => resolve(path.join(dir, 'bin', 'go')));
})

}
});
};
@@ -1,120 +1,113 @@
const path = require("path")
const { mkdirp, readFile, writeFile } = require('fs-extra')
const path = require('path');
const { mkdirp, readFile, writeFile } = require('fs-extra');

const execa = require('execa')
const execa = require('execa');
const { createLambda } = require('@now/build-utils/lambda.js');
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
const download = require('@now/build-utils/fs/download.js');
const downloadGit = require("lambda-git")
const downloadGoBin = require("./download-go-bin")
const downloadGit = require('lambda-git');
const glob = require('@now/build-utils/fs/glob.js');

const goFilenames = new Set([
'go.mod',
'go.sum',
'Gopkg.lock',
'Gopkg.toml'
]);
const downloadGoBin = require('./download-go-bin');

// creates a `$GOPATH` direcotry tree, as per
// `go help gopath`'s instructions.
// without this, Go won't recognize the `$GOPATH`
async function createGoPathTree(goPath) {
await mkdirp(path.join(goPath, 'bin'))
await mkdirp(path.join(goPath, 'pkg', 'linux_amd64'))
await mkdirp(path.join(goPath, 'bin'));
await mkdirp(path.join(goPath, 'pkg', 'linux_amd64'));
}

exports.build = async ({files, entrypoint, config}) => {
console.log('downloading files...')
exports.build = async ({ files, entrypoint }) => {
console.log('downloading files...');

const gitPath = await getWritableDirectory()
const goPath = await getWritableDirectory()
const srcPath = path.join(goPath, 'src', 'lambda')
const outDir = await getWritableDirectory()
const gitPath = await getWritableDirectory();
const goPath = await getWritableDirectory();
const srcPath = path.join(goPath, 'src', 'lambda');
const outDir = await getWritableDirectory();

await createGoPathTree(goPath)
await createGoPathTree(goPath);

files = await download(files, srcPath)
const downloadedFiles = await download(files, srcPath);

console.log('downloading go binary...')
const goBin = await downloadGoBin()
console.log('downloading go binary...');
const goBin = await downloadGoBin();

console.log('downloading git binary...')
console.log('downloading git binary...');
// downloads a git binary that works on Amazon Linux and sets
// `process.env.GIT_EXEC_PATH` so `go(1)` can see it
await downloadGit({targetDirectory: gitPath})
await downloadGit({ targetDirectory: gitPath });

const goEnv = {
...process.env,
GOOS: 'linux',
GOARCH: 'amd64',
GOPATH: goPath
}
GOPATH: goPath,
};

console.log(`parsing AST for \"${entrypoint}\"`)
let handlerFunctionName = ''
console.log(`parsing AST for "${entrypoint}"`);
let handlerFunctionName = '';
try {
handlerFunctionName = await execa.stdout(
path.join(__dirname, 'bin', 'get-exported-function-name'),
[files[entrypoint].fsPath]
)
[downloadedFiles[entrypoint].fsPath],
);
} catch (err) {
console.log(`failed to parse AST for \"${entrypoint}\"`)
throw err
console.log(`failed to parse AST for "${entrypoint}"`);
throw err;
}

if (handlerFunctionName === '') {
const e = new Error(`Could not find an exported function on "${entrypoint}"`)
console.log(e.message)
throw e
const e = new Error(`Could not find an exported function on "${entrypoint}"`);
console.log(e.message);
throw e;
}

console.log(`Found exported function "${handlerFunctionName}" on \"${entrypoint}\"`)
console.log(`Found exported function "${handlerFunctionName}" on "${entrypoint}"`);

const origianlMainGoContents = await readFile(path.join(__dirname, 'main.go'), 'utf8')
const mainGoContents = origianlMainGoContents.replace('__NOW_HANDLER_FUNC_NAME', handlerFunctionName)
const origianlMainGoContents = await readFile(path.join(__dirname, 'main.go'), 'utf8');
const mainGoContents = origianlMainGoContents.replace('__NOW_HANDLER_FUNC_NAME', handlerFunctionName);
// in order to allow the user to have `main.go`, we need our `main.go` to be called something else
const mainGoFileName = 'main__now__go__.go'
const mainGoFileName = 'main__now__go__.go';

// we need `main.go` in the same dir as the entrypoint,
// otherwise `go build` will refuse to build
const entrypointDirname = path.dirname(files[entrypoint].fsPath)
const entrypointDirname = path.dirname(downloadedFiles[entrypoint].fsPath);

// Go doesn't like to build files in different directories,
// so now we place `main.go` together with the user code
await writeFile(path.join(entrypointDirname, mainGoFileName), mainGoContents)
await writeFile(path.join(entrypointDirname, mainGoFileName), mainGoContents);

console.log('installing dependencies')
console.log('installing dependencies');
// `go get` will look at `*.go` (note we set `cwd`), parse
// the `import`s and download any packages that aren't part of the stdlib
try {
await execa(goBin, ['get'], {env: goEnv, cwd: entrypointDirname, stdio: 'inherit'});
} catch (err) {
console.log('failed to `go get`')
throw err
await execa(goBin, ['get'], { env: goEnv, cwd: entrypointDirname, stdio: 'inherit' });
} catch (err) {
console.log('failed to `go get`');
throw err;
}

console.log('running go build...')
console.log('running go build...');
try {
await execa(goBin, [
'build',
'-o', path.join(outDir, 'handler'),
path.join(entrypointDirname, mainGoFileName), files[entrypoint].fsPath
], {env: goEnv, cwd: entrypointDirname, stdio: 'inherit'})
} catch (err) {
console.log('failed to `go build`')
throw err
path.join(entrypointDirname, mainGoFileName), downloadedFiles[entrypoint].fsPath,
], { env: goEnv, cwd: entrypointDirname, stdio: 'inherit' });
} catch (err) {
console.log('failed to `go build`');
throw err;
}

const lambda = await createLambda({
files: await glob('**', outDir),
handler: 'handler',
runtime: 'go1.x',
environment: {}
})
environment: {},
});

return {
[entrypoint]: lambda
}
}
[entrypoint]: lambda,
};
};
@@ -1,6 +1,6 @@
{
"name": "@now/go",
"version": "0.2.7-canary.3",
"version": "0.2.7-canary.4",
"scripts": {
"test": "best -I test/*.js",
"prepublish": "./build.sh"
@@ -16,11 +16,14 @@
"fs-extra": "^7.0.0",
"lambda-git": "^0.1.2",
"mkdirp-promise": "5.0.1",
"now-fetch": "0.1.1",
"node-fetch": "^2.2.1",
"tar": "4.4.6"
},
"devDependencies": {
"@zeit/best": "0.4.3",
"rmfr": "2.0.0"
},
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}
@@ -2,32 +2,30 @@ const FileBlob = require('@now/build-utils/file-blob.js');
const { minify } = require('html-minifier');

const defaultOptions = {
minifyCSS: true,
minifyJS: true,
removeComments: true,
removeAttributeQuotes: true,
removeEmptyAttributes: true,
removeOptionalTags: true,
removeRedundantAttributes: true,
useShortDoctype: true,
collapseWhitespace: true,
collapseInlineTagWhitespace: true,
collapseBooleanAttributes: true,
caseSensitive: true
minifyCSS: true,
minifyJS: true,
removeComments: true,
removeAttributeQuotes: true,
removeEmptyAttributes: true,
removeOptionalTags: true,
removeRedundantAttributes: true,
useShortDoctype: true,
collapseWhitespace: true,
collapseInlineTagWhitespace: true,
collapseBooleanAttributes: true,
caseSensitive: true,
};

exports.analyze = ({ files, entrypoint }) => {
return files[entrypoint].digest;
};
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;

exports.build = async ({ files, entrypoint, config }) => {
const stream = files[entrypoint].toStream();
const options = Object.assign({}, defaultOptions, config || {});
const { data } = await FileBlob.fromStream({ stream });
const content = data.toString();
const stream = files[entrypoint].toStream();
const options = Object.assign({}, defaultOptions, config || {});
const { data } = await FileBlob.fromStream({ stream });
const content = data.toString();

const minified = minify(content, options);
const result = new FileBlob({ data: minified });
const minified = minify(content, options);
const result = new FileBlob({ data: minified });

return { [entrypoint]: result };
return { [entrypoint]: result };
};
@@ -1,7 +1,10 @@
{
"name": "@now/html-minifier",
"version": "1.0.3-canary.3",
"version": "1.0.3-canary.4",
"dependencies": {
"html-minifier": "3.5.21"
},
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}
@@ -6,7 +6,8 @@ exports.build = async ({ files, entrypoint }) => {

// handler=launcher.main!runtime=nodejs8.10!name.zip
const config = path.basename(entrypoint).split('!').reduce((a, c) => {
const [ k, v ] = c.split('=');
const [k, v] = c.split('=');
// eslint-disable-next-line no-param-reassign
if (v) a[k] = v;
return a;
}, {});
@@ -17,7 +18,7 @@ exports.build = async ({ files, entrypoint }) => {
const lambda = new Lambda({
zipStream: files[entrypoint].toStream(), // TODO zipBuffer
handler: config.handler,
runtime: config.runtime
runtime: config.runtime,
});

return { [entrypoint]: lambda };
@@ -1,4 +1,7 @@
{
"name": "@now/lambda",
"version": "0.4.4-canary.3"
"version": "0.4.4-canary.4",
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}
@@ -1,37 +1,37 @@
const FileBlob = require('@now/build-utils/file-blob.js');
const unified = require('unified')
const unifiedStream = require('unified-stream')
const markdown = require('remark-parse')
const remark2rehype = require('remark-rehype')
const doc = require('rehype-document')
const format = require('rehype-format')
const html = require('rehype-stringify')
const unified = require('unified');
const unifiedStream = require('unified-stream');
const markdown = require('remark-parse');
const remark2rehype = require('remark-rehype');
const doc = require('rehype-document');
const format = require('rehype-format');
const html = require('rehype-stringify');

exports.analyze = ({ files, entrypoint }) => {
return files[entrypoint].digest;
};
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;

exports.build = async ({ files, entrypoint, config }) => {
const stream = files[entrypoint].toStream();
const options = config || {}
const options = config || {};

const title = options.title || null
const language = options.language || 'en'
const meta = options.meta || null
const css = options.css || null
const title = options.title || null;
const language = options.language || 'en';
const meta = options.meta || null;
const css = options.css || null;

const processor = unified()
.use(markdown)
.use(remark2rehype)
.use(doc, { title, language, meta, css })
.use(doc, {
title, language, meta, css,
})
.use(format)
.use(html);

const result = await FileBlob.fromStream({ stream: stream.pipe(unifiedStream(processor)) });

console.log(result.data.toString())
console.log(result.data.toString());

entrypoint = entrypoint.replace(/\.[^\.]+$/, '.html');
const replacedEntrypoint = entrypoint.replace(/\.[^.]+$/, '.html');

return { [entrypoint]: result };
return { [replacedEntrypoint]: result };
};
@@ -1,6 +1,6 @@
{
"name": "@now/md",
"version": "0.4.4-canary.3",
"version": "0.4.4-canary.4",
"dependencies": {
"rehype-document": "^2.2.0",
"rehype-format": "^2.3.0",
@@ -9,5 +9,8 @@
"remark-rehype": "^3.0.1",
"unified": "^7.0.0",
"unified-stream": "^1.0.2"
},
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}
@@ -6,25 +6,23 @@ const glob = require('@now/build-utils/fs/glob.js');
const path = require('path');
const { runNpmInstall } = require('@now/build-utils/fs/run-user-scripts.js');

exports.analyze = ({ files, entrypoint }) => {
return files[entrypoint].digest;
};
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;

const writeFile = promisify(fs.writeFile);

exports.build = async ({ files, entrypoint, workPath }) => {
console.log('downloading user files...');
files = await download(files, workPath);
const downloadedFiles = await download(files, workPath);
console.log('writing package.json...');
const packageJson = { dependencies: { 'mdx-deck': '1.7.7' } };
const packageJsonPath = path.join(workPath, 'package.json');
await writeFile(packageJsonPath, JSON.stringify(packageJson));
console.log('running npm install...');
process.env.PUPPETEER_SKIP_CHROMIUM_DOWNLOAD = '1'; // TODO opts argument for runNpmInstall
await runNpmInstall(path.dirname(packageJsonPath), [ '--prod', '--prefer-offline' ]);
await runNpmInstall(path.dirname(packageJsonPath), ['--prod', '--prefer-offline']);
console.log('building...');
const outDir = await getWritableDirectory();
const entrypointFsPath = files[entrypoint].fsPath;
const entrypointFsPath = downloadedFiles[entrypoint].fsPath;
const mountpoint = path.dirname(entrypoint);

const build = require(path.join(workPath, 'node_modules/mdx-deck/lib/build.js'));
@@ -34,11 +32,11 @@ exports.build = async ({ files, entrypoint, workPath }) => {
dirname: workPath,
outDir,
globals: {
FILENAME: JSON.stringify(entrypointFsPath)
}
FILENAME: JSON.stringify(entrypointFsPath),
},
});

return await glob('**', outDir, mountpoint);
return glob('**', outDir, mountpoint);
};

exports.prepareCache = async ({ cachePath }) => {
@@ -47,11 +45,11 @@ exports.prepareCache = async ({ cachePath }) => {
const packageJsonPath = path.join(cachePath, 'package.json');
await writeFile(packageJsonPath, JSON.stringify(packageJson));
console.log('running npm install...');
await runNpmInstall(path.dirname(packageJsonPath), [ '--prod' ]);
await runNpmInstall(path.dirname(packageJsonPath), ['--prod']);

return {
...await glob('node_modules/**', cachePath),
...await glob('package-lock.json', cachePath),
...await glob('yarn.lock', cachePath)
...await glob('yarn.lock', cachePath),
};
};
@@ -1,4 +1,7 @@
{
"name": "@now/mdx-deck",
"version": "0.4.13-canary.3"
"version": "0.4.13-canary.4",
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}
@@ -4,158 +4,165 @@ const FileFsRef = require('@now/build-utils/file-fs-ref.js');
const path = require('path');
const { readFile, writeFile, unlink } = require('fs.promised');
const rename = require('@now/build-utils/fs/rename.js');
const { runNpmInstall, runPackageJsonScript
} = require('@now/build-utils/fs/run-user-scripts.js');
const { runNpmInstall, runPackageJsonScript } = require('@now/build-utils/fs/run-user-scripts.js');
const glob = require('@now/build-utils/fs/glob.js');

// Exclude certain files from the files object
function excludeFiles(files, matchFn) {
return Object.keys(files).reduce((newFiles, fileName) => {
if(matchFn(fileName)) {
return newFiles
if (matchFn(fileName)) {
return newFiles;
}
newFiles[fileName] = files[fileName]
return newFiles
}, {})
return {
...newFiles,
[fileName]: files[fileName],
};
}, {});
}

exports.build = async ({ files, workPath, entrypoint }) => {
if(!/package\.json$/.exec(entrypoint) && !/next\.config\.js$/.exec(entrypoint)) {
throw new Error(`Specified "src" for "@now/next" has to be "package.json" or "next.config.js"`)
}

const entryDirectory = path.dirname(entrypoint)
console.log('downloading user files...');
const filesToDownload = excludeFiles(files, (file) => {
function shouldExcludeFile(entryDirectory) {
return (file) => {
// If the file is not in the entry directory
if(entryDirectory !== '.' && !file.startsWith(entryDirectory)) {
return true
if (entryDirectory !== '.' && !file.startsWith(entryDirectory)) {
return true;
}

// Exclude static directory
if(file.startsWith(path.join(entryDirectory, 'static'))) {
return true
if (file.startsWith(path.join(entryDirectory, 'static'))) {
return true;
}

if(file === 'package-lock.json') {
return true
if (file === 'package-lock.json') {
return true;
}

if(file === 'yarn.lock') {
return true
if (file === 'yarn.lock') {
return true;
}

return false
})
files = await download(rename(filesToDownload, (file) => {
if(entryDirectory !== '.') {
return file.replace(new RegExp(`^${entryDirectory}/`), '')
}
return file
}), workPath);
return false;
};
}

let packageJson = {}
if (files['package.json']) {
console.log('found package.json, overwriting')
const packageJsonPath = files['package.json'].fsPath
packageJson = JSON.parse(await readFile(packageJsonPath, 'utf8'))
exports.build = async ({ files, workPath, entrypoint }) => {
if (!/package\.json$/.exec(entrypoint) && !/next\.config\.js$/.exec(entrypoint)) {
throw new Error('Specified "src" for "@now/next" has to be "package.json" or "next.config.js"');
}

console.log('downloading user files...');
const entryDirectory = path.dirname(entrypoint);
const filesToDownload = excludeFiles(files, shouldExcludeFile(entryDirectory));
const entrypointHandledFilesToDownload = rename(filesToDownload, (file) => {
if (entryDirectory !== '.') {
return file.replace(new RegExp(`^${entryDirectory}/`), '');
}
return file;
});
let downloadedFiles = await download(entrypointHandledFilesToDownload, workPath);

let packageJson = {};
if (downloadedFiles['package.json']) {
console.log('found package.json, overwriting');
const packageJsonPath = downloadedFiles['package.json'].fsPath;
packageJson = JSON.parse(await readFile(packageJsonPath, 'utf8'));
}

packageJson = {
...packageJson,
dependencies: {
...packageJson.dependencies,
'next-server': 'canary'
'next-server': 'canary',
},
devDependencies: {
...packageJson.devDependencies,
'next': 'canary'
next: 'canary',
},
scripts: {
...packageJson.scripts,
'now-build': 'next build'
}
}
'now-build': 'next build',
},
};

if(!packageJson.dependencies.react) {
console.log('"react" not found in dependencies, adding to "package.json" "dependencies"')
packageJson.dependencies['react'] = 'latest'
if (!packageJson.dependencies.react) {
console.log('"react" not found in dependencies, adding to "package.json" "dependencies"');
packageJson.dependencies.react = 'latest';
}
if(!packageJson.dependencies['react-dom']) {
console.log('"react-dom" not found in dependencies, adding to "package.json" "dependencies"')
packageJson.dependencies['react-dom'] = 'latest'
if (!packageJson.dependencies['react-dom']) {
console.log('"react-dom" not found in dependencies, adding to "package.json" "dependencies"');
packageJson.dependencies['react-dom'] = 'latest';
}

// in case the user has `next` on their `dependencies`, we remove it
delete packageJson.dependencies.next
delete packageJson.dependencies.next;

console.log('>>>>>>', JSON.stringify(packageJson))
await writeFile(path.join(workPath, 'package.json'), JSON.stringify(packageJson, null, 2))
await writeFile(path.join(workPath, 'package.json'), JSON.stringify(packageJson, null, 2));

if(process.env.NPM_AUTH_TOKEN) {
console.log('found NPM_AUTH_TOKEN in environement, creating .npmrc')
await writeFile(path.join(workPath, '.npmrc'), `//registry.npmjs.org/:_authToken=${process.env.NPM_AUTH_TOKEN}`)
if (process.env.NPM_AUTH_TOKEN) {
console.log('found NPM_AUTH_TOKEN in environement, creating .npmrc');
await writeFile(path.join(workPath, '.npmrc'), `//registry.npmjs.org/:_authToken=${process.env.NPM_AUTH_TOKEN}`);
}
files = await glob('**', workPath);
downloadedFiles = await glob('**', workPath);

console.log('running npm install...');
await runNpmInstall(workPath, [ '--prefer-offline' ]);
await runNpmInstall(workPath, ['--prefer-offline']);
console.log('running user script...');
await runPackageJsonScript(workPath, 'now-build');
console.log('running npm install --production...');
await runNpmInstall(workPath, [ '--prefer-offline', '--production' ]);
if(process.env.NPM_AUTH_TOKEN) {
await unlink(path.join(workPath, '.npmrc'))
await runNpmInstall(workPath, ['--prefer-offline', '--production']);
if (process.env.NPM_AUTH_TOKEN) {
await unlink(path.join(workPath, '.npmrc'));
}
files = await glob('**', workPath)
downloadedFiles = await glob('**', workPath);

console.log('preparing lambda files...');
let buildId
let buildId;
try {
buildId = await readFile(path.join(workPath, '.next', 'BUILD_ID'), 'utf8')
} catch(err) {
console.error(`BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"`)
throw new Error('Missing BUILD_ID')
buildId = await readFile(path.join(workPath, '.next', 'BUILD_ID'), 'utf8');
} catch (err) {
console.error('BUILD_ID not found in ".next". The "package.json" "build" script did not run "next build"');
throw new Error('Missing BUILD_ID');
}
const dotNextRootFiles = await glob('.next/*', workPath)
const dotNextServerRootFiles = await glob('.next/server/*', workPath)
const nodeModules = excludeFiles(await glob('node_modules/**', workPath), (file) => file.startsWith('node_modules/.cache'))
const dotNextRootFiles = await glob('.next/*', workPath);
const dotNextServerRootFiles = await glob('.next/server/*', workPath);
const nodeModules = excludeFiles(await glob('node_modules/**', workPath), file => file.startsWith('node_modules/.cache'));
const launcherFiles = {
'now__launcher.js': new FileFsRef({ fsPath: path.join(__dirname, 'launcher.js') }),
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') })
'now__bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
};
const nextFiles = {...nodeModules, ...dotNextRootFiles, ...dotNextServerRootFiles, ...launcherFiles}
if(files['next.config.js']) {
nextFiles['next.config.js'] = files['next.config.js']
const nextFiles = {
...nodeModules, ...dotNextRootFiles, ...dotNextServerRootFiles, ...launcherFiles,
};
if (downloadedFiles['next.config.js']) {
nextFiles['next.config.js'] = downloadedFiles['next.config.js'];
}
const pages = await glob('**/*.js', path.join(workPath, '.next', 'server', 'static', buildId, 'pages'))
const pages = await glob('**/*.js', path.join(workPath, '.next', 'server', 'static', buildId, 'pages'));

const lambdas = {}
for(const page in pages) {
// These default pages don't have to be handled a rendering, they'd always 404
if(['_app.js', '_error.js', '_document.js'].includes(page)) {
continue
const lambdas = {};
await Promise.all(Object.keys(pages).map(async (page) => {
// These default pages don't have to be handled as they'd always 404
if (['_app.js', '_error.js', '_document.js'].includes(page)) {
return;
}

const pageFiles = {
[`.next/server/static/${buildId}/pages/_document.js`]: files[`.next/server/static/${buildId}/pages/_document.js`],
[`.next/server/static/${buildId}/pages/_app.js`]: files[`.next/server/static/${buildId}/pages/_app.js`],
[`.next/server/static/${buildId}/pages/_error.js`]: files[`.next/server/static/${buildId}/pages/_error.js`],
[`.next/server/static/${buildId}/pages/${page}`]: files[`.next/server/static/${buildId}/pages/${page}`]
}
[`.next/server/static/${buildId}/pages/_document.js`]: downloadedFiles[`.next/server/static/${buildId}/pages/_document.js`],
[`.next/server/static/${buildId}/pages/_app.js`]: downloadedFiles[`.next/server/static/${buildId}/pages/_app.js`],
[`.next/server/static/${buildId}/pages/_error.js`]: downloadedFiles[`.next/server/static/${buildId}/pages/_error.js`],
[`.next/server/static/${buildId}/pages/${page}`]: downloadedFiles[`.next/server/static/${buildId}/pages/${page}`],
};

lambdas[path.join(entryDirectory, page.replace(/\.js$/, ''))] = await createLambda({
files: {...nextFiles, ...pageFiles},
files: { ...nextFiles, ...pageFiles },
handler: 'now__launcher.launcher',
runtime: 'nodejs8.10'
})
}
runtime: 'nodejs8.10',
});
}));

const nextStaticFiles = await glob('**', path.join(workPath, '.next', 'static'))
const staticFiles = {}
for(const staticFile in nextStaticFiles) {
staticFiles[path.join(entryDirectory, '_next/static/' + staticFile)] = nextStaticFiles[staticFile]
}
const nextStaticFiles = await glob('**', path.join(workPath, '.next', 'static'));
const staticFiles = Object.keys(nextStaticFiles).reduce((mappedFiles, file) => ({
...mappedFiles,
[path.join(entryDirectory, `_next/static/${file}`)]: nextStaticFiles[file],
}), {});

return { ...lambdas, ...staticFiles };
};
@@ -163,11 +170,11 @@ exports.build = async ({ files, workPath, entrypoint }) => {
exports.prepareCache = async ({ files, cachePath, workPath }) => {
console.log('downloading user files...');
await download(files, cachePath);
await download(await glob('.next/**', workPath), cachePath)
await download(await glob('node_modules/**', workPath), cachePath)
await download(await glob('.next/**', workPath), cachePath);
await download(await glob('node_modules/**', workPath), cachePath);

console.log('.next folder contents', await glob('.next/**', cachePath))
console.log('.cache folder contents', await glob('node_modules/.cache/**', cachePath))
console.log('.next folder contents', await glob('.next/**', cachePath));
console.log('.cache folder contents', await glob('node_modules/.cache/**', cachePath));

console.log('running npm install...');
await runNpmInstall(cachePath);
@@ -177,6 +184,6 @@ exports.prepareCache = async ({ files, cachePath, workPath }) => {
...await glob('.next/records.json', cachePath),
...await glob('.next/server/records.json', cachePath),
...await glob('node_modules/**', cachePath),
...await glob('yarn.lock', cachePath)
...await glob('yarn.lock', cachePath),
};
};
@@ -1,13 +1,14 @@
const { Bridge } = require('./now__bridge.js');
const { Server } = require('http');
const next = require('next-server')
const next = require('next-server');
const { Bridge } = require('./now__bridge.js');

const bridge = new Bridge();
bridge.port = 3000;

process.env.NODE_ENV = 'production';

const app = next({})
const handler = app.getRequestHandler()
const app = next({});
const handler = app.getRequestHandler();

const server = new Server(handler);
server.listen(bridge.port);
|
||||
{
|
||||
"name": "@now/next",
|
||||
"version": "0.0.74-canary.3",
|
||||
"version": "0.0.74-canary.4",
|
||||
"dependencies": {
|
||||
"@now/node-bridge": "0.1.4",
|
||||
"execa": "^1.0.0",
|
||||
"fs.promised": "^3.0.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@now/build-utils": ">=0.0.1"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,48 @@
const assert = require('assert');
const http = require('http');

function normalizeEvent(event) {
if (event.Action === 'Invoke') {
const invokeEvent = JSON.parse(event.body);
const {
method, path, headers, encoding,
} = invokeEvent;
let { body } = invokeEvent;
if (body) {
assert(encoding === 'base64', JSON.stringify(event)); // do we support anything else?
body = Buffer.from(body, encoding);
}

return {
method,
path,
headers,
body,
};
}

const {
httpMethod: method,
path,
headers,
body,
} = event;

return {
method,
path,
headers,
body,
};
}

class Bridge {
constructor () {
constructor() {
this.launcher = this.launcher.bind(this);
}

launcher (event) {
launcher(event) {
// eslint-disable-next-line consistent-return
return new Promise((resolve, reject) => {
if (this.userError) {
console.error('Error while initializing entrypoint:', this.userError);
@@ -17,45 +53,32 @@ class Bridge {
return resolve({ statusCode: 504, body: '' });
}

let method, path, headers, body;

if (event.Action === 'Invoke') {
event = JSON.parse(event.body);
method = event.method;
path = event.path;
headers = event.headers;
if (event.body) {
assert(event.encoding === 'base64', JSON.stringify(event)); // do we support anything else?
body = Buffer.from(event.body, event.encoding);
}
} else {
method = event.httpMethod;
path = event.path;
headers = event.headers;
body = event.body;
}
const {
method, path, headers, body,
} = normalizeEvent(event);

const opts = {
hostname: '127.0.0.1',
port: this.port,
path,
method,
headers
headers,
};

const req = http.request(opts, (resp) => {
const req = http.request(opts, (res) => {
const response = res;
const respBodyChunks = [];
resp.on('data', (chunk) => respBodyChunks.push(Buffer.from(chunk)));
resp.on('error', (error) => reject(error));
resp.on('end', () => {
delete resp.headers.connection;
delete resp.headers['content-length'];
response.on('data', chunk => respBodyChunks.push(Buffer.from(chunk)));
response.on('error', error => reject(error));
response.on('end', () => {
delete response.headers.connection;
delete response.headers['content-length'];

resolve({
statusCode: resp.statusCode,
headers: resp.headers,
statusCode: response.statusCode,
headers: response.headers,
body: Buffer.concat(respBodyChunks).toString('base64'),
encoding: 'base64'
encoding: 'base64',
});
});
});
@@ -67,5 +90,5 @@ class Bridge {
}

module.exports = {
Bridge
Bridge,
};
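For context, a hedged sketch of how the exported `Bridge` is wired into a Lambda entrypoint, mirroring the launchers elsewhere in this diff (the request handler below is a placeholder):

```js
const { Server } = require('http');
const { Bridge } = require('./bridge.js');

const bridge = new Bridge();

// Start a local HTTP server; the bridge proxies Lambda events to it.
const server = new Server((req, res) => res.end('ok')); // placeholder handler
server.listen(0);
server.on('listening', function onListening() {
  bridge.port = this.address().port;
});

// AWS Lambda handler: normalizes the incoming event and forwards it.
exports.launcher = bridge.launcher;
```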
@@ -1,2 +1,3 @@
const path = require('path');

module.exports = path.join(__dirname, 'bridge.js');
@@ -1,4 +1,7 @@
{
"name": "@now/node-bridge",
"version": "0.1.6-canary.3"
"version": "0.1.6-canary.4",
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}
@@ -6,16 +6,17 @@ const fs = require('fs');
|
||||
const glob = require('@now/build-utils/fs/glob.js');
|
||||
const path = require('path');
|
||||
const { promisify } = require('util');
|
||||
const { runNpmInstall, runPackageJsonScript
|
||||
} = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
const { runNpmInstall, runPackageJsonScript } = require('@now/build-utils/fs/run-user-scripts.js');
|
||||
|
||||
const fsp = {
|
||||
readFile: promisify(fs.readFile)
|
||||
readFile: promisify(fs.readFile),
|
||||
};
|
||||
|
||||
async function commonForTwo ({ files, entrypoint, workPath, cachePath }) {
|
||||
async function commonForTwo({
|
||||
files, entrypoint, workPath, cachePath,
|
||||
}) {
|
||||
const xPath = workPath || cachePath;
|
||||
const preferOfflineArgument = workPath ? [ '--prefer-offline' ] : [];
|
||||
const preferOfflineArgument = workPath ? ['--prefer-offline'] : [];
|
||||
|
||||
const xUserPath = path.join(xPath, 'user');
|
||||
const xRollupPath = path.join(xPath, 'rollup');
|
||||
@@ -32,25 +33,64 @@ async function commonForTwo ({ files, entrypoint, workPath, cachePath }) {
|
||||
'package.json': new FileBlob({
|
||||
data: JSON.stringify({
|
||||
dependencies: {
|
||||
'builtins': '2.0.0',
|
||||
'rollup': '0.67.0',
|
||||
builtins: '2.0.0',
|
||||
rollup: '0.67.0',
|
||||
'rollup-plugin-commonjs': '9.2.0',
'rollup-plugin-json': '3.1.0',
'rollup-plugin-node-resolve': '3.4.0',
'rollup-plugin-terser': '3.0.0'
}
})
})
'rollup-plugin-terser': '3.0.0',
},
}),
}),
}, xRollupPath);

console.log('running npm install for rollup...');
await runNpmInstall(xRollupPath, preferOfflineArgument);
return [ filesOnDisk, xRollupPath, entrypointFsDirname ];
return [filesOnDisk, xRollupPath, entrypointFsDirname];
}

async function compile(workRollupPath, input) {
const rollup = require(path.join(workRollupPath, 'node_modules/rollup'));
const nodeResolve = require(path.join(workRollupPath, 'node_modules/rollup-plugin-node-resolve'));
const commonjs = require(path.join(workRollupPath, 'node_modules/rollup-plugin-commonjs'));
const json = require(path.join(workRollupPath, 'node_modules/rollup-plugin-json'));
const { terser } = require(path.join(workRollupPath, 'node_modules/rollup-plugin-terser'));
const builtins = require(path.join(workRollupPath, 'node_modules/builtins'))();

const bundle = await rollup.rollup({
input,
plugins: [
nodeResolve({
module: false,
jsnext: false,
browser: false,
preferBuiltins: true,
}),
json(),
commonjs(),
terser(),
],
onwarn(error) {
if (/external dependency/.test(error.message)) {
const mod = error.message.split('\'')[1];
// ignore rollup warnings about known node.js modules
if (builtins.indexOf(mod) > -1) return;
}
console.error(error.message);
},
});

return (await bundle.generate({
format: 'cjs',
})).code;
}

exports.build = async ({ files, entrypoint, workPath }) => {
const [ filesOnDisk, workRollupPath, entrypointFsDirname ] =
await commonForTwo({ files, entrypoint, workPath });
const [
filesOnDisk,
workRollupPath,
entrypointFsDirname,
] = await commonForTwo({ files, entrypoint, workPath });

console.log('running user script...');
await runPackageJsonScript(entrypointFsDirname, 'now-build');
@@ -67,18 +107,18 @@ exports.build = async ({ files, entrypoint, workPath }) => {

launcherData = launcherData.replace('// PLACEHOLDER', [
'process.chdir("./user");',
`require("./${path.join('user', entrypoint)}");`
`require("./${path.join('user', entrypoint)}");`,
].join(' '));

const launcherFiles = {
'launcher.js': new FileBlob({ data: launcherData }),
'bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') })
'bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
};

const lambda = await createLambda({
files: { ...compiledFiles, ...launcherFiles },
handler: 'launcher.launcher',
runtime: 'nodejs8.10'
runtime: 'nodejs8.10',
});

return { [entrypoint]: lambda };
@@ -93,42 +133,6 @@ exports.prepareCache = async ({ files, entrypoint, cachePath }) => {
...await glob('user/yarn.lock', cachePath),
...await glob('rollup/node_modules/**', cachePath),
...await glob('rollup/package-lock.json', cachePath),
...await glob('rollup/yarn.lock', cachePath)
...await glob('rollup/yarn.lock', cachePath),
};
};

async function compile (workRollupPath, input) {
const rollup = require(path.join(workRollupPath, 'node_modules/rollup'));
const nodeResolve = require(path.join(workRollupPath, 'node_modules/rollup-plugin-node-resolve'));
const commonjs = require(path.join(workRollupPath, 'node_modules/rollup-plugin-commonjs'));
const json = require(path.join(workRollupPath, 'node_modules/rollup-plugin-json'));
const { terser } = require(path.join(workRollupPath, 'node_modules/rollup-plugin-terser'));
const builtins = require(path.join(workRollupPath, 'node_modules/builtins'))();

const bundle = await rollup.rollup({
input,
plugins: [
nodeResolve({
module: false,
jsnext: false,
browser: false,
preferBuiltins: true
}),
json(),
commonjs(),
terser()
],
onwarn: function (error) {
if (/external dependency/.test(error.message)) {
const mod = error.message.split('\'')[1];
// ignore rollup warnings about known node.js modules
if (builtins.indexOf(mod) > -1) return;
}
console.error(error.message);
}
});

return (await bundle.generate({
format: 'cjs'
})).code;
}

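For context only: the `compile()` helper shown in the hunks above bundles the user's entrypoint with the rollup 0.x two-step API, where `bundle.generate()` resolves to a `{ code, map }` pair. A minimal self-contained sketch of that same step, with the plugins required directly rather than resolved out of the builder's scratch directory (the `index.js` input path is an assumption for illustration):

```js
// Sketch of the rollup bundling step used above (rollup 0.67-era API).
const rollup = require('rollup');
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
const json = require('rollup-plugin-json');
const { terser } = require('rollup-plugin-terser');

async function bundleToCommonJs(input) {
  // Phase 1: build the module graph with the same plugin set as the builder.
  const bundle = await rollup.rollup({
    input,
    plugins: [nodeResolve({ preferBuiltins: true }), json(), commonjs(), terser()],
  });
  // Phase 2: emit a single CommonJS string; rollup < 1.0 returns { code, map }.
  const { code } = await bundle.generate({ format: 'cjs' });
  return code;
}

bundleToCommonJs('index.js').then(code => console.log(`${code.length} bytes`));
```
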
@@ -1,10 +1,11 @@
const { Bridge } = require('./bridge.js');
const { Server } = require('http');
const { Bridge } = require('./bridge.js');

const bridge = new Bridge();

const saveListen = Server.prototype.listen;
Server.prototype.listen = function (...args) {
this.on('listening', function () {
Server.prototype.listen = function listen(...args) {
this.on('listening', function listening() {
bridge.port = this.address().port;
});
saveListen.apply(this, args);

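The launcher change above names the patched `listen` and `listening` functions to satisfy the new lint rules; behaviourally it still just records whichever port the user's server binds to in `bridge.port`. For context, a minimal sketch of the kind of entrypoint this launcher wraps (illustrative only, not a file in this changeset):

```js
// Any plain Node HTTP server works: the patched Server.prototype.listen above
// captures the actual bound port, so the hard-coded 3000 is only a default.
const http = require('http');

http
  .createServer((req, res) => {
    res.end('hello from a @now/node-server entrypoint');
  })
  .listen(3000);
```
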
@@ -1,7 +1,10 @@
{
"name": "@now/node-server",
"version": "0.4.21-canary.3",
"version": "0.4.21-canary.4",
"dependencies": {
"@now/node-bridge": "^0.1.6-canary.3"
"@now/node-bridge": "^0.1.6-canary.4"
},
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}

@@ -6,16 +6,17 @@ const fs = require('fs');
const glob = require('@now/build-utils/fs/glob.js');
const path = require('path');
const { promisify } = require('util');
const { runNpmInstall, runPackageJsonScript
} = require('@now/build-utils/fs/run-user-scripts.js');
const { runNpmInstall, runPackageJsonScript } = require('@now/build-utils/fs/run-user-scripts.js');

const fsp = {
readFile: promisify(fs.readFile)
readFile: promisify(fs.readFile),
};

async function commonForTwo ({ files, entrypoint, workPath, cachePath }) {
async function commonForTwo({
files, entrypoint, workPath, cachePath,
}) {
const xPath = workPath || cachePath;
const preferOfflineArgument = workPath ? [ '--prefer-offline' ] : [];
const preferOfflineArgument = workPath ? ['--prefer-offline'] : [];

const xUserPath = path.join(xPath, 'user');
const xRollupPath = path.join(xPath, 'rollup');
@@ -32,25 +33,64 @@ async function commonForTwo ({ files, entrypoint, workPath, cachePath }) {
'package.json': new FileBlob({
data: JSON.stringify({
dependencies: {
'builtins': '2.0.0',
'rollup': '0.67.0',
builtins: '2.0.0',
rollup: '0.67.0',
'rollup-plugin-commonjs': '9.2.0',
'rollup-plugin-json': '3.1.0',
'rollup-plugin-node-resolve': '3.4.0',
'rollup-plugin-terser': '3.0.0'
}
})
})
'rollup-plugin-terser': '3.0.0',
},
}),
}),
}, xRollupPath);

console.log('running npm install for rollup...');
await runNpmInstall(xRollupPath, preferOfflineArgument);
return [ filesOnDisk, xRollupPath, entrypointFsDirname ];
return [filesOnDisk, xRollupPath, entrypointFsDirname];
}

async function compile(workRollupPath, input) {
const rollup = require(path.join(workRollupPath, 'node_modules/rollup'));
const nodeResolve = require(path.join(workRollupPath, 'node_modules/rollup-plugin-node-resolve'));
const commonjs = require(path.join(workRollupPath, 'node_modules/rollup-plugin-commonjs'));
const json = require(path.join(workRollupPath, 'node_modules/rollup-plugin-json'));
const { terser } = require(path.join(workRollupPath, 'node_modules/rollup-plugin-terser'));
const builtins = require(path.join(workRollupPath, 'node_modules/builtins'))();

const bundle = await rollup.rollup({
input,
plugins: [
nodeResolve({
module: false,
jsnext: false,
browser: false,
preferBuiltins: true,
}),
json(),
commonjs(),
terser(),
],
onwarn(error) {
if (/external dependency/.test(error.message)) {
const mod = error.message.split('\'')[1];
// ignore rollup warnings about known node.js modules
if (builtins.indexOf(mod) > -1) return;
}
console.error(error.message);
},
});

return (await bundle.generate({
format: 'cjs',
})).code;
}

exports.build = async ({ files, entrypoint, workPath }) => {
const [ filesOnDisk, workRollupPath, entrypointFsDirname ] =
await commonForTwo({ files, entrypoint, workPath });
const [
filesOnDisk,
workRollupPath,
entrypointFsDirname,
] = await commonForTwo({ files, entrypoint, workPath });

console.log('running user script...');
await runPackageJsonScript(entrypointFsDirname, 'now-build');
@@ -67,18 +107,18 @@ exports.build = async ({ files, entrypoint, workPath }) => {

launcherData = launcherData.replace('// PLACEHOLDER', [
'process.chdir("./user");',
`listener = require("./${path.join('user', entrypoint)}");`
`listener = require("./${path.join('user', entrypoint)}");`,
].join(' '));

const launcherFiles = {
'launcher.js': new FileBlob({ data: launcherData }),
'bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') })
'bridge.js': new FileFsRef({ fsPath: require('@now/node-bridge') }),
};

const lambda = await createLambda({
files: { ...compiledFiles, ...launcherFiles },
handler: 'launcher.launcher',
runtime: 'nodejs8.10'
runtime: 'nodejs8.10',
});

return { [entrypoint]: lambda };
@@ -93,42 +133,6 @@ exports.prepareCache = async ({ files, entrypoint, cachePath }) => {
...await glob('user/yarn.lock', cachePath),
...await glob('rollup/node_modules/**', cachePath),
...await glob('rollup/package-lock.json', cachePath),
...await glob('rollup/yarn.lock', cachePath)
...await glob('rollup/yarn.lock', cachePath),
};
};

async function compile (workRollupPath, input) {
const rollup = require(path.join(workRollupPath, 'node_modules/rollup'));
const nodeResolve = require(path.join(workRollupPath, 'node_modules/rollup-plugin-node-resolve'));
const commonjs = require(path.join(workRollupPath, 'node_modules/rollup-plugin-commonjs'));
const json = require(path.join(workRollupPath, 'node_modules/rollup-plugin-json'));
const { terser } = require(path.join(workRollupPath, 'node_modules/rollup-plugin-terser'));
const builtins = require(path.join(workRollupPath, 'node_modules/builtins'))();

const bundle = await rollup.rollup({
input,
plugins: [
nodeResolve({
module: false,
jsnext: false,
browser: false,
preferBuiltins: true
}),
json(),
commonjs(),
terser()
],
onwarn: function (error) {
if (/external dependency/.test(error.message)) {
const mod = error.message.split('\'')[1];
// ignore rollup warnings about known node.js modules
if (builtins.indexOf(mod) > -1) return;
}
console.error(error.message);
}
});

return (await bundle.generate({
format: 'cjs'
})).code;
}

@@ -1,5 +1,6 @@
const { Bridge } = require('./bridge.js');
const { Server } = require('http');
const { Bridge } = require('./bridge.js');

const bridge = new Bridge();
bridge.port = 3000;
let listener;

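Putting the two `@now/node` hunks above together (the `// PLACEHOLDER` replacement in `index.js` and the launcher prologue), the generated launcher roughly becomes the following; the `user/index.js` path stands in for the real entrypoint and is illustrative only:

```js
const { Server } = require('http');
const { Bridge } = require('./bridge.js');

const bridge = new Bridge();
bridge.port = 3000;
let listener;

// Inserted by exports.build in place of "// PLACEHOLDER":
process.chdir('./user');
listener = require('./user/index.js'); // the real path depends on the entrypoint
```
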
@@ -1,7 +1,10 @@
{
"name": "@now/node",
"version": "0.4.23-canary.3",
"version": "0.4.23-canary.4",
"dependencies": {
"@now/node-bridge": "^0.1.6-canary.3"
"@now/node-bridge": "^0.1.6-canary.4"
},
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}

@@ -2,12 +2,10 @@ const FileBlob = require('@now/build-utils/file-blob.js');
const OptiPng = require('optipng');
const pipe = require('multipipe');

exports.analyze = ({ files, entrypoint }) => {
return files[entrypoint].digest;
};
exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;

exports.build = async ({ files, entrypoint }) => {
const optimizer = new OptiPng([ '-o9' ]);
const optimizer = new OptiPng(['-o9']);
const stream = pipe(files[entrypoint].toStream(), optimizer);
const result = await FileBlob.fromStream({ stream });
return { [entrypoint]: result };

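The `analyze`/`build` pair above streams the input PNG through OptiPng and collects the result with `FileBlob.fromStream`. The same optimize-and-collect pattern applied to a file on disk instead of a build input, as a sketch (the `fsPath` argument is an assumption for illustration):

```js
// Sketch: optimize a PNG from disk with the same pipeline the builder uses.
const fs = require('fs');
const OptiPng = require('optipng');
const pipe = require('multipipe');
const FileBlob = require('@now/build-utils/file-blob.js');

async function optimizePng(fsPath) {
  const optimizer = new OptiPng(['-o9']); // same optimization level as the builder
  const stream = pipe(fs.createReadStream(fsPath), optimizer);
  return FileBlob.fromStream({ stream }); // resolves to a FileBlob with the optimized bytes
}
```
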
@@ -1,8 +1,11 @@
{
"name": "@now/optipng",
"version": "0.4.4-canary.3",
"version": "0.4.4-canary.4",
"dependencies": {
"multipipe": "2.0.3",
"optipng": "1.1.0"
},
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}

@@ -5,7 +5,7 @@ const rename = require('@now/build-utils/fs/rename.js');

exports.build = async ({ files, entrypoint }) => {
// move all user code to 'user' subdirectory
const userFiles = rename(files, (name) => path.join('user', name));
const userFiles = rename(files, name => path.join('user', name));
const launcherFiles = await glob('**', path.join(__dirname, 'dist'));
const zipFiles = { ...userFiles, ...launcherFiles };

@@ -15,8 +15,8 @@ exports.build = async ({ files, entrypoint }) => {
runtime: 'go1.x',
environment: {
SCRIPT_NAME: path.join('/', entrypoint),
NOW_PHP_SCRIPT: path.join('user', entrypoint)
}
NOW_PHP_SCRIPT: path.join('user', entrypoint),
},
});

return { [entrypoint]: lambda };

@@ -1,4 +1,7 @@
{
"name": "@now/php",
"version": "0.4.8-canary.3"
"version": "0.4.8-canary.4",
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}

@@ -1,33 +1,34 @@
const path = require('path')
const fetch = require('node-fetch')
const execa = require('execa')
const { createWriteStream } = require('fs')
const path = require('path');
const fetch = require('node-fetch');
const execa = require('execa');
const { createWriteStream } = require('fs');

const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
const url = 'https://bootstrap.pypa.io/get-pip.py'

const url = 'https://bootstrap.pypa.io/get-pip.py';

// downloads `get-pip.py` and returns its absolute path
async function downloadGetPipScript() {
console.log('downloading "get-pip.py"...')
const res = await fetch(url)
console.log('downloading "get-pip.py"...');
const res = await fetch(url);

if (!res.ok || res.status !== 200) {
throw new Error(`Could not download "get-pip.py" from "${url}"`)
}

const dir = await getWritableDirectory()
const filePath = path.join(dir, 'get-pip.py')
const writeStream = createWriteStream(filePath)

throw new Error(`Could not download "get-pip.py" from "${url}"`);
}

const dir = await getWritableDirectory();
const filePath = path.join(dir, 'get-pip.py');
const writeStream = createWriteStream(filePath);

return new Promise((resolve, reject) => {
res.body
.on('error', reject)
.pipe(writeStream)
.on('finish', () => resolve(filePath))
})
.on('finish', () => resolve(filePath));
});
}

// downloads and installs `pip` (respecting
// downloads and installs `pip` (respecting
// process.env.PYTHONUSERBASE), and returns
// the absolute path to it
async function downloadAndInstallPip() {
@@ -36,19 +37,19 @@ async function downloadAndInstallPip() {
// installed to. `--user` will assume `~` if this
// is not set, and `~` is not writeable on AWS Lambda.
// let's refuse to proceed
throw new Error('Could not install "pip": "PYTHONUSERBASE" env var is not set')
throw new Error('Could not install "pip": "PYTHONUSERBASE" env var is not set');
}
const getPipFilePath = await downloadGetPipScript()
const getPipFilePath = await downloadGetPipScript();

console.log('runing "python get-pip.py"...')
console.log('runing "python get-pip.py"...');
try {
await execa('python3', [getPipFilePath, '--user'], {stdio: 'inherit'})
await execa('python3', [getPipFilePath, '--user'], { stdio: 'inherit' });
} catch (err) {
console.log('could not install pip')
throw err
console.log('could not install pip');
throw err;
}

return path.join(process.env.PYTHONUSERBASE, 'bin', 'pip')
return path.join(process.env.PYTHONUSERBASE, 'bin', 'pip');
}

module.exports = downloadAndInstallPip
module.exports = downloadAndInstallPip;

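A short sketch of how the pip helper above could be exercised on its own, mirroring what `pipInstall` in the Python builder's `index.js` does; `PYTHONUSERBASE` must point at a writable directory first, and the `installRequestsInto` name is illustrative:

```js
// Sketch: bootstrap pip with the helper above, then install a package into a
// target directory the same way the builder's pipInstall() does.
const execa = require('execa');
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
const downloadAndInstallPip = require('./download-and-install-pip');

async function installRequestsInto(srcDir) {
  process.env.PYTHONUSERBASE = await getWritableDirectory(); // pip must land under /tmp
  const pipPath = await downloadAndInstallPip();
  await execa(pipPath, ['install', '-t', srcDir, 'requests'], { stdio: 'inherit' });
}
```
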
@@ -1,77 +1,78 @@
const path = require('path')
const execa = require('execa')
const { readFile, writeFile } = require('fs.promised')
const path = require('path');
const execa = require('execa');
const { readFile, writeFile } = require('fs.promised');
const getWritableDirectory = require('@now/build-utils/fs/get-writable-directory.js');
const download = require('@now/build-utils/fs/download.js')
const downloadAndInstallPip = require('./download-and-install-pip')
const download = require('@now/build-utils/fs/download.js');
const glob = require('@now/build-utils/fs/glob.js');
const { createLambda } = require('@now/build-utils/lambda.js');
const downloadAndInstallPip = require('./download-and-install-pip');

async function pipInstall(pipPath, srcDir, ...args) {
console.log(`running "pip install -t ${srcDir} ${args.join(' ')}"...`)
console.log(`running "pip install -t ${srcDir} ${args.join(' ')}"...`);
try {
await execa(
pipPath,
[
'install',
'-t', srcDir,
...args
...args,
],
{stdio: 'inherit'}
)
{ stdio: 'inherit' },
);
} catch (err) {
console.log(`failed to run "pip install -t ${srcDir} ${args.join(' ')}"`)
throw err
console.log(`failed to run "pip install -t ${srcDir} ${args.join(' ')}"`);
throw err;
}
}

exports.build = async ({ files, entrypoint, config }) => {
console.log('downloading files...')
exports.build = async ({ files, entrypoint }) => {
console.log('downloading files...');

const srcDir = await getWritableDirectory()
const srcDir = await getWritableDirectory();

// eslint-disable-next-line no-param-reassign
files = await download(files, srcDir);

files = await download(files, srcDir)

// this is where `pip` will be installed to
// we need it to be under `/tmp`
const pyUserBase = await getWritableDirectory()
process.env.PYTHONUSERBASE = pyUserBase
const pyUserBase = await getWritableDirectory();
process.env.PYTHONUSERBASE = pyUserBase;

const pipPath = await downloadAndInstallPip()
const pipPath = await downloadAndInstallPip();

await pipInstall(pipPath, srcDir, 'requests')
await pipInstall(pipPath, srcDir, 'requests');

if (files['requirements.txt']) {
console.log('found "requirements.txt"')
console.log('found "requirements.txt"');

const requirementsTxtPath = files['requirements.txt'].fsPath
await pipInstall(pipPath, srcDir, '-r', requirementsTxtPath)
const requirementsTxtPath = files['requirements.txt'].fsPath;
await pipInstall(pipPath, srcDir, '-r', requirementsTxtPath);
}

const originalNowHandlerPyContents = await readFile(path.join(__dirname, 'now_handler.py'), 'utf8')
const originalNowHandlerPyContents = await readFile(path.join(__dirname, 'now_handler.py'), 'utf8');
// will be used on `from $here import handler`
// for example, `from api.users import handler`
console.log('entrypoint is', entrypoint)
const userHandlerFilePath = entrypoint.replace(/\//g, '.').replace(/\.py$/, '')
console.log('entrypoint is', entrypoint);
const userHandlerFilePath = entrypoint.replace(/\//g, '.').replace(/\.py$/, '');
const nowHandlerPyContents = originalNowHandlerPyContents.replace(
'__NOW_HANDLER_FILENAME',
userHandlerFilePath
)
userHandlerFilePath,
);

// in order to allow the user to have `server.py`, we need our `server.py` to be called
// somethig else
const nowHandlerPyFilename = 'now__handler__python'
const nowHandlerPyFilename = 'now__handler__python';

await writeFile(path.join(srcDir, nowHandlerPyFilename + '.py'), nowHandlerPyContents)
await writeFile(path.join(srcDir, `${nowHandlerPyFilename}.py`), nowHandlerPyContents);

const lambda = await createLambda({
files: await glob('**', srcDir),
handler: nowHandlerPyFilename + '.now_handler',
handler: `${nowHandlerPyFilename}.now_handler`,
runtime: 'python3.6',
environment: {}
})
environment: {},
});

return {
[entrypoint]: lambda
}
}
[entrypoint]: lambda,
};
};

@@ -1,11 +1,14 @@
{
"name": "@now/python",
"version": "0.0.37-canary.3",
"version": "0.0.37-canary.4",
"main": "index.js",
"license": "MIT",
"dependencies": {
"execa": "^1.0.0",
"fs.promised": "^3.0.0",
"node-fetch": "^2.2.0"
},
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}

@@ -1,8 +1,10 @@
const download = require('@now/build-utils/fs/download.js');
const glob = require('@now/build-utils/fs/glob.js');
const path = require('path');
const { runNpmInstall, runPackageJsonScript,
runShellScript } = require('@now/build-utils/fs/run-user-scripts.js');
const {
runNpmInstall, runPackageJsonScript,
runShellScript,
} = require('@now/build-utils/fs/run-user-scripts.js');

exports.build = async ({ files, entrypoint, workPath }) => {
console.log('downloading user files...');
@@ -13,17 +15,16 @@ exports.build = async ({ files, entrypoint, workPath }) => {
const distPath = path.join(workPath, path.dirname(entrypoint), 'dist');

if (path.basename(entrypoint) === 'package.json') {
await runNpmInstall(entrypointFsDirname, [ '--prefer-offline' ]);
await runNpmInstall(entrypointFsDirname, ['--prefer-offline']);
if (await runPackageJsonScript(entrypointFsDirname, 'now-build')) {
return await glob('**', distPath, mountpoint);
} else {
throw new Error(`An error running "now-build" script in "${entrypoint}"`);
return glob('**', distPath, mountpoint);
}
throw new Error(`An error running "now-build" script in "${entrypoint}"`);
}

if (path.extname(entrypoint) === '.sh') {
await runShellScript(path.join(workPath, entrypoint));
return await glob('**', distPath, mountpoint);
return glob('**', distPath, mountpoint);
}

return {};

@@ -1,4 +1,7 @@
{
"name": "@now/static-build",
"version": "0.4.12-canary.3"
"version": "0.4.12-canary.4",
"peerDependencies": {
"@now/build-utils": ">=0.0.1"
}
}
