Mirror of https://github.com/LukeHagar/vercel.git (synced 2025-12-24 03:39:11 +00:00)

Compare commits: @now/node- ... @now/next@ (85 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 0da7197c3e | |
| | 950a4e98e9 | |
| | 8258ede23f | |
| | 77f84fe2aa | |
| | 5c4b946864 | |
| | dfc51ad97f | |
| | d32afc8332 | |
| | 9d1263ccc2 | |
| | 7bf2cfb3dc | |
| | 9b37460c4f | |
| | b7f8b37ca6 | |
| | 13aa1b2d1c | |
| | 92437c075e | |
| | 331c263587 | |
| | 7d4f6f636b | |
| | 5e90ef8e34 | |
| | 4885d680a7 | |
| | 97cbe0b894 | |
| | 301eea90ee | |
| | ea4f9dd930 | |
| | 38928ab942 | |
| | bfb67d10ec | |
| | 616bad8a3d | |
| | e026ddf805 | |
| | bec9ea101f | |
| | 54f3f755fb | |
| | 5b03109ba7 | |
| | 7ff9e810ff | |
| | 3036aff45e | |
| | c366aa69a4 | |
| | c8d225522d | |
| | 8ee5063669 | |
| | 9372e70747 | |
| | 4a4bd550a1 | |
| | f53343d547 | |
| | e4ed811b53 | |
| | e9935dee31 | |
| | 2e1e6bb131 | |
| | 4a01ac4bd0 | |
| | bd1a7c428f | |
| | 9a4a3dac47 | |
| | 4f2c35a0ee | |
| | 672df5d026 | |
| | 8cb648abc4 | |
| | 74f658c634 | |
| | efbb54a232 | |
| | 3e2bd03e01 | |
| | 8dc92b70b9 | |
| | 4267be4e5a | |
| | 43ba6459eb | |
| | 8c5638915d | |
| | 3fab247c15 | |
| | 6ab0e2e9ab | |
| | 34369148d7 | |
| | 662ad1ed3a | |
| | 890cd74ee5 | |
| | 7ef616b31e | |
| | bebcfa4bb5 | |
| | 25100c53aa | |
| | fe20da87e7 | |
| | 18cb147c86 | |
| | 9c9e18586f | |
| | 0cd7192740 | |
| | a2d9c4fb4b | |
| | 02fafd2ebc | |
| | 42577c915c | |
| | 73db9e11dd | |
| | 3125125c16 | |
| | 5335291408 | |
| | 36620559f9 | |
| | 360ea3a609 | |
| | 1cd362126c | |
| | ae19fe95f6 | |
| | 3e34d402a2 | |
| | cc7b97fbbb | |
| | c1049985af | |
| | 214388ccf3 | |
| | b1d6b7bfc0 | |
| | ece3564dfd | |
| | a88af1f077 | |
| | d92f7b26c0 | |
| | 52198af750 | |
| | d58bff2453 | |
| | 8c0a144ae4 | |
| | 106e4d5f36 | |
@@ -20,12 +20,12 @@ jobs:
- run:
    name: Bootstrapping
    command: yarn bootstrap
- run:
    name: Linting
    command: yarn lint
- run:
    name: Building
    command: ./.circleci/build.sh
- run:
    name: Linting
    command: yarn lint
- run:
    name: Tests
    command: yarn test
@@ -2,3 +2,6 @@
/node_modules/*
/**/node_modules/*
/packages/now-go/go/*
/packages/now-build-utils/dist/*
/packages/now-node/dist/*
/packages/now-node-bridge/*
.prettierrc.json (new file, 3 lines)
@@ -0,0 +1,3 @@
{
  "singleQuote": true
}
@@ -29,11 +29,13 @@ npm install next --save
```js
module.exports = {
  target: 'serverless'
  // Other options are still valid
  // Other options
}
```

4. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`
4. Remove `distDir` from `next.config.js` as `@now/next` can't parse this file and expects your build output at `/.next`

5. Optionally make sure the `"src"` in `"builds"` points to your application `package.json`

```js
{
@@ -14,14 +14,15 @@
  "bootstrap": "lerna bootstrap",
  "publish-stable": "lerna version",
  "publish-canary": "lerna version prerelease --preid canary",
  "lint": "tsc && eslint .",
  "build": "./.circleci/build.sh",
  "lint": "eslint .",
  "test": "jest --runInBand --verbose",
  "lint-staged": "lint-staged"
},
"pre-commit": "lint-staged",
"lint-staged": {
  "*.js": [
    "prettier --write --single-quote",
    "prettier --write",
    "eslint --fix",
    "git add"
  ]

@@ -43,7 +44,6 @@
  "lint-staged": "^8.0.4",
  "node-fetch": "^2.3.0",
  "pre-commit": "^1.2.2",
  "prettier": "^1.15.2",
  "typescript": "^3.1.6"
  "prettier": "^1.15.2"
}
}
@@ -12,9 +12,10 @@ exports.config = {

exports.analyze = ({ files, entrypoint }) => files[entrypoint].digest;

exports.build = async ({ files, entrypoint, config }) => {
exports.build = async ({
  workPath, files, entrypoint, config,
}) => {
  const srcDir = await getWritableDirectory();
  const workDir = await getWritableDirectory();

  console.log('downloading files...');
  await download(files, srcDir);

@@ -24,7 +25,7 @@ exports.build = async ({ files, entrypoint, config }) => {
    return o;
  }, {});

  const IMPORT_CACHE = `${workDir}/.import-cache`;
  const IMPORT_CACHE = `${workPath}/.import-cache`;
  const env = Object.assign({}, process.env, configEnv, {
    PATH: `${IMPORT_CACHE}/bin:${process.env.PATH}`,
    IMPORT_CACHE,

@@ -37,12 +38,12 @@ exports.build = async ({ files, entrypoint, config }) => {

  await execa(builderPath, [entrypoint], {
    env,
    cwd: workDir,
    cwd: workPath,
    stdio: 'inherit',
  });

  const lambda = await createLambda({
    files: await glob('**', workDir),
    files: await glob('**', workPath),
    handler: entrypoint, // not actually used in `bootstrap`
    runtime: 'provided',
    environment: Object.assign({}, configEnv, {
@@ -1,6 +1,6 @@
{
  "name": "@now/bash",
  "version": "0.1.2",
  "version": "0.1.5-canary.0",
  "description": "Now 2.0 builder for HTTP endpoints written in Bash",
  "main": "index.js",
  "author": "Nathan Rajlich <nate@zeit.co>",
@@ -54,9 +54,10 @@ _lambda_runtime_next() {
  local stdin
  stdin="$(mktemp -u)"
  mkfifo "$stdin"
  _lambda_runtime_body "$event" > "$stdin" &
  _lambda_runtime_body < "$event" > "$stdin" &

  handler "$event" < "$stdin" > "$body" || exit_code="$?"

  rm -f "$event" "$stdin"

  if [ "$exit_code" -eq 0 ]; then

@@ -74,12 +75,14 @@ _lambda_runtime_next() {
}

_lambda_runtime_body() {
  if [ "$(jq --raw-output '.body | type' < "$1")" = "string" ]; then
    if [ "$(jq --raw-output '.encoding' < "$1")" = "base64" ]; then
      jq --raw-output '.body' < "$1" | base64 -d
  local event
  event="$(cat)"
  if [ "$(jq --raw-output '.body | type' <<< "$event")" = "string" ]; then
    if [ "$(jq --raw-output '.encoding' <<< "$event")" = "base64" ]; then
      jq --raw-output '.body' <<< "$event" | base64 --decode
    else
      # assume plain-text body
      jq --raw-output '.body' < "$1"
      jq --raw-output '.body' <<< "$event"
    fi
  fi
}
packages/now-build-utils/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
dist
@@ -1,2 +1,3 @@
/src
/test
tmp
@@ -1,33 +1 @@
const assert = require('assert');
const intoStream = require('into-stream');

class FileBlob {
  constructor({ mode = 0o100644, data }) {
    assert(typeof mode === 'number');
    assert(typeof data === 'string' || Buffer.isBuffer(data));
    this.type = 'FileBlob';
    this.mode = mode;
    this.data = data;
  }

  static async fromStream({ mode = 0o100644, stream }) {
    assert(typeof mode === 'number');
    assert(typeof stream.pipe === 'function'); // is-stream
    const chunks = [];

    await new Promise((resolve, reject) => {
      stream.on('data', chunk => chunks.push(Buffer.from(chunk)));
      stream.on('error', error => reject(error));
      stream.on('end', () => resolve());
    });

    const data = Buffer.concat(chunks);
    return new FileBlob({ mode, data });
  }

  toStream() {
    return intoStream(this.data);
  }
}

module.exports = FileBlob;
module.exports = require('./dist/index').FileBlob;
@@ -1,100 +1 @@
const assert = require('assert');
const fs = require('fs-extra');
const multiStream = require('multistream');
const path = require('path');
const Sema = require('async-sema');

/** @typedef {{[filePath: string]: FileFsRef}} FsFiles */

const semaToPreventEMFILE = new Sema(30);

/**
 * @constructor
 * @argument {Object} options
 * @argument {number} [options.mode=0o100644]
 * @argument {string} options.fsPath
 */
class FileFsRef {
  constructor({ mode = 0o100644, fsPath }) {
    assert(typeof mode === 'number');
    assert(typeof fsPath === 'string');
    /** @type {string} */
    this.type = 'FileFsRef';
    /** @type {number} */
    this.mode = mode;
    /** @type {string} */
    this.fsPath = fsPath;
  }

  /**
   * Creates a `FileFsRef` with the correct `mode` from the file system.
   *
   * @argument {Object} options
   * @argument {string} options.fsPath
   * @returns {Promise<FileFsRef>}
   */
  static async fromFsPath({ fsPath }) {
    const { mode } = await fs.lstat(fsPath);
    return new FileFsRef({ mode, fsPath });
  }

  /**
   * @argument {Object} options
   * @argument {number} [options.mode=0o100644]
   * @argument {NodeJS.ReadableStream} options.stream
   * @argument {string} options.fsPath
   * @returns {Promise<FileFsRef>}
   */
  static async fromStream({ mode = 0o100644, stream, fsPath }) {
    assert(typeof mode === 'number');
    assert(typeof stream.pipe === 'function'); // is-stream
    assert(typeof fsPath === 'string');
    await fs.mkdirp(path.dirname(fsPath));

    await new Promise((resolve, reject) => {
      const dest = fs.createWriteStream(fsPath);
      stream.pipe(dest);
      stream.on('error', reject);
      dest.on('finish', resolve);
      dest.on('error', reject);
    });

    await fs.chmod(fsPath, mode.toString(8).slice(-3));
    return new FileFsRef({ mode, fsPath });
  }

  /**
   * @returns {Promise<NodeJS.ReadableStream>}
   */
  async toStreamAsync() {
    await semaToPreventEMFILE.acquire();
    const release = () => semaToPreventEMFILE.release();
    const stream = fs.createReadStream(this.fsPath);
    stream.on('close', release);
    stream.on('error', release);
    return stream;
  }

  /**
   * @returns {NodeJS.ReadableStream}
   */
  toStream() {
    let flag;

    // eslint-disable-next-line consistent-return
    return multiStream((cb) => {
      if (flag) return cb(null, null);
      flag = true;

      this.toStreamAsync()
        .then((stream) => {
          cb(null, stream);
        })
        .catch((error) => {
          cb(error, null);
        });
    });
  }
}

module.exports = FileFsRef;
module.exports = require('./dist/index').FileFsRef;
@@ -1,96 +1 @@
const assert = require('assert');
const fetch = require('node-fetch');
const multiStream = require('multistream');
const retry = require('async-retry');
const Sema = require('async-sema');

/** @typedef {{[filePath: string]: FileRef}} Files */

const semaToDownloadFromS3 = new Sema(10);

class BailableError extends Error {
  constructor(...args) {
    super(...args);
    /** @type {boolean} */
    this.bail = false;
  }
}

/**
 * @constructor
 * @argument {Object} options
 * @argument {number} [options.mode=0o100644]
 * @argument {string} options.digest
 */
class FileRef {
  constructor({ mode = 0o100644, digest }) {
    assert(typeof mode === 'number');
    assert(typeof digest === 'string');
    /** @type {string} */
    this.type = 'FileRef';
    /** @type {number} */
    this.mode = mode;
    /** @type {string} */
    this.digest = digest;
  }

  /**
   * @returns {Promise<NodeJS.ReadableStream>}
   */
  async toStreamAsync() {
    let url;
    // sha:24be087eef9fac01d61b30a725c1a10d7b45a256
    const digestParts = this.digest.split(':');
    if (digestParts[0] === 'sha') {
      // url = `https://s3.amazonaws.com/now-files/${digestParts[1]}`;
      url = `https://dmmcy0pwk6bqi.cloudfront.net/${digestParts[1]}`;
    }

    assert(url);

    await semaToDownloadFromS3.acquire();
    // console.time(`downloading ${url}`);
    try {
      return await retry(
        async () => {
          const resp = await fetch(url);
          if (!resp.ok) {
            const error = new BailableError(
              `download: ${resp.status} ${resp.statusText} for ${url}`,
            );
            if (resp.status === 403) error.bail = true;
            throw error;
          }
          return resp.body;
        },
        { factor: 1, retries: 3 },
      );
    } finally {
      // console.timeEnd(`downloading ${url}`);
      semaToDownloadFromS3.release();
    }
  }

  /**
   * @returns {NodeJS.ReadableStream}
   */
  toStream() {
    let flag;

    // eslint-disable-next-line consistent-return
    return multiStream((cb) => {
      if (flag) return cb(null, null);
      flag = true;

      this.toStreamAsync()
        .then((stream) => {
          cb(null, stream);
        })
        .catch((error) => {
          cb(error, null);
        });
    });
  }
}

module.exports = FileRef;
module.exports = require('./dist/index').FileRef;
@@ -1,38 +1 @@
const path = require('path');
const FileFsRef = require('../file-fs-ref.js');

/** @typedef {import('../file-ref')} FileRef */
/** @typedef {import('../file-fs-ref')} FileFsRef */
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
/** @typedef {{[filePath: string]: FileFsRef}|{}} DownloadedFiles */

/**
 * @param {FileRef|FileFsRef} file
 * @param {string} fsPath
 * @returns {Promise<FileFsRef>}
 */
async function downloadFile(file, fsPath) {
  const { mode } = file;
  const stream = file.toStream();
  return FileFsRef.fromStream({ mode, stream, fsPath });
}

/**
 * Download files to disk
 * @argument {Files} files
 * @argument {string} basePath
 * @returns {Promise<DownloadedFiles>}
 */
module.exports = async function download(files, basePath) {
  const files2 = {};

  await Promise.all(
    Object.keys(files).map(async (name) => {
      const file = files[name];
      const fsPath = path.join(basePath, name);
      files2[name] = await downloadFile(file, fsPath);
    }),
  );

  return files2;
};
module.exports = require('../dist/fs/download').default;
@@ -1,12 +1 @@
const path = require('path');
const fs = require('fs-extra');

const prod = process.env.AWS_EXECUTION_ENV || process.env.X_GOOGLE_CODE_LOCATION;
const TMP_PATH = prod ? '/tmp' : path.join(__dirname, 'tmp');

module.exports = async function getWritableDirectory() {
  const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
  const directory = path.join(TMP_PATH, name);
  await fs.mkdirp(directory);
  return directory;
};
module.exports = require('../dist/fs/get-writable-directory').default;
@@ -1,67 +1 @@
const assert = require('assert');
const path = require('path');
const vanillaGlob = require('glob');
const FileFsRef = require('../file-fs-ref.js');

/** @typedef {import('fs').Stats} Stats */
/** @typedef {import('glob').IOptions} GlobOptions */
/** @typedef {import('../file-fs-ref').FsFiles|{}} GlobFiles */

/**
 * @argument {string} pattern
 * @argument {GlobOptions|string} opts
 * @argument {string} [mountpoint]
 * @returns {Promise<GlobFiles>}
 */
module.exports = function glob(pattern, opts = {}, mountpoint) {
  return new Promise((resolve, reject) => {
    /** @type {GlobOptions} */
    let options;
    if (typeof opts === 'string') {
      options = { cwd: opts };
    } else {
      options = opts;
    }

    if (!options.cwd) {
      throw new Error(
        'Second argument (basePath) must be specified for names of resulting files',
      );
    }

    if (!path.isAbsolute(options.cwd)) {
      throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`);
    }

    options.statCache = {};
    options.stat = true;
    options.dot = true;

    // eslint-disable-next-line consistent-return
    vanillaGlob(pattern, options, (error, files) => {
      if (error) return reject(error);

      resolve(
        files.reduce((files2, relativePath) => {
          const fsPath = path.join(options.cwd, relativePath);
          /** @type {Stats|any} */
          const stat = options.statCache[fsPath];
          assert(
            stat,
            `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
          );
          if (stat && stat.isFile()) {
            let finalPath = relativePath;
            if (mountpoint) finalPath = path.join(mountpoint, finalPath);
            return {
              ...files2,
              [finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
            };
          }

          return files2;
        }, {}),
      );
    });
  });
};
module.exports = require('../dist/fs/glob').default;
@@ -1,25 +1 @@
/** @typedef { import('@now/build-utils/file-ref') } FileRef */
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */

/**
 * @callback delegate
 * @argument {string} name
 * @returns {string}
 */

/**
 * Rename files using delegate function
 * @argument {Files} files
 * @argument {delegate} delegate
 * @returns {Files}
 */
module.exports = function rename(files, delegate) {
  return Object.keys(files).reduce(
    (newFiles, name) => ({
      ...newFiles,
      [delegate(name)]: files[name],
    }),
    {},
  );
};
module.exports = require('../dist/fs/rename').default;
@@ -1,96 +1 @@
const assert = require('assert');
const fs = require('fs-extra');
const path = require('path');
const { spawn } = require('child_process');

function spawnAsync(command, args, cwd) {
  return new Promise((resolve, reject) => {
    const child = spawn(command, args, { stdio: 'inherit', cwd });
    child.on('error', reject);
    child.on('close', (code, signal) => (code !== 0
      ? reject(new Error(`Exited with ${code || signal}`))
      : resolve()));
  });
}

async function runShellScript(fsPath) {
  assert(path.isAbsolute(fsPath));
  const destPath = path.dirname(fsPath);
  await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
  return true;
}

async function scanParentDirs(destPath, scriptName) {
  assert(path.isAbsolute(destPath));

  let hasScript = false;
  let hasPackageLockJson = false;
  let currentDestPath = destPath;

  // eslint-disable-next-line no-constant-condition
  while (true) {
    const packageJsonPath = path.join(currentDestPath, 'package.json');
    // eslint-disable-next-line no-await-in-loop
    if (await fs.exists(packageJsonPath)) {
      // eslint-disable-next-line no-await-in-loop
      const packageJson = JSON.parse(await fs.readFile(packageJsonPath));
      hasScript = Boolean(
        packageJson.scripts && scriptName && packageJson.scripts[scriptName],
      );
      // eslint-disable-next-line no-await-in-loop
      hasPackageLockJson = await fs.exists(
        path.join(currentDestPath, 'package-lock.json'),
      );
      break;
    }

    const newDestPath = path.dirname(currentDestPath);
    if (currentDestPath === newDestPath) break;
    currentDestPath = newDestPath;
  }

  return { hasScript, hasPackageLockJson };
}

async function installDependencies(destPath, args = []) {
  assert(path.isAbsolute(destPath));

  let commandArgs = args;
  console.log(`installing to ${destPath}`);
  const { hasPackageLockJson } = await scanParentDirs(destPath);

  if (hasPackageLockJson) {
    commandArgs = args.filter(a => a !== '--prefer-offline');
    await spawnAsync('npm', ['install'].concat(commandArgs), destPath);
    await spawnAsync('npm', ['cache', 'clean', '--force'], destPath);
  } else {
    await spawnAsync('yarn', ['--cwd', destPath].concat(commandArgs), destPath);
    await spawnAsync('yarn', ['cache', 'clean'], destPath);
  }
}

async function runPackageJsonScript(destPath, scriptName) {
  assert(path.isAbsolute(destPath));
  const { hasScript, hasPackageLockJson } = await scanParentDirs(
    destPath,
    scriptName,
  );
  if (!hasScript) return false;

  if (hasPackageLockJson) {
    console.log(`running "npm run ${scriptName}"`);
    await spawnAsync('npm', ['run', scriptName], destPath);
  } else {
    console.log(`running "yarn run ${scriptName}"`);
    await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath);
  }

  return true;
}

module.exports = {
  runShellScript,
  installDependencies,
  runNpmInstall: installDependencies,
  runPackageJsonScript,
};
module.exports = require('../dist/fs/run-user-scripts');
@@ -1,4 +1 @@
const fastStreamToBuffer = require('fast-stream-to-buffer');
const { promisify } = require('util');

module.exports = promisify(fastStreamToBuffer);
module.exports = require('../dist/fs/stream-to-buffer').default;
@@ -1,60 +1 @@
const assert = require('assert');
const Sema = require('async-sema');
const { ZipFile } = require('yazl');
const streamToBuffer = require('./fs/stream-to-buffer.js');

class Lambda {
  constructor({
    zipBuffer, handler, runtime, environment,
  }) {
    this.type = 'Lambda';
    this.zipBuffer = zipBuffer;
    this.handler = handler;
    this.runtime = runtime;
    this.environment = environment;
  }
}

const sema = new Sema(10);
const mtime = new Date(1540000000000);

async function createLambda({
  files, handler, runtime, environment = {},
}) {
  assert(typeof files === 'object', '"files" must be an object');
  assert(typeof handler === 'string', '"handler" is not a string');
  assert(typeof runtime === 'string', '"runtime" is not a string');
  assert(typeof environment === 'object', '"environment" is not an object');

  await sema.acquire();
  try {
    const zipFile = new ZipFile();
    const zipBuffer = await new Promise((resolve, reject) => {
      Object.keys(files)
        .sort()
        .forEach((name) => {
          const file = files[name];
          const stream = file.toStream();
          stream.on('error', reject);
          zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
        });

      zipFile.end();
      streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
    });

    return new Lambda({
      zipBuffer,
      handler,
      runtime,
      environment,
    });
  } finally {
    sema.release();
  }
}

module.exports = {
  Lambda,
  createLambda,
};
module.exports = require('./dist/index');
@@ -1,7 +1,9 @@
{
  "name": "@now/build-utils",
  "version": "0.4.35-canary.2",
  "version": "0.4.37-canary.3",
  "license": "MIT",
  "main": "./dist/index.js",
  "types": "./dist/index.d.js",
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now-builders.git",

@@ -10,16 +12,26 @@
  "dependencies": {
    "async-retry": "1.2.3",
    "async-sema": "2.1.4",
    "fast-stream-to-buffer": "1.0.0",
    "end-of-stream": "^1.4.1",
    "fs-extra": "7.0.0",
    "glob": "7.1.3",
    "into-stream": "4.0.0",
    "into-stream": "5.0.0",
    "memory-fs": "0.4.1",
    "multistream": "2.1.1",
    "node-fetch": "2.2.0",
    "yazl": "2.4.3"
  },
  "scripts": {
    "test": "jest"
    "build": "tsc",
    "test": "tsc && jest",
    "prepublish": "tsc"
  },
  "devDependencies": {
    "@types/async-retry": "^1.2.1",
    "@types/end-of-stream": "^1.4.0",
    "@types/fs-extra": "^5.0.5",
    "@types/node-fetch": "^2.1.6",
    "@types/yazl": "^2.4.1",
    "typescript": "3.3.4000"
  }
}
packages/now-build-utils/src/download.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import path from 'path';
import FileFsRef from './file-fs-ref';
import { File, Files } from './types';

export interface DownloadedFiles {
  [filePath: string]: FileFsRef
}

async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
  const { mode } = file;
  const stream = file.toStream();
  return FileFsRef.fromStream({ mode, stream, fsPath });
}

export default async function download(files: Files, basePath: string): Promise<DownloadedFiles> {
  const files2: DownloadedFiles = {};

  await Promise.all(
    Object.keys(files).map(async (name) => {
      const file = files[name];
      const fsPath = path.join(basePath, name);
      files2[name] = await downloadFile(file, fsPath);
    }),
  );

  return files2;
};
packages/now-build-utils/src/file-blob.ts (new file, 46 lines)
@@ -0,0 +1,46 @@
import assert from 'assert';
import intoStream from 'into-stream';
import { File } from './types';

interface FileBlobOptions {
  mode?: number;
  data: string | Buffer;
}

interface FromStreamOptions {
  mode?: number;
  stream: NodeJS.ReadableStream;
}

export default class FileBlob implements File {
  public type: string;
  public mode: number;
  public data: string | Buffer;

  constructor({ mode = 0o100644, data }: FileBlobOptions) {
    assert(typeof mode === 'number');
    assert(typeof data === 'string' || Buffer.isBuffer(data));
    this.type = 'FileBlob';
    this.mode = mode;
    this.data = data;
  }

  static async fromStream({ mode = 0o100644, stream }: FromStreamOptions) {
    assert(typeof mode === 'number');
    assert(typeof stream.pipe === 'function'); // is-stream
    const chunks: Buffer[] = [];

    await new Promise<void>((resolve, reject) => {
      stream.on('data', chunk => chunks.push(Buffer.from(chunk)));
      stream.on('error', error => reject(error));
      stream.on('end', () => resolve());
    });

    const data = Buffer.concat(chunks);
    return new FileBlob({ mode, data });
  }

  toStream(): NodeJS.ReadableStream {
    return intoStream(this.data);
  }
}
packages/now-build-utils/src/file-fs-ref.ts (new file, 89 lines)
@@ -0,0 +1,89 @@
import assert from 'assert';
import fs from 'fs-extra';
import multiStream from 'multistream';
import path from 'path';
import Sema from 'async-sema';
import { File } from './types';

const semaToPreventEMFILE = new Sema(30);

interface FileFsRefOptions {
  mode?: number;
  fsPath: string;
}

interface FromOptions {
  fsPath: string;
}

interface FromStreamOptions {
  mode: number;
  stream: NodeJS.ReadableStream;
  fsPath: string;
}

class FileFsRef implements File {
  public type: string;
  public mode: number;
  public fsPath: string;

  constructor({ mode = 0o100644, fsPath }: FileFsRefOptions) {
    assert(typeof mode === 'number');
    assert(typeof fsPath === 'string');
    this.type = 'FileFsRef';
    this.mode = mode;
    this.fsPath = fsPath;
  }

  static async fromFsPath({ fsPath }: FromOptions): Promise<FileFsRef> {
    const { mode } = await fs.lstat(fsPath);
    return new FileFsRef({ mode, fsPath });
  }

  static async fromStream({ mode = 0o100644, stream, fsPath }: FromStreamOptions): Promise<FileFsRef> {
    assert(typeof mode === 'number');
    assert(typeof stream.pipe === 'function'); // is-stream
    assert(typeof fsPath === 'string');
    await fs.mkdirp(path.dirname(fsPath));

    await new Promise<void>((resolve, reject) => {
      const dest = fs.createWriteStream(fsPath);
      stream.pipe(dest);
      stream.on('error', reject);
      dest.on('finish', resolve);
      dest.on('error', reject);
    });

    await fs.chmod(fsPath, mode.toString(8).slice(-3));
    return new FileFsRef({ mode, fsPath });
  }

  async toStreamAsync(): Promise<NodeJS.ReadableStream> {
    await semaToPreventEMFILE.acquire();
    const release = () => semaToPreventEMFILE.release();
    const stream = fs.createReadStream(this.fsPath);
    stream.on('close', release);
    stream.on('error', release);
    return stream;
  }

  toStream(): NodeJS.ReadableStream {
    let flag = false;

    // eslint-disable-next-line consistent-return
    return multiStream((cb) => {
      if (flag) return cb(null, null);
      flag = true;

      this.toStreamAsync()
        .then((stream) => {
          cb(null, stream);
        })
        .catch((error) => {
          cb(error, null);
        });
    });
  }
}

export = FileFsRef;
packages/now-build-utils/src/file-ref.ts (new file, 88 lines)
@@ -0,0 +1,88 @@
import assert from 'assert';
import fetch from 'node-fetch';
import multiStream from 'multistream';
import retry from 'async-retry';
import Sema from 'async-sema';
import { File } from './types';

interface FileRefOptions {
  mode?: number;
  digest: string;
}

const semaToDownloadFromS3 = new Sema(10);

class BailableError extends Error {
  public bail: boolean;

  constructor(...args: string[]) {
    super(...args);
    this.bail = false;
  }
}

export default class FileRef implements File {
  public type: string;
  public mode: number;
  public digest: string;

  constructor({ mode = 0o100644, digest }: FileRefOptions) {
    assert(typeof mode === 'number');
    assert(typeof digest === 'string');
    this.type = 'FileRef';
    this.mode = mode;
    this.digest = digest;
  }

  async toStreamAsync(): Promise<NodeJS.ReadableStream> {
    let url = '';
    // sha:24be087eef9fac01d61b30a725c1a10d7b45a256
    const digestParts = this.digest.split(':');
    if (digestParts[0] === 'sha') {
      // url = `https://s3.amazonaws.com/now-files/${digestParts[1]}`;
      url = `https://dmmcy0pwk6bqi.cloudfront.net/${digestParts[1]}`;
    } else {
      throw new Error('Expected digest to be sha');
    }

    await semaToDownloadFromS3.acquire();
    // console.time(`downloading ${url}`);
    try {
      return await retry(
        async () => {
          const resp = await fetch(url);
          if (!resp.ok) {
            const error = new BailableError(
              `download: ${resp.status} ${resp.statusText} for ${url}`,
            );
            if (resp.status === 403) error.bail = true;
            throw error;
          }
          return resp.body;
        },
        { factor: 1, retries: 3 },
      );
    } finally {
      // console.timeEnd(`downloading ${url}`);
      semaToDownloadFromS3.release();
    }
  }

  toStream(): NodeJS.ReadableStream {
    let flag = false;

    // eslint-disable-next-line consistent-return
    return multiStream((cb) => {
      if (flag) return cb(null, null);
      flag = true;

      this.toStreamAsync()
        .then((stream) => {
          cb(null, stream);
        })
        .catch((error) => {
          cb(error, null);
        });
    });
  }
}
packages/now-build-utils/src/fs/download.ts (new file, 27 lines)
@@ -0,0 +1,27 @@
import path from 'path';
import FileFsRef from '../file-fs-ref';
import { File, Files } from '../types';

export interface DownloadedFiles {
  [filePath: string]: FileFsRef
}

async function downloadFile(file: File, fsPath: string): Promise<FileFsRef> {
  const { mode } = file;
  const stream = file.toStream();
  return FileFsRef.fromStream({ mode, stream, fsPath });
}

export default async function download(files: Files, basePath: string): Promise<DownloadedFiles> {
  const files2: DownloadedFiles = {};

  await Promise.all(
    Object.keys(files).map(async (name) => {
      const file = files[name];
      const fsPath = path.join(basePath, name);
      files2[name] = await downloadFile(file, fsPath);
    }),
  );

  return files2;
};
packages/now-build-utils/src/fs/get-writable-directory.ts (new file, 10 lines)
@@ -0,0 +1,10 @@
import { join } from 'path';
import { tmpdir } from 'os';
import { mkdirp } from 'fs-extra';

export default async function getWritableDirectory() {
  const name = Math.floor(Math.random() * 0x7fffffff).toString(16);
  const directory = join(tmpdir(), name);
  await mkdirp(directory);
  return directory;
}
packages/now-build-utils/src/fs/glob.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
import assert from 'assert';
import path from 'path';
import vanillaGlob from 'glob';
import FileFsRef from '../file-fs-ref';

type GlobOptions = import('glob').IOptions;

interface FsFiles {
  [filePath: string]: FileFsRef
}

export default function glob(pattern: string, opts: GlobOptions | string, mountpoint?: string): Promise<FsFiles> {
  return new Promise<FsFiles>((resolve, reject) => {
    let options: GlobOptions;
    if (typeof opts === 'string') {
      options = { cwd: opts };
    } else {
      options = opts;
    }

    if (!options.cwd) {
      throw new Error(
        'Second argument (basePath) must be specified for names of resulting files',
      );
    }

    if (!path.isAbsolute(options.cwd)) {
      throw new Error(`basePath/cwd must be an absolute path (${options.cwd})`);
    }

    options.statCache = {};
    options.stat = true;
    options.dot = true;

    // eslint-disable-next-line consistent-return
    vanillaGlob(pattern, options, (error, files) => {
      if (error) return reject(error);

      resolve(
        files.reduce<FsFiles>((files2, relativePath) => {
          const fsPath = path.join(options.cwd!, relativePath);
          const stat = options.statCache![fsPath] as import('fs').Stats;
          assert(
            stat,
            `statCache does not contain value for ${relativePath} (resolved to ${fsPath})`,
          );
          if (stat && stat.isFile()) {
            let finalPath = relativePath;
            if (mountpoint) finalPath = path.join(mountpoint, finalPath);
            return {
              ...files2,
              [finalPath]: new FileFsRef({ mode: stat.mode, fsPath }),
            };
          }

          return files2;
        }, {}),
      );
    });
  });
};
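The string overload and the optional `mountpoint` argument are easy to miss from the signature alone. A minimal usage sketch under stated assumptions (the `workPath` value and the `'public'` prefix are illustrative, not taken from this diff; `glob` is imported through the package's `index.ts` re-export shown later in this compare):

```ts
import { glob } from '@now/build-utils';

async function collectOutput(workPath: string) {
  // Passing a string is shorthand for { cwd: workPath }; cwd must be absolute.
  const files = await glob('**', workPath);

  // With a mountpoint, every resulting key is prefixed,
  // e.g. 'index.html' becomes 'public/index.html'.
  const mounted = await glob('**', { cwd: workPath }, 'public');

  return { files, mounted };
}
```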
packages/now-build-utils/src/fs/rename.ts (new file, 12 lines)
@@ -0,0 +1,12 @@
import { Files } from '../types';
type Delegate = (name: string) => string;

export default function rename(files: Files, delegate: Delegate): Files {
  return Object.keys(files).reduce(
    (newFiles, name) => ({
      ...newFiles,
      [delegate(name)]: files[name],
    }),
    {},
  );
}
packages/now-build-utils/src/fs/run-user-scripts.ts (new file, 114 lines)
@@ -0,0 +1,114 @@
import assert from 'assert';
import fs from 'fs-extra';
import path from 'path';
import { spawn, SpawnOptions } from 'child_process';

function spawnAsync(command: string, args: string[], cwd: string, opts: SpawnOptions = {}) {
  return new Promise<void>((resolve, reject) => {
    const child = spawn(command, args, { stdio: 'inherit', cwd, ...opts });
    child.on('error', reject);
    child.on('close', (code, signal) => (code !== 0
      ? reject(new Error(`Exited with ${code || signal}`))
      : resolve()));
  });
}

async function chmodPlusX(fsPath: string) {
  const s = await fs.stat(fsPath);
  const newMode = s.mode | 64 | 8 | 1; // eslint-disable-line no-bitwise
  if (s.mode === newMode) return;
  const base8 = newMode.toString(8).slice(-3);
  await fs.chmod(fsPath, base8);
}

export async function runShellScript(fsPath: string) {
  assert(path.isAbsolute(fsPath));
  const destPath = path.dirname(fsPath);
  await chmodPlusX(fsPath);
  await spawnAsync(`./${path.basename(fsPath)}`, [], destPath);
  return true;
}

async function scanParentDirs(destPath: string, scriptName?: string) {
  assert(path.isAbsolute(destPath));

  let hasScript = false;
  let hasPackageLockJson = false;
  let currentDestPath = destPath;

  // eslint-disable-next-line no-constant-condition
  while (true) {
    const packageJsonPath = path.join(currentDestPath, 'package.json');
    // eslint-disable-next-line no-await-in-loop
    if (await fs.pathExists(packageJsonPath)) {
      // eslint-disable-next-line no-await-in-loop
      const packageJson = JSON.parse(await fs.readFile(packageJsonPath, 'utf8'));
      hasScript = Boolean(
        packageJson.scripts && scriptName && packageJson.scripts[scriptName],
      );
      // eslint-disable-next-line no-await-in-loop
      hasPackageLockJson = await fs.pathExists(
        path.join(currentDestPath, 'package-lock.json'),
      );
      break;
    }

    const newDestPath = path.dirname(currentDestPath);
    if (currentDestPath === newDestPath) break;
    currentDestPath = newDestPath;
  }

  return { hasScript, hasPackageLockJson };
}

export async function installDependencies(destPath: string, args: string[] = []) {
  assert(path.isAbsolute(destPath));

  let commandArgs = args;
  console.log(`installing to ${destPath}`);
  const { hasPackageLockJson } = await scanParentDirs(destPath);

  const opts = {
    env: {
      ...process.env,
      // This is a little hack to force `node-gyp` to build for the
      // Node.js version that `@now/node` and `@now/node-server` use
      npm_config_target: '8.10.0',
    },
  };

  if (hasPackageLockJson) {
    commandArgs = args.filter(a => a !== '--prefer-offline');
    await spawnAsync('npm', ['install'].concat(commandArgs), destPath, opts);
    await spawnAsync('npm', ['cache', 'clean', '--force'], destPath, opts);
  } else {
    await spawnAsync(
      'yarn',
      ['--cwd', destPath].concat(commandArgs),
      destPath,
      opts,
    );
    await spawnAsync('yarn', ['cache', 'clean'], destPath, opts);
  }
}

export async function runPackageJsonScript(destPath: string, scriptName: string) {
  assert(path.isAbsolute(destPath));
  const { hasScript, hasPackageLockJson } = await scanParentDirs(
    destPath,
    scriptName,
  );
  if (!hasScript) return false;

  if (hasPackageLockJson) {
    console.log(`running "npm run ${scriptName}"`);
    await spawnAsync('npm', ['run', scriptName], destPath);
  } else {
    console.log(`running "yarn run ${scriptName}"`);
    await spawnAsync('yarn', ['--cwd', destPath, 'run', scriptName], destPath);
  }

  return true;
}

export const runNpmInstall = installDependencies;
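These helpers pick npm or yarn by walking parent directories for a `package-lock.json`, and drop `--prefer-offline` when falling back to npm. A hedged sketch of how a builder might call them (the `'now-build'` script name is illustrative and not defined in this diff):

```ts
import { runNpmInstall, runPackageJsonScript } from '@now/build-utils';

async function prepare(workPath: string) {
  // Installs with npm when a lockfile is found above workPath, yarn otherwise.
  await runNpmInstall(workPath, ['--prefer-offline']);

  // Returns false when the script is not declared in package.json.
  const ran = await runPackageJsonScript(workPath, 'now-build');
  return ran;
}
```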
packages/now-build-utils/src/fs/stream-to-buffer.ts (new file, 26 lines)
@@ -0,0 +1,26 @@
import eos from 'end-of-stream';

export default function streamToBuffer(stream: NodeJS.ReadableStream): Promise<Buffer> {
  return new Promise<Buffer>((resolve, reject) => {
    const buffers: Buffer[] = [];

    stream.on('data', buffers.push.bind(buffers))

    eos(stream, (err) => {
      if (err) {
        reject(err);
        return;
      }
      switch (buffers.length) {
        case 0:
          resolve(Buffer.allocUnsafe(0));
          break;
        case 1:
          resolve(buffers[0]);
          break;
        default:
          resolve(Buffer.concat(buffers));
      }
    });
  });
}
packages/now-build-utils/src/index.ts (new file, 31 lines)
@@ -0,0 +1,31 @@
import FileBlob from './file-blob';
import FileFsRef from './file-fs-ref';
import FileRef from './file-ref';
import { File, Files, AnalyzeOptions, BuildOptions, PrepareCacheOptions } from './types';
import { Lambda, createLambda } from './lambda';
import download from './fs/download';
import getWriteableDirectory from './fs/get-writable-directory'
import glob from './fs/glob';
import rename from './fs/rename';
import { installDependencies, runPackageJsonScript, runNpmInstall, runShellScript } from './fs/run-user-scripts';
import streamToBuffer from './fs/stream-to-buffer';

export {
  FileBlob,
  FileFsRef,
  FileRef,
  Files,
  File,
  Lambda,
  createLambda,
  download,
  getWriteableDirectory,
  glob,
  rename,
  installDependencies, runPackageJsonScript, runNpmInstall, runShellScript,
  streamToBuffer,
  AnalyzeOptions,
  BuildOptions,
  PrepareCacheOptions,
};
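This index gathers everything a builder touches into one entry point. The sketch below shows how the pieces might be wired into a `build()` step, mirroring the `@now/bash` builder earlier in this diff; the compile step is elided and the `{ [entrypoint]: lambda }` return shape is an assumption, not something this compare shows:

```ts
import {
  BuildOptions,
  createLambda,
  download,
  glob,
} from '@now/build-utils';

// Hedged sketch of a builder's build() step using the re-exported helpers.
export async function build({ files, entrypoint, workPath }: BuildOptions) {
  // Materialize the deployment's source files into the writable work directory.
  await download(files, workPath);

  // ...compile the entrypoint into workPath here...

  // Package everything under workPath into a Lambda (runtime 'provided',
  // as in the @now/bash builder above).
  const lambda = await createLambda({
    files: await glob('**', workPath),
    handler: entrypoint,
    runtime: 'provided',
    environment: {},
  });

  return { [entrypoint]: lambda };
}
```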
packages/now-build-utils/src/lambda.ts (new file, 80 lines)
@@ -0,0 +1,80 @@
import assert from 'assert';
import Sema from 'async-sema';
import { ZipFile } from 'yazl';
import streamToBuffer from './fs/stream-to-buffer';
import { Files } from './types';

interface Environment {
  [key: string]: string;
}

interface LambdaOptions {
  zipBuffer: Buffer;
  handler: string;
  runtime: string;
  environment: Environment;
}

interface CreateLambdaOptions {
  files: Files;
  handler: string;
  runtime: string;
  environment?: Environment;
}

export class Lambda {
  public type: string;
  public zipBuffer: Buffer;
  public handler: string;
  public runtime: string;
  public environment: Environment;

  constructor({
    zipBuffer, handler, runtime, environment,
  }: LambdaOptions) {
    this.type = 'Lambda';
    this.zipBuffer = zipBuffer;
    this.handler = handler;
    this.runtime = runtime;
    this.environment = environment;
  }
}

const sema = new Sema(10);
const mtime = new Date(1540000000000);

export async function createLambda({
  files, handler, runtime, environment = {},
}: CreateLambdaOptions): Promise<Lambda> {
  assert(typeof files === 'object', '"files" must be an object');
  assert(typeof handler === 'string', '"handler" is not a string');
  assert(typeof runtime === 'string', '"runtime" is not a string');
  assert(typeof environment === 'object', '"environment" is not an object');

  await sema.acquire();
  try {
    const zipFile = new ZipFile();
    const zipBuffer = await new Promise<Buffer>((resolve, reject) => {
      Object.keys(files)
        .sort()
        .forEach((name) => {
          const file = files[name];
          const stream = file.toStream() as import('stream').Readable;
          stream.on('error', reject);
          zipFile.addReadStream(stream, name, { mode: file.mode, mtime });
        });

      zipFile.end();
      streamToBuffer(zipFile.outputStream).then(resolve).catch(reject);
    });

    return new Lambda({
      zipBuffer,
      handler,
      runtime,
      environment,
    });
  } finally {
    sema.release();
  }
}
packages/now-build-utils/src/types.ts (new file, 101 lines)
@@ -0,0 +1,101 @@
export interface File {
  type: string;
  mode: number;
  toStream: () => NodeJS.ReadableStream;
}

export interface Files {
  [filePath: string]: File
}

export interface Config {
  [key: string]: string
}

export interface AnalyzeOptions {
  /**
   * All source files of the project
   */
  files: Files;

  /**
   * Name of entrypoint file for this particular build job. Value
   * `files[entrypoint]` is guaranteed to exist and be a valid File reference.
   * `entrypoint` is always a discrete file and never a glob, since globs are
   * expanded into separate builds at deployment time.
   */
  entrypoint: string;

  /**
   * A writable temporary directory where you are encouraged to perform your
   * build process. This directory will be populated with the restored cache.
   */
  workPath: string;

  /**
   * An arbitrary object passed by the user in the build definition defined
   * in `now.json`.
   */
  config: Config;
}

export interface BuildOptions {
  /**
   * All source files of the project
   */
  files: Files;

  /**
   * Name of entrypoint file for this particular build job. Value
   * `files[entrypoint]` is guaranteed to exist and be a valid File reference.
   * `entrypoint` is always a discrete file and never a glob, since globs are
   * expanded into separate builds at deployment time.
   */
  entrypoint: string;

  /**
   * A writable temporary directory where you are encouraged to perform your
   * build process. This directory will be populated with the restored cache.
   */
  workPath: string;

  /**
   * An arbitrary object passed by the user in the build definition defined
   * in `now.json`.
   */
  config: Config;
}

export interface PrepareCacheOptions {
  /**
   * All source files of the project
   */
  files: Files;

  /**
   * Name of entrypoint file for this particular build job. Value
   * `files[entrypoint]` is guaranteed to exist and be a valid File reference.
   * `entrypoint` is always a discrete file and never a glob, since globs are
   * expanded into separate builds at deployment time.
   */
  entrypoint: string;

  /**
   * A writable temporary directory where you are encouraged to perform your
   * build process.
   */
  workPath: string;

  /**
   * A writable temporary directory where you can build a cache to use for
   * the next run.
   */
  cachePath: string;

  /**
   * An arbitrary object passed by the user in the build definition defined
   * in `now.json`.
   */
  config: Config;
}
packages/now-build-utils/tsconfig.json (new file, 24 lines)
@@ -0,0 +1,24 @@
{
  "compilerOptions": {
    "declaration": true,
    "esModuleInterop": true,
    "lib": ["esnext"],
    "module": "commonjs",
    "moduleResolution": "node",
    "noEmitOnError": true,
    "noFallthroughCasesInSwitch": true,
    "noImplicitReturns": true,
    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "outDir": "./dist",
    "types": ["node"],
    "strict": true,
    "target": "esnext"
  },
  "include": [
    "src/**/*"
  ],
  "exclude": [
    "node_modules"
  ]
}
@@ -21,12 +21,12 @@ const getGoUrl = (version, platform, arch) => {
  return `https://dl.google.com/go/go${version}.${goPlatform}-${goArch}.${ext}`;
};

function getExportedFunctionName(filePath) {
async function getExportedFunctionName(filePath) {
  debug('Detecting handler name for %o', filePath);
  const bin = join(__dirname, 'get-exported-function-name');
  const args = [filePath];
  const name = execa.stdout(bin, args);
  debug('Detected exported name %o', filePath);
  const name = await execa.stdout(bin, args);
  debug('Detected exported name %o', name);
  return name;
}

@@ -68,6 +68,7 @@ async function createGo(
  platform = process.platform,
  arch = process.arch,
  opts = {},
  goMod = false,
) {
  const env = {
    ...process.env,

@@ -76,6 +77,10 @@ async function createGo(
    ...opts.env,
  };

  if (goMod) {
    env.GO111MODULE = 'on';
  }

  function go(...args) {
    debug('Exec %o', `go ${args.join(' ')}`);
    return execa('go', args, { stdio: 'inherit', ...opts, env });

@@ -90,7 +95,7 @@ async function createGo(

async function downloadGo(
  dir = GO_DIR,
  version = '1.11.5',
  version = '1.12',
  platform = process.platform,
  arch = process.arch,
) {
@@ -1,5 +1,7 @@
const { join, dirname } = require('path');
const { readFile, writeFile } = require('fs-extra');
const {
  readFile, writeFile, pathExists, move,
} = require('fs-extra');

const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies

@@ -23,9 +25,9 @@ async function build({ files, entrypoint }) {
const downloadedFiles = await download(files, srcPath);

console.log(`Parsing AST for "${entrypoint}"`);
let handlerFunctionName;
let parseFunctionName;
try {
handlerFunctionName = await getExportedFunctionName(
parseFunctionName = await getExportedFunctionName(
downloadedFiles[entrypoint].fsPath,
);
} catch (err) {

@@ -33,7 +35,7 @@ async function build({ files, entrypoint }) {
throw err;
}

if (!handlerFunctionName) {
if (!parseFunctionName) {
const err = new Error(
`Could not find an exported function in "${entrypoint}"`,
);

@@ -41,53 +43,146 @@ async function build({ files, entrypoint }) {
throw err;
}

const handlerFunctionName = parseFunctionName.split(',')[0];

console.log(
`Found exported function "${handlerFunctionName}" in "${entrypoint}"`,
);

const origianlMainGoContents = await readFile(
join(__dirname, 'main.go'),
'utf8',
);
const mainGoContents = origianlMainGoContents.replace(
'__NOW_HANDLER_FUNC_NAME',
handlerFunctionName,
);
// in order to allow the user to have `main.go`, we need our `main.go` to be called something else
const mainGoFileName = 'main__now__go__.go';

// we need `main.go` in the same dir as the entrypoint,
// otherwise `go build` will refuse to build
const entrypointDirname = dirname(downloadedFiles[entrypoint].fsPath);

// Go doesn't like to build files in different directories,
// so now we place `main.go` together with the user code
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);
// check if package name other than main
const packageName = parseFunctionName.split(',')[1];
const isGoModExist = await pathExists(`${entrypointDirname}/go.mod`);
if (packageName !== 'main') {
const go = await createGo(
goPath,
process.platform,
process.arch,
{
cwd: entrypointDirname,
},
true,
);
if (!isGoModExist) {
try {
const defaultGoModContent = `module ${packageName}`;

const go = await createGo(goPath, process.platform, process.arch, {
cwd: entrypointDirname,
});
await writeFile(join(entrypointDirname, 'go.mod'), defaultGoModContent);
} catch (err) {
console.log(`failed to create default go.mod for ${packageName}`);
throw err;
}
}

// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
// and download any packages that aren't part of the stdlib
try {
await go.get();
} catch (err) {
console.log('failed to `go get`');
throw err;
}
const mainModGoFileName = 'main__mod__.go';
const modMainGoContents = await readFile(
join(__dirname, mainModGoFileName),
'utf8',
);

console.log('Running `go build`...');
const destPath = join(outDir, 'handler');
try {
const src = [
join(entrypointDirname, mainGoFileName),
downloadedFiles[entrypoint].fsPath,
];
await go.build({ src, dest: destPath });
} catch (err) {
console.log('failed to `go build`');
throw err;
let goPackageName = `${packageName}/${packageName}`;
const goFuncName = `${packageName}.${handlerFunctionName}`;

if (isGoModExist) {
const goModContents = await readFile(
`${entrypointDirname}/go.mod`,
'utf8',
);
goPackageName = `${
goModContents.split('\n')[0].split(' ')[1]
}/${packageName}`;
}

const mainModGoContents = modMainGoContents
.replace('__NOW_HANDLER_PACKAGE_NAME', goPackageName)
.replace('__NOW_HANDLER_FUNC_NAME', goFuncName);

// write main__mod__.go
await writeFile(
join(entrypointDirname, mainModGoFileName),
mainModGoContents,
);

// move user go file to folder
try {
await move(
downloadedFiles[entrypoint].fsPath,
`${join(entrypointDirname, packageName, entrypoint)}`,
);
} catch (err) {
console.log('failed to move entry to package folder');
throw err;
}

console.log('tidy go.mod file');
try {
// ensure go.mod up-to-date
await go('mod', 'tidy');
} catch (err) {
console.log('failed to `go mod tidy`');
throw err;
}

console.log('Running `go build`...');
const destPath = join(outDir, 'handler');
try {
const src = [join(entrypointDirname, mainModGoFileName)];
await go.build({ src, dest: destPath });
} catch (err) {
console.log('failed to `go build`');
throw err;
}
} else {
const go = await createGo(
goPath,
process.platform,
process.arch,
{
cwd: entrypointDirname,
},
false,
);
const origianlMainGoContents = await readFile(
join(__dirname, 'main.go'),
'utf8',
);
const mainGoContents = origianlMainGoContents.replace(
'__NOW_HANDLER_FUNC_NAME',
handlerFunctionName,
);

// in order to allow the user to have `main.go`,
// we need our `main.go` to be called something else
const mainGoFileName = 'main__now__go__.go';

// Go doesn't like to build files in different directories,
// so now we place `main.go` together with the user code
await writeFile(join(entrypointDirname, mainGoFileName), mainGoContents);

// `go get` will look at `*.go` (note we set `cwd`), parse the `import`s
// and download any packages that aren't part of the stdlib
try {
await go.get();
} catch (err) {
console.log('failed to `go get`');
throw err;
}

console.log('Running `go build`...');
const destPath = join(outDir, 'handler');
try {
const src = [
join(entrypointDirname, mainGoFileName),
downloadedFiles[entrypoint].fsPath,
];
await go.build({ src, dest: destPath });
} catch (err) {
console.log('failed to `go build`');
throw err;
}
}

const lambda = await createLambda({

packages/now-go/main__mod__.go (new file, 12 lines)
@@ -0,0 +1,12 @@
package main

import (
	"net/http"
	"__NOW_HANDLER_PACKAGE_NAME"

	now "github.com/zeit/now-builders/utils/go/bridge"
)

func main() {
	now.Start(http.HandlerFunc(__NOW_HANDLER_FUNC_NAME))
}

@@ -1,6 +1,6 @@
{
  "name": "@now/go",
  "version": "0.2.13-canary.1",
  "version": "0.3.1-canary.2",
  "license": "MIT",
  "repository": {
    "type": "git",
@@ -13,6 +13,7 @@
  "files": [
    "*.js",
    "main.go",
    "main__mod__.go",
    "util"
  ],
  "dependencies": {

@@ -34,7 +34,7 @@ func main() {
		if fn.Name.IsExported() == true {
			// we found the first exported function
			// we're done!
			fmt.Print(fn.Name.Name)
			fmt.Print(fn.Name.Name, ",", parsed.Name.Name)
			os.Exit(0)
		}
	}

@@ -11,7 +11,6 @@ const defaultOptions = {
  removeRedundantAttributes: true,
  useShortDoctype: true,
  collapseWhitespace: true,
  collapseInlineTagWhitespace: true,
  collapseBooleanAttributes: true,
  caseSensitive: true,
};

@@ -1,6 +1,6 @@
{
  "name": "@now/html-minifier",
  "version": "1.0.8-canary.0",
  "version": "1.0.8-canary.1",
  "license": "MIT",
  "repository": {
    "type": "git",

@@ -34,8 +34,6 @@ exports.build = async ({ files, entrypoint, config }) => {
    stream: stream.pipe(unifiedStream(processor)),
  });

  console.log(result.data.toString());

  const replacedEntrypoint = entrypoint.replace(/\.[^.]+$/, '.html');

  return { [replacedEntrypoint]: result };

@@ -1,6 +1,6 @@
{
  "name": "@now/md",
  "version": "0.4.10-canary.1",
  "version": "0.4.10-canary.2",
  "license": "MIT",
  "repository": {
    "type": "git",

@@ -1,14 +1,22 @@
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
const { createLambda } = require('@now/build-utils/lambda'); // eslint-disable-line import/no-extraneous-dependencies
const download = require('@now/build-utils/fs/download'); // eslint-disable-line import/no-extraneous-dependencies
const FileFsRef = require('@now/build-utils/file-fs-ref'); // eslint-disable-line import/no-extraneous-dependencies
const FileBlob = require('@now/build-utils/file-blob'); // eslint-disable-line import/no-extraneous-dependencies
const path = require('path');
const { readFile, writeFile, unlink } = require('fs.promised');
const {
  runNpmInstall,
  runPackageJsonScript,
} = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
} = require('@now/build-utils/fs/run-user-scripts'); // eslint-disable-line import/no-extraneous-dependencies
const glob = require('@now/build-utils/fs/glob'); // eslint-disable-line import/no-extraneous-dependencies
const {
  readFile,
  writeFile,
  unlink: unlinkFile,
  remove: removePath,
  mkdirp,
  rename: renamePath,
  pathExists,
} = require('fs-extra');
const semver = require('semver');
const nextLegacyVersions = require('./legacy-versions');
const {
@@ -17,6 +25,7 @@ const {
  includeOnlyEntryDirectory,
  normalizePackageJson,
  onlyStaticDirectory,
  getNextConfig,
} = require('./utils');

/** @typedef { import('@now/build-utils/file-ref').Files } Files */
@@ -68,6 +77,36 @@ async function writeNpmRc(workPath, token) {
  );
}

function getNextVersion(packageJson) {
  let nextVersion;
  if (packageJson.dependencies && packageJson.dependencies.next) {
    nextVersion = packageJson.dependencies.next;
  } else if (packageJson.devDependencies && packageJson.devDependencies.next) {
    nextVersion = packageJson.devDependencies.next;
  }
  return nextVersion;
}

function isLegacyNext(nextVersion) {
  // If version is using the dist-tag instead of a version range
  if (nextVersion === 'canary' || nextVersion === 'latest') {
    return false;
  }

  // If the version is an exact match with the legacy versions
  if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
    return true;
  }

  const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
  // When the version can't be matched with legacy versions, so it must be a newer version
  if (maxSatisfying === null) {
    return false;
  }

  return true;
}

exports.config = {
  maxLambdaSize: '5mb',
};
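
For orientation, a minimal usage sketch of the two helpers added above; the `package.json` object below is hypothetical and not taken from this diff:

```js
// Hypothetical input; getNextVersion() and isLegacyNext() are defined above.
const pkg = { dependencies: { next: '^8.0.0' } };

const nextVersion = getNextVersion(pkg); // => '^8.0.0'
if (!nextVersion) {
  throw new Error('`next` must be listed in "dependencies" or "devDependencies"');
}
console.log(isLegacyNext(nextVersion) ? 'legacy' : 'serverless');
```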

@@ -84,52 +123,34 @@ exports.build = async ({ files, workPath, entrypoint }) => {
  await download(files, workPath);
  const entryPath = path.join(workPath, entryDirectory);

  const pkg = await readPackageJson(entryPath);

  let nextVersion;
  if (pkg.dependencies && pkg.dependencies.next) {
    nextVersion = pkg.dependencies.next;
  } else if (pkg.devDependencies && pkg.devDependencies.next) {
    nextVersion = pkg.devDependencies.next;
  if (await pathExists(path.join(entryPath, '.next'))) {
    console.warn(
      'WARNING: You should probably not upload the `.next` directory. See https://zeit.co/docs/v2/deployments/official-builders/next-js-now-next/ for more information.',
    );
  }

  const pkg = await readPackageJson(entryPath);

  const nextVersion = getNextVersion(pkg);
  if (!nextVersion) {
    throw new Error(
      'No Next.js version could be detected in "package.json". Make sure `"next"` is installed in "dependencies" or "devDependencies"',
    );
  }

  const isLegacy = (() => {
    // If version is using the dist-tag instead of a version range
    if (nextVersion === 'canary' || nextVersion === 'latest') {
      return false;
    }

    // If the version is an exact match with the legacy versions
    if (nextLegacyVersions.indexOf(nextVersion) !== -1) {
      return true;
    }

    const maxSatisfying = semver.maxSatisfying(nextLegacyVersions, nextVersion);
    // When the version can't be matched with legacy versions, so it must be a newer version
    if (maxSatisfying === null) {
      return false;
    }

    return true;
  })();
  const isLegacy = isLegacyNext(nextVersion);

  console.log(`MODE: ${isLegacy ? 'legacy' : 'serverless'}`);

  if (isLegacy) {
    try {
      await unlink(path.join(entryPath, 'yarn.lock'));
      await unlinkFile(path.join(entryPath, 'yarn.lock'));
    } catch (err) {
      console.log('no yarn.lock removed');
    }

    try {
      await unlink(path.join(entryPath, 'package-lock.json'));
      await unlinkFile(path.join(entryPath, 'package-lock.json'));
    } catch (err) {
      console.log('no package-lock.json removed');
    }
@@ -170,7 +191,7 @@ exports.build = async ({ files, workPath, entrypoint }) => {
  }

  if (process.env.NPM_AUTH_TOKEN) {
    await unlink(path.join(entryPath, '.npmrc'));
    await unlinkFile(path.join(entryPath, '.npmrc'));
  }

  const lambdas = {};
@@ -273,11 +294,31 @@ exports.build = async ({ files, workPath, entrypoint }) => {
  const pageKeys = Object.keys(pages);

  if (pageKeys.length === 0) {
    const nextConfig = await getNextConfig(workPath, entryPath);

    if (nextConfig != null) {
      console.info('Found next.config.js:');
      console.info(nextConfig);
      console.info();
    }

    throw new Error(
      'No serverless pages were built. https://err.sh/zeit/now-builders/now-next-no-serverless-pages-built',
    );
  }

  // An optional assets folder that is placed alongside every page entrypoint
  const assets = await glob(
    'assets/**',
    path.join(entryPath, '.next', 'serverless'),
  );

  const assetKeys = Object.keys(assets);
  if (assetKeys.length > 0) {
    console.log('detected assets to be bundled with lambda:');
    assetKeys.forEach(assetFile => console.log(`\t${assetFile}`));
  }

  await Promise.all(
    pageKeys.map(async (page) => {
      // These default pages don't have to be handled as they'd always 404
@@ -291,6 +332,7 @@ exports.build = async ({ files, workPath, entrypoint }) => {
      lambdas[path.join(entryDirectory, pathname)] = await createLambda({
        files: {
          ...launcherFiles,
          ...assets,
          'page.js': pages[page],
        },
        handler: 'now__launcher.launcher',
@@ -313,16 +355,49 @@ exports.build = async ({ files, workPath, entrypoint }) => {
    {},
  );

  const nextStaticDirectory = onlyStaticDirectory(
  const staticDirectoryFiles = onlyStaticDirectory(
    includeOnlyEntryDirectory(files, entryDirectory),
  );
  const staticDirectoryFiles = Object.keys(nextStaticDirectory).reduce(
    (mappedFiles, file) => ({
      ...mappedFiles,
      [path.join(entryDirectory, file)]: nextStaticDirectory[file],
    }),
    {},
    entryDirectory,
  );

  return { ...lambdas, ...staticFiles, ...staticDirectoryFiles };
};

exports.prepareCache = async ({ cachePath, workPath, entrypoint }) => {
  console.log('preparing cache ...');

  const entryDirectory = path.dirname(entrypoint);
  const entryPath = path.join(workPath, entryDirectory);
  const cacheEntryPath = path.join(cachePath, entryDirectory);

  const pkg = await readPackageJson(entryPath);
  const nextVersion = getNextVersion(pkg);
  const isLegacy = isLegacyNext(nextVersion);

  if (isLegacy) {
    // skip caching legacy mode (swapping deps between all and production can get bug-prone)
    return {};
  }

  console.log('clearing old cache ...');
  await removePath(cacheEntryPath);
  await mkdirp(cacheEntryPath);

  console.log('copying build files for cache ...');
  await renamePath(entryPath, cacheEntryPath);

  console.log('producing cache file manifest ...');

  const cacheEntrypoint = path.relative(cachePath, cacheEntryPath);
  return {
    ...(await glob(
      path.join(
        cacheEntrypoint,
        'node_modules/{**,!.*,.yarn*,.cache/next-minifier/**}',
      ),
      cachePath,
    )),
    ...(await glob(path.join(cacheEntrypoint, 'package-lock.json'), cachePath)),
    ...(await glob(path.join(cacheEntrypoint, 'yarn.lock'), cachePath)),
  };
};

@@ -1,8 +1,8 @@
process.env.NODE_ENV = 'production';

const { Server } = require('http');
const { Bridge } = require('./now__bridge.js');
const page = require('./page.js');
const { Bridge } = require('./now__bridge');
const page = require('./page');

const server = new Server(page.render);
const bridge = new Bridge(server);

@@ -1,7 +1,7 @@
const { Server } = require('http');
const next = require('next-server');
const url = require('url');
const { Bridge } = require('./now__bridge.js');
const { Bridge } = require('./now__bridge');

process.env.NODE_ENV = 'production';


@@ -1,16 +1,19 @@
{
  "name": "@now/next",
  "version": "0.0.85-canary.6",
  "version": "0.1.3-canary.2",
  "license": "MIT",
  "scripts": {
    "build": "tsc"
  },
  "repository": {
    "type": "git",
    "url": "https://github.com/zeit/now-builders.git",
    "directory": "packages/now-next"
  },
  "dependencies": {
    "@now/node-bridge": "1.0.0-canary.2",
    "@now/node-bridge": "^1.0.0",
    "execa": "^1.0.0",
    "fs.promised": "^3.0.0",
    "fs-extra": "^7.0.0",
    "semver": "^5.6.0"
  }
}

packages/now-next/tsconfig.json (new file, 20 lines)
@@ -0,0 +1,20 @@
{
  "compilerOptions": {
    "target": "ES2017",
    "module": "commonjs",
    "lib": ["es2017"],
    "allowJs": true,
    "checkJs": true,
    "noEmit": true,
    "strict": false,
    "types": ["node"],
    "esModuleInterop": true
  },
  "include": [
    "./"
  ],
  "exclude": [
    "./launcher.js",
    "./legacy-launcher.js"
  ]
}

@@ -1,3 +1,6 @@
const fs = require('fs-extra');
const path = require('path');

/** @typedef { import('@now/build-utils/file-ref') } FileRef */
/** @typedef { import('@now/build-utils/file-fs-ref') } FileFsRef */
/** @typedef {{[filePath: string]: FileRef|FileFsRef}} Files */
@@ -79,13 +82,13 @@ function excludeLockFiles(files) {
}

/**
 * Exclude the static directory from files
 * Include the static directory from files
 * @param {Files} files
 * @returns {Files}
 */
function onlyStaticDirectory(files) {
function onlyStaticDirectory(files, entryDir) {
  function matcher(filePath) {
    return !filePath.startsWith('static');
    return !filePath.startsWith(path.join(entryDir, 'static'));
  }

  return excludeFiles(files, matcher);
@@ -136,6 +139,20 @@ function normalizePackageJson(defaultPackageJson = {}) {
  };
}

async function getNextConfig(workPath, entryPath) {
  const entryConfig = path.join(entryPath, './next.config.js');
  if (await fs.pathExists(entryConfig)) {
    return fs.readFile(entryConfig, 'utf8');
  }

  const workConfig = path.join(workPath, './next.config.js');
  if (await fs.pathExists(workConfig)) {
    return fs.readFile(workConfig, 'utf8');
  }

  return null;
}

module.exports = {
  excludeFiles,
  validateEntrypoint,
@@ -143,4 +160,5 @@ module.exports = {
  excludeLockFiles,
  normalizePackageJson,
  onlyStaticDirectory,
  getNextConfig,
};
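
A minimal sketch (not part of the diff) of how the new `entryDir` argument changes matching — only paths under `<entryDir>/static` survive; the file names below are made up:

```js
const { onlyStaticDirectory } = require('@now/next/utils');

// Hypothetical Files map; values would normally be FileRef/FileFsRef instances.
const files = {
  'www/static/logo.png': {}, // kept
  'www/pages/index.js': {},  // excluded by the matcher
};

console.log(Object.keys(onlyStaticDirectory(files, 'www')));
// => [ 'www/static/logo.png' ]
```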

@@ -1,6 +1,6 @@
{
  "name": "@now/node-bridge",
  "version": "1.0.0-canary.2",
  "version": "1.0.1-canary.1",
  "license": "MIT",
  "main": "./index.js",
  "repository": {

@@ -1,6 +1,8 @@
{
  "compilerOptions": {
    "target": "es6",
    "esModuleInterop": true,
    "lib": ["esnext"],
    "target": "esnext",
    "module": "commonjs",
    "outDir": ".",
    "strict": true,

@@ -48,7 +48,7 @@ async function downloadInstallAndBundle(
      data: JSON.stringify({
        license: 'UNLICENSED',
        dependencies: {
          '@zeit/ncc': '0.15.2',
          '@zeit/ncc': '0.16.1',
        },
      }),
    }),
@@ -61,11 +61,33 @@ async function downloadInstallAndBundle(
  return [downloadedFiles, userPath, nccPath, entrypointFsDirname];
}

async function compile(workNccPath, downloadedFiles, entrypoint) {
async function compile(workNccPath, downloadedFiles, entrypoint, config) {
  const input = downloadedFiles[entrypoint].fsPath;
  const inputDir = path.dirname(input);
  const ncc = require(path.join(workNccPath, 'node_modules/@zeit/ncc'));
  const { code, assets } = await ncc(input, { sourceMap: true });

  if (config && config.includeFiles) {
    // eslint-disable-next-line no-restricted-syntax
    for (const pattern of config.includeFiles) {
      // eslint-disable-next-line no-await-in-loop
      const files = await glob(pattern, inputDir);

      // eslint-disable-next-line no-restricted-syntax
      for (const assetName of Object.keys(files)) {
        const stream = files[assetName].toStream();
        const { mode } = files[assetName];
        // eslint-disable-next-line no-await-in-loop
        const { data } = await FileBlob.fromStream({ stream });

        assets[assetName] = {
          source: data,
          permissions: mode,
        };
      }
    }
  }

  const preparedFiles = {};
  const blob = new FileBlob({ data: code });
  // move all user code to 'user' subdirectory
@@ -115,7 +137,12 @@ exports.build = async ({
    preparedFiles = rename(preparedFiles, name => path.join('user', name));
  } else {
    console.log('compiling entrypoint with ncc...');
    preparedFiles = await compile(workNccPath, downloadedFiles, entrypoint);
    preparedFiles = await compile(
      workNccPath,
      downloadedFiles,
      entrypoint,
      config,
    );
  }

  const launcherPath = path.join(__dirname, 'launcher.js');

@@ -14,6 +14,18 @@ if (!process.env.NODE_ENV) {
  process.env.NODE_ENV = 'production';
}

// PLACEHOLDER
try {
  // PLACEHOLDER
} catch (err) {
  if (err.code === 'MODULE_NOT_FOUND') {
    console.error(err.message);
    console.error(
      'Did you forget to add it to "dependencies" in `package.json`?',
    );
    process.exit(1);
  } else {
    throw err;
  }
}

exports.launcher = bridge.launcher;
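
For context, a rough sketch of what this launcher might look like once the build step has substituted the placeholder; the exact require path is an assumption, not something shown in this diff:

```js
// Hypothetical post-build shape: "// PLACEHOLDER" replaced with a require of the
// compiled user code, so a missing dependency surfaces as a readable error.
try {
  require('./user/index.js'); // assumed user entrypoint path
} catch (err) {
  if (err.code === 'MODULE_NOT_FOUND') {
    console.error(err.message);
    console.error('Did you forget to add it to "dependencies" in `package.json`?');
    process.exit(1);
  } else {
    throw err;
  }
}
```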

@@ -1,6 +1,6 @@
{
  "name": "@now/node-server",
  "version": "0.5.0-canary.3",
  "version": "0.5.2-canary.3",
  "license": "MIT",
  "repository": {
    "type": "git",
@@ -8,7 +8,7 @@
    "directory": "packages/now-node-server"
  },
  "dependencies": {
    "@now/node-bridge": "1.0.0-canary.2",
    "@now/node-bridge": "^1.0.1-canary.1",
    "fs-extra": "7.0.1"
  },
  "scripts": {

packages/now-node-server/test/fixtures/11-include-files/index.js (new file, vendored, 6 lines)
@@ -0,0 +1,6 @@
const express = require('express');

const app = express();
app.use(express.static('templates'));

app.listen();

packages/now-node-server/test/fixtures/11-include-files/now.json (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
{
  "version": 2,
  "builds": [
    {
      "src": "index.js",
      "use": "@now/node-server",
      "config": {
        "includeFiles": [
          "templates/**"
        ]
      }
    }
  ],
  "probes": [
    {
      "path": "/",
      "mustContain": "Hello Now!"
    }
  ]
}

packages/now-node-server/test/fixtures/11-include-files/package.json (new file, vendored, 5 lines)
@@ -0,0 +1,5 @@
{
  "dependencies": {
    "express": "^4.16.4"
  }
}

packages/now-node-server/test/fixtures/11-include-files/templates/index.html (new file, vendored, 1 line)
@@ -0,0 +1 @@
Hello Now!

@@ -1,6 +1,6 @@
{
  "name": "@now/node",
  "version": "0.5.0-canary.5",
  "version": "0.5.2-canary.5",
  "license": "MIT",
  "main": "./dist/index",
  "repository": {
@@ -9,12 +9,13 @@
    "directory": "packages/now-node"
  },
  "dependencies": {
    "@now/node-bridge": "1.0.0-canary.2",
    "@now/node-bridge": "^1.0.1-canary.1",
    "fs-extra": "7.0.1"
  },
  "scripts": {
    "build": "./build.sh",
    "test": "jest"
    "test": "npm run build && jest",
    "prepublish": "npm run build"
  },
  "files": [
    "dist"

@@ -1,34 +1,35 @@
import { join, dirname } from 'path';
import { remove, readFile } from 'fs-extra';
import * as glob from '@now/build-utils/fs/glob.js';
import * as download from '@now/build-utils/fs/download.js';
import * as FileBlob from '@now/build-utils/file-blob.js';
import * as FileFsRef from '@now/build-utils/file-fs-ref.js';
import { createLambda } from '@now/build-utils/lambda.js';
import {
  glob,
  download,
  FileBlob,
  FileFsRef,
  Files,
  createLambda,
  runNpmInstall,
  runPackageJsonScript
} from '@now/build-utils/fs/run-user-scripts.js';
  runPackageJsonScript,
  PrepareCacheOptions,
  BuildOptions,
} from '@now/build-utils';

/** @typedef { import('@now/build-utils/file-ref') } FileRef */
/** @typedef {{[filePath: string]: FileRef}} Files */
interface CompilerConfig {
  includeFiles?: string[]
}

/**
 * @typedef {Object} BuildParamsType
 * @property {Files} files - Files object
 * @property {string} entrypoint - Entrypoint specified for the builder
 * @property {string} workPath - Working directory for this build
 */
interface DownloadOptions {
  files: Files,
  entrypoint: string;
  workPath: string;
  npmArguments?: string[];
}

/**
 * @param {BuildParamsType} buildParams
 * @param {Object} [options]
 * @param {string[]} [options.npmArguments]
 */
async function downloadInstallAndBundle(
  { files, entrypoint, workPath },
  { npmArguments = [] } = {}
) {
async function downloadInstallAndBundle({
  files,
  entrypoint,
  workPath,
  npmArguments = []
}: DownloadOptions) {
  const userPath = join(workPath, 'user');
  const nccPath = join(workPath, 'ncc');

@@ -46,7 +47,7 @@ async function downloadInstallAndBundle(
      data: JSON.stringify({
        license: 'UNLICENSED',
        dependencies: {
          '@zeit/ncc': '0.15.2',
          '@zeit/ncc': '0.16.1',
        }
      })
    })
@@ -56,14 +57,33 @@ async function downloadInstallAndBundle(

  console.log('installing dependencies for ncc...');
  await runNpmInstall(nccPath, npmArguments);
  return [downloadedFiles, nccPath, entrypointFsDirname];
  const entrypointPath = downloadedFiles[entrypoint].fsPath;
  return { entrypointPath, workNccPath: nccPath, entrypointFsDirname };
}

async function compile(workNccPath: string, downloadedFiles, entrypoint: string) {
  const input = downloadedFiles[entrypoint].fsPath;
async function compile(workNccPath: string, entrypointPath: string, entrypoint: string, config: CompilerConfig) {
  const input = entrypointPath;
  const inputDir = dirname(input);
  const ncc = require(join(workNccPath, 'node_modules/@zeit/ncc'));
  const { code, assets } = await ncc(input);

  if (config && config.includeFiles) {
    for (const pattern of config.includeFiles) {
      const files = await glob(pattern, inputDir);

      for (const assetName of Object.keys(files)) {
        const stream = files[assetName].toStream();
        const { mode } = files[assetName];
        const { data } = await FileBlob.fromStream({ stream });

        assets[assetName] = {
          'source': data,
          'permissions': mode
        };
      }
    }
  }

  const preparedFiles = {};
  const blob = new FileBlob({ data: code });
  // move all user code to 'user' subdirectory
@@ -82,25 +102,20 @@ export const config = {
  maxLambdaSize: '5mb'
};

/**
 * @param {BuildParamsType} buildParams
 * @returns {Promise<Files>}
 */
export async function build({ files, entrypoint, workPath }) {
  const [
    downloadedFiles,
export async function build({ files, entrypoint, workPath, config }: BuildOptions) {
  const {
    entrypointPath,
    workNccPath,
    entrypointFsDirname
  ] = await downloadInstallAndBundle(
    { files, entrypoint, workPath },
    { npmArguments: ['--prefer-offline'] }
  } = await downloadInstallAndBundle(
    { files, entrypoint, workPath, npmArguments: ['--prefer-offline'] }
  );

  console.log('running user script...');
  await runPackageJsonScript(entrypointFsDirname, 'now-build');

  console.log('compiling entrypoint with ncc...');
  const preparedFiles = await compile(workNccPath, downloadedFiles, entrypoint);
  const preparedFiles = await compile(workNccPath, entrypointPath, entrypoint, config);
  const launcherPath = join(__dirname, 'launcher.js');
  let launcherData = await readFile(launcherPath, 'utf8');

@@ -127,7 +142,7 @@ export async function build({ files, entrypoint, workPath }) {
  return { [entrypoint]: lambda };
}

export async function prepareCache({ files, entrypoint, workPath, cachePath }) {
export async function prepareCache({ files, entrypoint, workPath, cachePath }: PrepareCacheOptions) {
  await remove(workPath);
  await downloadInstallAndBundle({ files, entrypoint, workPath: cachePath });


@@ -7,7 +7,17 @@ if (!process.env.NODE_ENV) {
  process.env.NODE_ENV = 'production';
}

try {
  // PLACEHOLDER
} catch (err) {
  if (err.code === 'MODULE_NOT_FOUND') {
    console.error(err.message);
    console.error('Did you forget to add it to "dependencies" in `package.json`?');
    process.exit(1);
  } else {
    throw err;
  }
}

const server = new Server(listener);
const bridge = new Bridge(server);

packages/now-node/test/fixtures/09-include-files/index.js (new file, vendored, 7 lines)
@@ -0,0 +1,7 @@
const edge = require('edge.js');

module.exports = (req, resp) => {
  edge.registerViews('templates');

  resp.end(edge.render('index', { name: 'Now!' }));
};

packages/now-node/test/fixtures/09-include-files/now.json (new file, vendored, 20 lines)
@@ -0,0 +1,20 @@
{
  "version": 2,
  "builds": [
    {
      "src": "index.js",
      "use": "@now/node",
      "config": {
        "includeFiles": [
          "templates/**"
        ]
      }
    }
  ],
  "probes": [
    {
      "path": "/",
      "mustContain": "hello Now!"
    }
  ]
}

packages/now-node/test/fixtures/09-include-files/package.json (new file, vendored, 5 lines)
@@ -0,0 +1,5 @@
{
  "dependencies": {
    "edge.js": "^1.1.4"
  }
}

packages/now-node/test/fixtures/09-include-files/templates/index.edge (new file, vendored, 1 line)
@@ -0,0 +1 @@
hello {{ name }}

@@ -1,6 +1,8 @@
{
  "compilerOptions": {
    "target": "es6",
    "esModuleInterop": true,
    "lib": ["esnext"],
    "target": "esnext",
    "module": "commonjs",
    "outDir": "dist",
    "sourceMap": false,

@@ -64,7 +64,8 @@ async function transformFromAwsRequest({
  const { pathname, search, query: queryString } = parseUrl(path);
  let requestUri = pathname + (search || '');

  let filename = pathJoin('/var/task/user', pathname);
  let filename = pathJoin('/var/task/user',
    process.env.NOW_ENTRYPOINT || pathname);
  if (await isDirectory(filename)) {
    if (!filename.endsWith('/')) {
      filename += '/';

@@ -1,6 +1,6 @@
{
  "name": "@now/php-bridge",
  "version": "0.4.14-canary.0",
  "version": "0.4.16-canary.0",
  "license": "MIT",
  "repository": {
    "type": "git",

@@ -20,6 +20,9 @@ exports.build = async ({ files, entrypoint }) => {
    files: { ...userFiles, ...bridgeFiles },
    handler: 'launcher.launcher',
    runtime: 'nodejs8.10',
    environment: {
      NOW_ENTRYPOINT: entrypoint,
    },
  });

  return { [entrypoint]: lambda };

@@ -1,6 +1,6 @@
{
  "name": "@now/php",
  "version": "0.4.14-canary.1",
  "version": "0.4.16-canary.0",
  "license": "MIT",
  "repository": {
    "type": "git",
@@ -8,7 +8,7 @@
    "directory": "packages/now-php"
  },
  "dependencies": {
    "@now/php-bridge": "^0.4.14-canary.0"
    "@now/php-bridge": "^0.4.16-canary.0"
  },
  "scripts": {
    "test": "jest"

packages/now-php/test/fixtures/19-routes/index.php (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
<?php
print('cow:RANDOMNESS_PLACEHOLDER:' . $_SERVER['REQUEST_URI']);

packages/now-php/test/fixtures/19-routes/now.json (new file, vendored, 13 lines)
@@ -0,0 +1,13 @@
{
  "version": 2,
  "builds": [
    { "src": "index.php", "use": "@now/php" }
  ],
  "routes": [
    { "src": "/(.*)", "dest": "index.php" }
  ],
  "probes": [
    { "path": "/any", "mustContain": "cow:RANDOMNESS_PLACEHOLDER:/any" },
    { "path": "/any?type=some", "mustContain": "cow:RANDOMNESS_PLACEHOLDER:/any?type=some" }
  ]
}

@@ -8,9 +8,10 @@ const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disabl
const downloadAndInstallPip = require('./download-and-install-pip');

async function pipInstall(pipPath, srcDir, ...args) {
  console.log(`running "pip install -t ${srcDir} ${args.join(' ')}"...`);
  console.log(`running "pip install --target ${srcDir} ${args.join(' ')}"...`);
  try {
    await execa(pipPath, ['install', '-t', srcDir, ...args], {
    await execa(pipPath, ['install', '--target', '.', ...args], {
      cwd: srcDir,
      stdio: 'inherit',
    });
  } catch (err) {
@@ -38,6 +39,21 @@ exports.build = async ({ files, entrypoint }) => {

  const pipPath = await downloadAndInstallPip();

  try {
    // See: https://stackoverflow.com/a/44728772/376773
    //
    // The `setup.cfg` is required for `now dev` on MacOS, where without
    // this file being present in the src dir then this error happens:
    //
    // distutils.errors.DistutilsOptionError: must supply either home
    // or prefix/exec-prefix -- not both
    const setupCfg = path.join(srcDir, 'setup.cfg');
    await writeFile(setupCfg, '[install]\nprefix=\n');
  } catch (err) {
    console.log('failed to create "setup.cfg" file');
    throw err;
  }

  await pipInstall(pipPath, srcDir, 'requests');

  const entryDirectory = path.dirname(entrypoint);

@@ -6,8 +6,8 @@ from __NOW_HANDLER_FILENAME import handler
import _thread


server = HTTPServer(('', 3000), handler)

server = HTTPServer(('', 0), handler)
port = server.server_address[1]

def now_handler(event, context):
    _thread.start_new_thread(server.handle_request, ())
@@ -25,7 +25,7 @@ def now_handler(event, context):
    ):
        body = base64.b64decode(body)

    res = requests.request(method, 'http://0.0.0.0:3000' + path,
    res = requests.request(method, 'http://0.0.0.0:' + str(port) + path,
                           headers=headers, data=body, allow_redirects=False)

    return {

@@ -1,6 +1,6 @@
{
  "name": "@now/python",
  "version": "0.0.41-canary.2",
  "version": "0.0.42-canary.0",
  "main": "index.js",
  "license": "MIT",
  "repository": {
2
packages/now-rust/Cargo.lock
generated
2
packages/now-rust/Cargo.lock
generated
@@ -448,7 +448,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
|
||||
[[package]]
|
||||
name = "now_lambda"
|
||||
version = "0.1.2"
|
||||
version = "0.1.3"
|
||||
dependencies = [
|
||||
"base64 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
"http 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "now_lambda"
|
||||
version = "0.1.2"
|
||||
version = "0.1.3"
|
||||
authors = ["Antonio Nuno Monteiro <anmonteiro@gmail.com>"]
|
||||
edition = "2018"
|
||||
description = "Rust bindings for Now.sh Lambdas"
|
||||
|
||||
@@ -5,13 +5,20 @@ const toml = require('@iarna/toml');
|
||||
const { createLambda } = require('@now/build-utils/lambda.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const download = require('@now/build-utils/fs/download.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const glob = require('@now/build-utils/fs/glob.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const { runShellScript } = require('@now/build-utils/fs/run-user-scripts.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const FileFsRef = require('@now/build-utils/file-fs-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const installRustAndGCC = require('./download-install-rust-toolchain.js');
|
||||
const FileRef = require('@now/build-utils/file-ref.js'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
const installRust = require('./install-rust.js');
|
||||
|
||||
exports.config = {
|
||||
maxLambdaSize: '25mb',
|
||||
};
|
||||
|
||||
const codegenFlags = [
|
||||
'-C', 'target-cpu=ivybridge',
|
||||
'-C', 'target-feature=-aes,-avx,+fxsr,-popcnt,+sse,+sse2,-sse3,-sse4.1,-sse4.2,-ssse3,-xsave,-xsaveopt',
|
||||
];
|
||||
|
||||
async function inferCargoBinaries(config) {
|
||||
try {
|
||||
const { stdout: manifestStr } = await execa(
|
||||
@@ -45,7 +52,7 @@ async function buildWholeProject({
|
||||
const { debug } = config;
|
||||
console.log('running `cargo build`...');
|
||||
try {
|
||||
await execa('cargo', ['build'].concat(debug ? [] : ['--release']), {
|
||||
await execa('cargo', ['build', '--verbose'].concat(debug ? [] : ['--release']), {
|
||||
env: rustEnv,
|
||||
cwd: entrypointDirname,
|
||||
stdio: 'inherit',
|
||||
@@ -85,6 +92,17 @@ async function buildWholeProject({
|
||||
return lambdas;
|
||||
}
|
||||
|
||||
async function runUserScripts(entrypoint) {
|
||||
const entryDir = path.dirname(entrypoint);
|
||||
const buildScriptPath = path.join(entryDir, 'build.sh');
|
||||
const buildScriptExists = await fs.exists(buildScriptPath);
|
||||
|
||||
if (buildScriptExists) {
|
||||
console.log('running `build.sh`...');
|
||||
await runShellScript(buildScriptPath);
|
||||
}
|
||||
}
|
||||
|
||||
async function cargoLocateProject(config) {
|
||||
try {
|
||||
const { stdout: projectDescriptionStr } = await execa(
|
||||
@@ -171,7 +189,7 @@ async function buildSingleFile({
|
||||
try {
|
||||
await execa(
|
||||
'cargo',
|
||||
['build', '--bin', binName].concat(debug ? [] : ['--release']),
|
||||
['build', '--bin', binName, '--verbose'].concat(debug ? [] : ['--release']),
|
||||
{
|
||||
env: rustEnv,
|
||||
cwd: entrypointDirname,
|
||||
@@ -208,14 +226,16 @@ exports.build = async (m) => {
|
||||
console.log('downloading files');
|
||||
const downloadedFiles = await download(files, workPath);
|
||||
|
||||
const { PATH: toolchainPath, ...otherEnv } = await installRustAndGCC();
|
||||
await installRust();
|
||||
const { PATH, HOME } = process.env;
|
||||
const rustEnv = {
|
||||
...process.env,
|
||||
...otherEnv,
|
||||
PATH: `${path.join(HOME, '.cargo/bin')}:${toolchainPath}:${PATH}`,
|
||||
PATH: `${path.join(HOME, '.cargo/bin')}:${PATH}`,
|
||||
RUSTFLAGS: [process.env.RUSTFLAGS, ...codegenFlags].filter(Boolean).join(' '),
|
||||
};
|
||||
|
||||
await runUserScripts(downloadedFiles[entrypoint].fsPath);
|
||||
|
||||
const newM = Object.assign(m, { downloadedFiles, rustEnv });
|
||||
if (path.extname(entrypoint) === '.toml') {
|
||||
return buildWholeProject(newM);
|
||||
@@ -234,6 +254,7 @@ exports.prepareCache = async ({ cachePath, entrypoint, workPath }) => {
|
||||
const rustEnv = {
|
||||
...process.env,
|
||||
PATH: `${path.join(HOME, '.cargo/bin')}:${PATH}`,
|
||||
RUSTFLAGS: [process.env.RUSTFLAGS, ...codegenFlags].filter(Boolean).join(' '),
|
||||
};
|
||||
const entrypointDirname = path.dirname(path.join(workPath, entrypoint));
|
||||
const cargoTomlFile = await cargoLocateProject({
|
||||
@@ -264,7 +285,52 @@ exports.prepareCache = async ({ cachePath, entrypoint, workPath }) => {
|
||||
path.join(cacheEntrypointDirname, 'target'),
|
||||
);
|
||||
|
||||
return {
|
||||
...(await glob('**/**', path.join(cachePath))),
|
||||
};
|
||||
const cacheFiles = await glob('**/**', cachePath);
|
||||
|
||||
// eslint-disable-next-line no-restricted-syntax
|
||||
for (const f of Object.keys(cacheFiles)) {
|
||||
const accept = (/(?:^|\/)target\/release\/\.fingerprint\//.test(f))
|
||||
|| (/(?:^|\/)target\/release\/build\//.test(f))
|
||||
|| (/(?:^|\/)target\/release\/deps\//.test(f))
|
||||
|| (/(?:^|\/)target\/debug\/\.fingerprint\//.test(f))
|
||||
|| (/(?:^|\/)target\/debug\/build\//.test(f))
|
||||
|| (/(?:^|\/)target\/debug\/deps\//.test(f));
|
||||
if (!accept) {
|
||||
delete cacheFiles[f];
|
||||
}
|
||||
}
|
||||
|
||||
return cacheFiles;
|
||||
};
|
||||
|
||||
function findCargoToml(files, entrypoint) {
|
||||
let currentPath = path.dirname(entrypoint);
|
||||
let cargoTomlPath;
|
||||
|
||||
// eslint-disable-next-line no-constant-condition
|
||||
while (true) {
|
||||
cargoTomlPath = path.join(currentPath, 'Cargo.toml');
|
||||
if (files[cargoTomlPath]) break;
|
||||
const newPath = path.dirname(currentPath);
|
||||
if (currentPath === newPath) break;
|
||||
currentPath = newPath;
|
||||
}
|
||||
|
||||
return cargoTomlPath;
|
||||
}
|
||||
|
||||
/*
|
||||
console.log(findCargoToml({
|
||||
'rust/src/main.rs': true,
|
||||
'rust/Cargo.toml': true,
|
||||
'Cargo.toml': true
|
||||
}, 'rust/src/main.rs'));
|
||||
*/
|
||||
|
||||
exports.getDefaultCache = ({ files, entrypoint }) => {
|
||||
const cargoTomlPath = findCargoToml(files, entrypoint);
|
||||
if (!cargoTomlPath) return undefined;
|
||||
const targetFolderDir = path.dirname(cargoTomlPath);
|
||||
const defaultCacheRef = new FileRef({ digest: 'sha:204e0c840c43473bbd130d7bc704fe5588b4eab43cda9bc940f10b2a0ae14b16' });
|
||||
return { [targetFolderDir]: defaultCacheRef };
|
||||
};
|
||||
|
||||
@@ -3,7 +3,6 @@ const fetch = require('node-fetch');
|
||||
const execa = require('execa');
|
||||
|
||||
const rustUrl = 'https://dmmcy0pwk6bqi.cloudfront.net/rust.tar.gz';
|
||||
const ccUrl = 'https://dmmcy0pwk6bqi.cloudfront.net/gcc-4.8.5.tgz';
|
||||
|
||||
async function downloadRustToolchain() {
|
||||
console.log('downloading the rust toolchain');
|
||||
@@ -22,38 +21,6 @@ async function downloadRustToolchain() {
|
||||
});
|
||||
}
|
||||
|
||||
async function downloadGCC() {
|
||||
console.log('downloading GCC');
|
||||
const res = await fetch(ccUrl);
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to download: ${ccUrl}`);
|
||||
}
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
res.body
|
||||
.on('error', reject)
|
||||
// NOTE(anmonteiro): We pipe GCC into `/tmp` instead of getting a writable
|
||||
// directory from `@now/build-utils` because the GCC distribution that we
|
||||
// use is specifically packaged for AWS Lambda (where `/tmp` is writable)
|
||||
// and contains several hardcoded symlinks to paths in `/tmp`.
|
||||
.pipe(tar.extract({ gzip: true, cwd: '/tmp' }))
|
||||
.on('finish', async () => {
|
||||
const { LD_LIBRARY_PATH } = process.env;
|
||||
// Set the environment variables as per
|
||||
// https://github.com/lambci/lambci/blob/e6c9c7/home/init/gcc#L14-L17
|
||||
const newEnv = {
|
||||
PATH: '/tmp/bin:/tmp/sbin',
|
||||
LD_LIBRARY_PATH: `/tmp/lib:/tmp/lib64:${LD_LIBRARY_PATH}`,
|
||||
CPATH: '/tmp/include',
|
||||
LIBRARY_PATH: '/tmp/lib',
|
||||
};
|
||||
|
||||
return resolve(newEnv);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function installOpenSSL() {
|
||||
console.log('installing openssl-devel...');
|
||||
try {
|
||||
@@ -77,8 +44,5 @@ async function installOpenSSL() {
|
||||
|
||||
module.exports = async () => {
|
||||
await downloadRustToolchain();
|
||||
const newEnv = await downloadGCC();
|
||||
await installOpenSSL();
|
||||
|
||||
return newEnv;
|
||||
};
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/rust",
|
||||
"version": "0.0.3-canary.2",
|
||||
"version": "0.1.2-canary.1",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -9,7 +9,7 @@
|
||||
},
|
||||
"files": [
|
||||
"index.js",
|
||||
"download-install-rust-toolchain.js",
|
||||
"install-rust.js",
|
||||
"launcher.rs"
|
||||
],
|
||||
"dependencies": {
|
||||
|
||||
@@ -109,7 +109,7 @@ impl<'a> From<NowRequest<'a>> for HttpRequest<Body> {
|
||||
// todo: document failure behavior
|
||||
Body::from(::base64::decode(b.as_ref()).unwrap_or_default())
|
||||
}
|
||||
(Some(b), Some(_)) => Body::from(b.into_owned()),
|
||||
(Some(b), _) => Body::from(b.into_owned()),
|
||||
_ => Body::from(()),
|
||||
})
|
||||
.expect("failed to build request");
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@now/wordpress",
|
||||
"version": "0.4.15-canary.0",
|
||||
"version": "0.4.16-canary.0",
|
||||
"license": "MIT",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
@@ -8,7 +8,7 @@
|
||||
"directory": "packages/now-wordpress"
|
||||
},
|
||||
"dependencies": {
|
||||
"@now/php-bridge": "^0.4.14-canary.0",
|
||||
"@now/php-bridge": "^0.4.16-canary.0",
|
||||
"node-fetch": "2.3.0",
|
||||
"yauzl": "2.10.0"
|
||||
},
|
||||
|
||||
@@ -27,6 +27,7 @@ it(
|
||||
path.join(__dirname, 'monorepo'),
|
||||
);
|
||||
expect(buildResult['www/index']).toBeDefined();
|
||||
expect(buildResult['www/static/test.txt']).toBeDefined();
|
||||
const filePaths = Object.keys(buildResult);
|
||||
const hasUnderScoreAppStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_app\.js$/));
|
||||
const hasUnderScoreErrorStaticFile = filePaths.some(filePath => filePath.match(/static.*\/pages\/_error\.js$/));
|
||||
@@ -75,7 +76,7 @@ it('Should throw when package.json or next.config.js is not the "src"', async ()
|
||||
});
|
||||
|
||||
it(
|
||||
'Should build the static-files test',
|
||||
'Should build the static-files test on legacy',
|
||||
async () => {
|
||||
const { buildResult } = await runBuildLambda(
|
||||
path.join(__dirname, 'legacy-static-files'),
|
||||
@@ -84,3 +85,14 @@ it(
|
||||
},
|
||||
FOUR_MINUTES,
|
||||
);
|
||||
|
||||
it(
|
||||
'Should build the static-files test',
|
||||
async () => {
|
||||
const { buildResult } = await runBuildLambda(
|
||||
path.join(__dirname, 'static-files'),
|
||||
);
|
||||
expect(buildResult['static/test.txt']).toBeDefined();
|
||||
},
|
||||
FOUR_MINUTES,
|
||||
);
|
||||
|
||||
1
test/integration/now-next/monorepo/www/static/test.txt
Normal file
1
test/integration/now-next/monorepo/www/static/test.txt
Normal file
@@ -0,0 +1 @@
|
||||
hello world
|
||||
3
test/integration/now-next/static-files/next.config.js
Normal file
3
test/integration/now-next/static-files/next.config.js
Normal file
@@ -0,0 +1,3 @@
|
||||
module.exports = {
|
||||
target: 'serverless',
|
||||
};
|
||||
6
test/integration/now-next/static-files/now.json
Normal file
6
test/integration/now-next/static-files/now.json
Normal file
@@ -0,0 +1,6 @@
|
||||
{
|
||||
"version": 2,
|
||||
"builds": [
|
||||
{"src": "next.config.js", "use": "@now/next"}
|
||||
]
|
||||
}
|
||||
10
test/integration/now-next/static-files/package.json
Normal file
10
test/integration/now-next/static-files/package.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"scripts": {
|
||||
"now-build": "next build"
|
||||
},
|
||||
"dependencies": {
|
||||
"next": "8",
|
||||
"react": "16",
|
||||
"react-dom": "16"
|
||||
}
|
||||
}
|
||||
1
test/integration/now-next/static-files/pages/index.js
Normal file
1
test/integration/now-next/static-files/pages/index.js
Normal file
@@ -0,0 +1 @@
|
||||
export default () => 'Index page';
|
||||
1
test/integration/now-next/static-files/static/test.txt
Normal file
1
test/integration/now-next/static-files/static/test.txt
Normal file
@@ -0,0 +1 @@
|
||||
hello world
|
||||
@@ -20,7 +20,12 @@ async function nowDeploy (bodies, randomness) {
|
||||
version: 2,
|
||||
public: true,
|
||||
env: { ...nowJson.env, RANDOMNESS_ENV_VAR: randomness },
|
||||
build: { env: { ...(nowJson.build || {}).env, RANDOMNESS_BUILD_ENV_VAR: randomness } },
|
||||
build: {
|
||||
env: {
|
||||
...(nowJson.build || {}).env,
|
||||
RANDOMNESS_BUILD_ENV_VAR: randomness
|
||||
}
|
||||
},
|
||||
name: 'test',
|
||||
files,
|
||||
builds: nowJson.builds,
|
||||
@@ -31,10 +36,7 @@ async function nowDeploy (bodies, randomness) {
|
||||
console.log(`posting ${files.length} files`);
|
||||
|
||||
for (const { file: filename } of files) {
|
||||
await filePost(
|
||||
bodies[filename],
|
||||
digestOfFile(bodies[filename])
|
||||
);
|
||||
await filePost(bodies[filename], digestOfFile(bodies[filename]));
|
||||
}
|
||||
|
||||
let deploymentId;
|
||||
@@ -119,8 +121,7 @@ async function fetchWithAuth (url, opts = {}) {
|
||||
|
||||
if (NOW_TOKEN) {
|
||||
token = NOW_TOKEN;
|
||||
} else
|
||||
if (NOW_TOKEN_FACTORY_URL) {
|
||||
} else if (NOW_TOKEN_FACTORY_URL) {
|
||||
const resp = await fetch(NOW_TOKEN_FACTORY_URL);
|
||||
token = (await resp.json()).token;
|
||||
} else {
|
||||
@@ -151,6 +152,8 @@ async function fetchApi (url, opts = {}) {
|
||||
opts.headers.Accept = 'application/json';
|
||||
}
|
||||
|
||||
opts.headers['x-now-trace-priority'] = '1';
|
||||
|
||||
return await fetch(urlWithHost, opts);
|
||||
}
|
||||
|
||||
|
||||
@@ -8,11 +8,12 @@ const fetch = require('./fetch-retry.js');
|
||||
const { nowDeploy } = require('./now-deploy.js');
|
||||
|
||||
async function packAndDeploy (builderPath) {
|
||||
const tgzName = (await spawnAsync('npm', [ '--loglevel', 'warn', 'pack' ], {
|
||||
stdio: [ 'ignore', 'pipe', 'inherit' ],
|
||||
await spawnAsync('npm', [ '--loglevel', 'warn', 'pack' ], {
|
||||
stdio: 'inherit',
|
||||
cwd: builderPath
|
||||
})).trim();
|
||||
const tgzPath = path.join(builderPath, tgzName);
|
||||
});
|
||||
const tarballs = await glob('*.tgz', { cwd: builderPath });
|
||||
const tgzPath = path.join(builderPath, tarballs[0]);
|
||||
console.log('tgzPath', tgzPath);
|
||||
const url = await nowDeployIndexTgz(tgzPath);
|
||||
await fetchTgzUrl(`https://${url}`);
|
||||
@@ -22,7 +23,11 @@ async function packAndDeploy (builderPath) {
|
||||
|
||||
const RANDOMNESS_PLACEHOLDER_STRING = 'RANDOMNESS_PLACEHOLDER';
|
||||
|
||||
async function testDeployment ({ builderUrl, buildUtilsUrl }, fixturePath, buildDelegate) {
|
||||
async function testDeployment (
|
||||
{ builderUrl, buildUtilsUrl },
|
||||
fixturePath,
|
||||
buildDelegate
|
||||
) {
|
||||
console.log('testDeployment', fixturePath);
|
||||
const globResult = await glob(`${fixturePath}/**`, { nodir: true });
|
||||
const bodies = globResult.reduce((b, f) => {
|
||||
@@ -77,7 +82,7 @@ async function testDeployment ({ builderUrl, buildUtilsUrl }, fixturePath, build
|
||||
for (const probe of nowJson.probes || []) {
|
||||
console.log('testing', JSON.stringify(probe));
|
||||
const probeUrl = `https://${deploymentUrl}${probe.path}`;
|
||||
const text = await fetchDeploymentUrl(probeUrl, {
|
||||
const { text, resp } = await fetchDeploymentUrl(probeUrl, {
|
||||
method: probe.method,
|
||||
body: probe.body ? JSON.stringify(probe.body) : undefined,
|
||||
headers: {
|
||||
@@ -87,9 +92,13 @@ async function testDeployment ({ builderUrl, buildUtilsUrl }, fixturePath, build
|
||||
if (probe.mustContain) {
|
||||
if (!text.includes(probe.mustContain)) {
|
||||
await fs.writeFile(path.join(__dirname, 'failed-page.txt'), text);
|
||||
const headers = Array.from(resp.headers.entries())
|
||||
.map(([ k, v ]) => ` ${k}=${v}`)
|
||||
.join('\n');
|
||||
throw new Error(
|
||||
`Fetched page ${probeUrl} does not contain ${probe.mustContain}.`
|
||||
+ ` Instead it contains ${text.slice(0, 60)}`
|
||||
+ ` Instead it contains ${text.slice(0, 60)}`
|
||||
+ ` Response headers:\n ${headers}`
|
||||
);
|
||||
}
|
||||
} else {
|
||||
@@ -118,9 +127,12 @@ async function fetchDeploymentUrl (url, opts) {
|
||||
for (let i = 0; i < 500; i += 1) {
|
||||
const resp = await fetch(url, opts);
|
||||
const text = await resp.text();
|
||||
if (text && !text.includes('Join Free')
|
||||
&& !text.includes('The page could not be found')) {
|
||||
return text;
|
||||
if (
|
||||
text
|
||||
&& !text.includes('Join Free')
|
||||
&& !text.includes('The page could not be found')
|
||||
) {
|
||||
return { resp, text };
|
||||
}
|
||||
|
||||
await new Promise((r) => setTimeout(r, 1000));
|
||||
|
||||
13
test/unit/now-next/__snapshots__/utils.test.js.snap
Normal file
13
test/unit/now-next/__snapshots__/utils.test.js.snap
Normal file
@@ -0,0 +1,13 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`getNextConfig return null on nothing 1`] = `null`;
|
||||
|
||||
exports[`getNextConfig should find entry file 1`] = `
|
||||
"module.exports = {};
|
||||
"
|
||||
`;
|
||||
|
||||
exports[`getNextConfig should find work file second 1`] = `
|
||||
"module.exports = { target: 'serverless' };
|
||||
"
|
||||
`;
|
||||
1
test/unit/now-next/fixtures/entry/next.config.js
Normal file
1
test/unit/now-next/fixtures/entry/next.config.js
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = {};
|
||||
1
test/unit/now-next/fixtures/next.config.js
Normal file
1
test/unit/now-next/fixtures/next.config.js
Normal file
@@ -0,0 +1 @@
|
||||
module.exports = { target: 'serverless' };
|
||||
@@ -1,11 +1,33 @@
|
||||
const path = require('path');
|
||||
const {
|
||||
excludeFiles,
|
||||
validateEntrypoint,
|
||||
includeOnlyEntryDirectory,
|
||||
normalizePackageJson,
|
||||
getNextConfig,
|
||||
} = require('@now/next/utils');
|
||||
const FileRef = require('@now/build-utils/file-ref'); // eslint-disable-line import/no-extraneous-dependencies
|
||||
|
||||
describe('getNextConfig', () => {
|
||||
const workPath = path.join(__dirname, 'fixtures');
|
||||
const entryPath = path.join(__dirname, 'fixtures', 'entry');
|
||||
|
||||
it('should find entry file', async () => {
|
||||
const file = await getNextConfig(workPath, entryPath);
|
||||
expect(file).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('should find work file second', async () => {
|
||||
const file = await getNextConfig(workPath, '/');
|
||||
expect(file).toMatchSnapshot();
|
||||
});
|
||||
|
||||
it('return null on nothing', async () => {
|
||||
const file = await getNextConfig('/', '/');
|
||||
expect(file).toMatchSnapshot();
|
||||
});
|
||||
});
|
||||
|
||||
describe('excludeFiles', () => {
|
||||
it('should exclude files', () => {
|
||||
const files = {
|
||||
|
||||
@@ -1,26 +0,0 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2017",
|
||||
"module": "commonjs",
|
||||
"lib": ["es2017"],
|
||||
"allowJs": true,
|
||||
"checkJs": true,
|
||||
"noEmit": true,
|
||||
"strict": false,
|
||||
"types": ["node"],
|
||||
"esModuleInterop": true
|
||||
},
|
||||
"include": [
|
||||
"./packages/now-node/index.js",
|
||||
"./packages/now-build-utils/file-ref.js",
|
||||
"./packages/now-build-utils/file-fs-ref.js",
|
||||
"./packages/now-build-utils/fs/rename.js",
|
||||
"./packages/now-build-utils/fs/download.js",
|
||||
"./packages/now-build-utils/fs/glob.js",
|
||||
"./packages/now-next"
|
||||
],
|
||||
"exclude": [
|
||||
"./packages/now-next/launcher.js",
|
||||
"./packages/now-next/legacy-launcher.js"
|
||||
]
|
||||
}
|
||||
61
yarn.lock
61
yarn.lock
@@ -728,6 +728,11 @@
  resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz#2b5a3ab3f918cca48a8c754c08168e3f03eba61b"
  integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==

"@now/node-bridge@^1.0.0":
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/@now/node-bridge/-/node-bridge-1.0.0.tgz#3e7c1cd5760dc681febb4689201bbea6fdd35230"
  integrity sha512-tHcr0GlgNlE8ucjuw4w7mseJ68R4VkxuHbFQpSUJ5cZqTJbKggtsdv9UL3u5uBJBXgqDmidlEepF3wOm7A99IQ==

"@samverschueren/stream-to-observable@^0.3.0":
  version "0.3.0"
  resolved "https://registry.yarnpkg.com/@samverschueren/stream-to-observable/-/stream-to-observable-0.3.0.tgz#ecdf48d532c58ea477acfcab80348424f8d0662f"

@@ -735,11 +740,23 @@
  dependencies:
    any-observable "^0.3.0"

"@types/async-retry@^1.2.1":
  version "1.2.1"
  resolved "https://registry.yarnpkg.com/@types/async-retry/-/async-retry-1.2.1.tgz#fa9ac165907a8ee78f4924f4e393b656c65b5bb4"
  integrity sha512-yMQ6CVgICWtyFNBqJT3zqOc+TnqqEPLo4nKJNPFwcialiylil38Ie6q1ENeFTjvaLOkVim9K5LisHgAKJWidGQ==

"@types/aws-lambda@8.10.19":
  version "8.10.19"
  resolved "https://registry.yarnpkg.com/@types/aws-lambda/-/aws-lambda-8.10.19.tgz#913a8016a4599d262960d97cb11faf7e963ec0e1"
  integrity sha512-dEhQow/1awGGIf/unEpb97vsTtnQ3qRPAhSmZZcXKzs4nOVbIuWo5LCCzOYdSIkGkkoFXVvc8pBaSVKRYIFUBA==

"@types/end-of-stream@^1.4.0":
  version "1.4.0"
  resolved "https://registry.yarnpkg.com/@types/end-of-stream/-/end-of-stream-1.4.0.tgz#4e73ac87d15b6cc89cdaf2d26a59f617c778cb07"
  integrity sha512-d0FD2A4vpFI8wyQeQbr9VDVKtA1PmeGO3Ntn+6j626QTtAQ9HSqWFACP7rTHaV2cspVhLijl00Vvkf/U2UZGWA==
  dependencies:
    "@types/node" "*"

"@types/events@*":
  version "1.2.0"
  resolved "https://registry.yarnpkg.com/@types/events/-/events-1.2.0.tgz#81a6731ce4df43619e5c8c945383b3e62a89ea86"

@@ -752,6 +769,13 @@
  dependencies:
    "@types/node" "*"

"@types/fs-extra@^5.0.5":
  version "5.0.5"
  resolved "https://registry.yarnpkg.com/@types/fs-extra/-/fs-extra-5.0.5.tgz#080d90a792f3fa2c5559eb44bd8ef840aae9104b"
  integrity sha512-w7iqhDH9mN8eLClQOYTkhdYUOSpp25eXxfc6VbFOGtzxW34JcvctH2bKjj4jD4++z4R5iO5D+pg48W2e03I65A==
  dependencies:
    "@types/node" "*"

"@types/glob@^7.1.1":
  version "7.1.1"
  resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.1.1.tgz#aa59a1c6e3fbc421e07ccd31a944c30eba521575"

@@ -773,6 +797,13 @@
  dependencies:
    "@types/node" "*"

"@types/node-fetch@^2.1.6":
  version "2.1.6"
  resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.1.6.tgz#4326288b49f352a142f03c63526ebce0f4c50877"
  integrity sha512-Hv1jgh3pfpUEl2F2mqUd1AfLSk1YbUCeBJFaP36t7esAO617dErqdxWb5cdG2NfJGOofkmBW36fdx0dVewxDRg==
  dependencies:
    "@types/node" "*"

"@types/node@*", "@types/node@^10.12.8":
  version "10.12.10"
  resolved "https://registry.yarnpkg.com/@types/node/-/node-10.12.10.tgz#4fa76e6598b7de3f0cb6ec3abacc4f59e5b3a2ce"

@@ -783,6 +814,13 @@
  resolved "https://registry.yarnpkg.com/@types/node/-/node-11.9.4.tgz#ceb0048a546db453f6248f2d1d95e937a6f00a14"
  integrity sha512-Zl8dGvAcEmadgs1tmSPcvwzO1YRsz38bVJQvH1RvRqSR9/5n61Q1ktcDL0ht3FXWR+ZpVmXVwN1LuH4Ax23NsA==

"@types/yazl@^2.4.1":
  version "2.4.1"
  resolved "https://registry.yarnpkg.com/@types/yazl/-/yazl-2.4.1.tgz#0441a6ee151bf8be9307a2318b89df50f174ea00"
  integrity sha512-uTgQOl6gCKZ6ys5x2BmnNCd/Em8TqCltjPtyHFc1mz8Q6/+Na7yWnoPgCPhsl44M7S6MfaL6spL6pUM1c7NcDg==
  dependencies:
    "@types/node" "*"

"@zeit/best@0.4.3":
  version "0.4.3"
  resolved "https://registry.yarnpkg.com/@zeit/best/-/best-0.4.3.tgz#eaebdfa8b24121a97b1753501ea8c9330d549b30"

@@ -3060,13 +3098,6 @@ fast-levenshtein@~2.0.4:
  resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917"
  integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc=

fast-stream-to-buffer@1.0.0:
  version "1.0.0"
  resolved "https://registry.yarnpkg.com/fast-stream-to-buffer/-/fast-stream-to-buffer-1.0.0.tgz#793340cc753e7ec9c7fb6d57a53a0b911cb0f588"
  integrity sha512-bI/544WUQlD2iXBibQbOMSmG07Hay7YrpXlKaeGTPT7H7pC0eitt3usak5vUwEvCGK/O7rUAM3iyQValGU22TQ==
  dependencies:
    end-of-stream "^1.4.1"

fastcgi-client@0.0.1:
  version "0.0.1"
  resolved "https://registry.yarnpkg.com/fastcgi-client/-/fastcgi-client-0.0.1.tgz#1046d42ff2cee2a9ac03fea04695b3ef7311861c"

@@ -4100,10 +4131,10 @@ inspect-with-kind@^1.0.4:
  dependencies:
    kind-of "^6.0.2"

into-stream@4.0.0:
  version "4.0.0"
  resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-4.0.0.tgz#ef10ee2ffb6f78af34c93194bbdc36c35f7d8a9d"
  integrity sha512-i29KNyE5r0Y/UQzcQ0IbZO1MYJ53Jn0EcFRZPj5FzWKYH17kDFEOwuA+3jroymOI06SW1dEDnly9A1CAreC5dg==
into-stream@5.0.0:
  version "5.0.0"
  resolved "https://registry.yarnpkg.com/into-stream/-/into-stream-5.0.0.tgz#690569d7806b29d7cbd496cb05972fbe725b42a5"
  integrity sha512-VcdJDRK7+vZrcGCdklXy9Zu6lwh2BFVwxCUhqYwolNYAsJE5og3aY4PR+03Hup8pwKV6JhvQ4dxRMOHUgrutdg==
  dependencies:
    from2 "^2.1.1"
    p-is-promise "^2.0.0"

@@ -8788,10 +8819,10 @@ typescript@3.3.3:
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.3.3.tgz#f1657fc7daa27e1a8930758ace9ae8da31403221"
  integrity sha512-Y21Xqe54TBVp+VDSNbuDYdGw0BpoR/Q6wo/+35M8PAU0vipahnyduJWirxxdxjsAkS7hue53x2zp8gz7F05u0A==

typescript@^3.1.6:
  version "3.1.6"
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.1.6.tgz#b6543a83cfc8c2befb3f4c8fba6896f5b0c9be68"
  integrity sha512-tDMYfVtvpb96msS1lDX9MEdHrW4yOuZ4Kdc4Him9oU796XldPYF/t2+uKoX0BBa0hXXwDlqYQbXY5Rzjzc5hBA==
typescript@3.3.4000:
  version "3.3.4000"
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-3.3.4000.tgz#76b0f89cfdbf97827e1112d64f283f1151d6adf0"
  integrity sha512-jjOcCZvpkl2+z7JFn0yBOoLQyLoIkNZAs/fYJkUG6VKy6zLPHJGfQJYFHzibB6GJaF/8QrcECtlQ5cpvRHSMEA==

uglify-js@3.4.x, uglify-js@^3.1.4:
  version "3.4.9"